diff --git a/.changeset/all-cougars-hide.md b/.changeset/all-cougars-hide.md new file mode 100644 index 00000000000..c8e55280026 --- /dev/null +++ b/.changeset/all-cougars-hide.md @@ -0,0 +1,6 @@ +--- +'@clerk/clerk-js': patch +'@clerk/types': patch +--- + +Introduce debugLogger for internal debugging support diff --git a/.typedoc/__tests__/__snapshots__/file-structure.test.ts.snap b/.typedoc/__tests__/__snapshots__/file-structure.test.ts.snap index be155abe172..f1bce58fdcc 100644 --- a/.typedoc/__tests__/__snapshots__/file-structure.test.ts.snap +++ b/.typedoc/__tests__/__snapshots__/file-structure.test.ts.snap @@ -125,6 +125,7 @@ exports[`Typedoc output > should have a deliberate file structure 1`] = ` "types/sign-up-resource.mdx", "types/signed-in-session-resource.mdx", "types/state-selectors.mdx", + "types/telemetry-log-entry.mdx", "types/use-auth-return.mdx", "types/use-session-list-return.mdx", "types/use-session-return.mdx", diff --git a/packages/clerk-js/bundlewatch.config.json b/packages/clerk-js/bundlewatch.config.json index da405fd6a95..6d8e19c9e68 100644 --- a/packages/clerk-js/bundlewatch.config.json +++ b/packages/clerk-js/bundlewatch.config.json @@ -1,12 +1,12 @@ { "files": [ - { "path": "./dist/clerk.js", "maxSize": "622KB" }, + { "path": "./dist/clerk.js", "maxSize": "624KB" }, { "path": "./dist/clerk.browser.js", "maxSize": "76KB" }, - { "path": "./dist/clerk.legacy.browser.js", "maxSize": "117KB" }, - { "path": "./dist/clerk.headless*.js", "maxSize": "58KB" }, - { "path": "./dist/ui-common*.js", "maxSize": "113KB" }, + { "path": "./dist/clerk.legacy.browser.js", "maxSize": "118KB" }, + { "path": "./dist/clerk.headless*.js", "maxSize": "59KB" }, + { "path": "./dist/ui-common*.js", "maxSize": "114KB" }, { "path": "./dist/ui-common*.legacy.*.js", "maxSize": "118KB" }, - { "path": "./dist/vendors*.js", "maxSize": "40.2KB" }, + { "path": "./dist/vendors*.js", "maxSize": "41KB" }, { "path": "./dist/coinbase*.js", "maxSize": "38KB" }, { "path": "./dist/stripe-vendors*.js", "maxSize": "1KB" }, { "path": "./dist/createorganization*.js", "maxSize": "5KB" }, diff --git a/packages/clerk-js/src/core/clerk.ts b/packages/clerk-js/src/core/clerk.ts index 539b50e961e..5c90df8ca32 100644 --- a/packages/clerk-js/src/core/clerk.ts +++ b/packages/clerk-js/src/core/clerk.ts @@ -161,6 +161,32 @@ type SetActiveHook = (intent?: 'sign-out') => void | Promise; export type ClerkCoreBroadcastChannelEvent = { type: 'signout' }; +/** + * Interface for the debug logger with all available logging methods + */ +interface DebugLoggerInterface { + debug(message: string, context?: Record, source?: string): void; + error(message: string, context?: Record, source?: string): void; + info(message: string, context?: Record, source?: string): void; + trace(message: string, context?: Record, source?: string): void; + warn(message: string, context?: Record, source?: string): void; +} + +/** + * Type guard to check if an object implements the DebugLoggerInterface + */ +function _isDebugLogger(obj: unknown): obj is DebugLoggerInterface { + return ( + typeof obj === 'object' && + obj !== null && + typeof (obj as DebugLoggerInterface).debug === 'function' && + typeof (obj as DebugLoggerInterface).error === 'function' && + typeof (obj as DebugLoggerInterface).info === 'function' && + typeof (obj as DebugLoggerInterface).trace === 'function' && + typeof (obj as DebugLoggerInterface).warn === 'function' + ); +} + declare global { interface Window { Clerk?: Clerk; @@ -199,8 +225,8 @@ export class Clerk implements 
ClerkInterface { public static sdkMetadata: SDKMetadata = { name: __PKG_NAME__, version: __PKG_VERSION__, - environment: process.env.NODE_ENV || 'production', }; + private static _billing: CommerceBillingNamespace; private static _apiKeys: APIKeysNamespace; private _checkout: ClerkInterface['__experimental_checkout'] | undefined; @@ -212,6 +238,7 @@ export class Clerk implements ClerkInterface { public __internal_country?: string | null; public telemetry: TelemetryCollector | undefined; public readonly __internal_state: State = new State(); + public debugLogger?: DebugLoggerInterface; protected internal_last_error: ClerkAPIError | null = null; // converted to protected environment to support `updateEnvironment` type assertion @@ -1509,6 +1536,7 @@ export class Clerk implements ClerkInterface { const customNavigate = options?.replace && this.#options.routerReplace ? this.#options.routerReplace : this.#options.routerPush; + this.debugLogger?.info(`Clerk is navigating to: ${toURL}`); if (this.#options.routerDebug) { console.log(`Clerk is navigating to: ${toURL}`); } @@ -2233,6 +2261,23 @@ export class Clerk implements ClerkInterface { public updateEnvironment(environment: EnvironmentResource): asserts this is { environment: EnvironmentResource } { this.environment = environment; + + // Initialize debug module if client_debug_mode is enabled + if (environment.clientDebugMode) { + this.#initializeDebugModule(); + } + } + + async #initializeDebugModule(): Promise { + try { + const { getDebugLogger } = await import('./modules/debug'); + const logger = await getDebugLogger({}); + if (_isDebugLogger(logger)) { + this.debugLogger = logger; + } + } catch (error) { + console.error('Failed to initialize debug module:', error); + } } __internal_setCountry = (country: string | null) => { @@ -2893,4 +2938,13 @@ export class Clerk implements ClerkInterface { return allowedProtocols; } + + /** + * @internal + */ + public static async __internal_resetDebugLogger(): Promise { + // This method is now handled by the debug module itself + const { __internal_resetDebugLogger } = await import('./modules/debug'); + __internal_resetDebugLogger(); + } } diff --git a/packages/clerk-js/src/core/modules/debug/__tests__/logger.test.ts b/packages/clerk-js/src/core/modules/debug/__tests__/logger.test.ts new file mode 100644 index 00000000000..1fc77109d64 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/__tests__/logger.test.ts @@ -0,0 +1,373 @@ +import { DebugLogger } from '../logger'; +import type { DebugLogFilter } from '../types'; + +// Mock transport for testing +class MockTransport { + public sentEntries: any[] = []; + + async send(entry: any): Promise { + this.sentEntries.push(entry); + } + + reset(): void { + this.sentEntries = []; + } +} + +describe('DebugLogger', () => { + let logger: DebugLogger; + let mockTransport: MockTransport; + + beforeEach(() => { + mockTransport = new MockTransport(); + logger = new DebugLogger(mockTransport, 'trace'); + }); + + afterEach(() => { + mockTransport.reset(); + }); + + describe('basic logging functionality', () => { + it('should log messages at appropriate levels', () => { + logger.error('error message'); + logger.warn('warn message'); + logger.info('info message'); + logger.debug('debug message'); + logger.trace('trace message'); + + expect(mockTransport.sentEntries).toHaveLength(5); + expect(mockTransport.sentEntries[0].level).toBe('error'); + expect(mockTransport.sentEntries[1].level).toBe('warn'); + expect(mockTransport.sentEntries[2].level).toBe('info'); + 
expect(mockTransport.sentEntries[3].level).toBe('debug'); + expect(mockTransport.sentEntries[4].level).toBe('trace'); + }); + + it('should include context and source in log entries', () => { + const context = { userId: '123', action: 'test' }; + const source = 'test-module'; + + logger.info('test message', context, source); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].context).toEqual(context); + expect(mockTransport.sentEntries[0].source).toBe(source); + }); + + it('should respect log level filtering', () => { + const infoLogger = new DebugLogger(mockTransport, 'info'); + + infoLogger.trace('trace message'); + infoLogger.debug('debug message'); + infoLogger.info('info message'); + infoLogger.warn('warn message'); + infoLogger.error('error message'); + + expect(mockTransport.sentEntries).toHaveLength(3); + expect(mockTransport.sentEntries.map(e => e.level)).toEqual(['info', 'warn', 'error']); + }); + }); + + describe('filter functionality', () => { + describe('level filtering', () => { + it('should filter by specific log level', () => { + const filters: DebugLogFilter[] = [{ level: 'error' }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('info message'); + filteredLogger.warn('warn message'); + filteredLogger.error('error message'); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].level).toBe('error'); + }); + + it('should allow all levels when no level filter is specified', () => { + const filters: DebugLogFilter[] = [{}]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('info message'); + filteredLogger.warn('warn message'); + filteredLogger.error('error message'); + + expect(mockTransport.sentEntries).toHaveLength(3); + }); + }); + + describe('source filtering', () => { + it('should filter by exact string source', () => { + const filters: DebugLogFilter[] = [{ source: 'auth-module' }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('message 1', undefined, 'auth-module'); + filteredLogger.info('message 2', undefined, 'other-module'); + filteredLogger.info('message 3', undefined, 'auth-module'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].source).toBe('auth-module'); + expect(mockTransport.sentEntries[1].source).toBe('auth-module'); + }); + + it('should filter by RegExp source pattern', () => { + const filters: DebugLogFilter[] = [{ source: /auth-.*/ }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('message 1', undefined, 'auth-module'); + filteredLogger.info('message 2', undefined, 'auth-service'); + filteredLogger.info('message 3', undefined, 'other-module'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].source).toBe('auth-module'); + expect(mockTransport.sentEntries[1].source).toBe('auth-service'); + }); + + it('should not log when source is undefined and filter expects a source', () => { + const filters: DebugLogFilter[] = [{ source: 'auth-module' }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('message without source'); + + expect(mockTransport.sentEntries).toHaveLength(0); + }); + }); + + describe('include pattern filtering', () => { + it('should include messages matching string patterns', () => { + const filters: DebugLogFilter[] = [{ 
includePatterns: ['error', 'failed'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login failed'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('User login failed'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection error'); + }); + + it('should include messages matching RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: [/error/i, /failed/i] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login FAILED'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection ERROR'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('User login FAILED'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection ERROR'); + }); + + it('should include messages matching mixed string and RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: ['error', /failed/i] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login FAILED'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('User login FAILED'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection error'); + }); + + it('should not log when no include patterns match', () => { + const filters: DebugLogFilter[] = [{ includePatterns: ['error', 'failed'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('Operation completed successfully'); + filteredLogger.info('User logged in'); + + expect(mockTransport.sentEntries).toHaveLength(0); + }); + }); + + describe('exclude pattern filtering', () => { + it('should exclude messages matching string patterns', () => { + const filters: DebugLogFilter[] = [{ excludePatterns: ['debug', 'trace'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login debug info'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('Operation completed successfully'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection error'); + }); + + it('should exclude messages matching RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ excludePatterns: [/debug/i, /trace/i] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login DEBUG info'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('Operation completed successfully'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection error'); + }); + + it('should exclude messages matching mixed string and RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ excludePatterns: ['debug', /trace/i] }]; + const 
filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login debug info'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('Operation completed successfully'); + expect(mockTransport.sentEntries[1].message).toBe('Database connection error'); + }); + + it('should exclude messages containing error in the message', () => { + const filters: DebugLogFilter[] = [{ excludePatterns: ['error'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('User login successful'); + filteredLogger.info('Operation completed successfully'); + filteredLogger.error('Database connection error'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].message).toBe('User login successful'); + expect(mockTransport.sentEntries[1].message).toBe('Operation completed successfully'); + }); + }); + + describe('complex filter combinations', () => { + it('should apply multiple filters with AND logic', () => { + const filters: DebugLogFilter[] = [{ level: 'error', source: 'auth-module' }, { includePatterns: ['failed'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.error('Login failed', undefined, 'auth-module'); + filteredLogger.error('Database error', undefined, 'auth-module'); + filteredLogger.info('Login failed', undefined, 'auth-module'); + filteredLogger.error('Login failed', undefined, 'other-module'); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].message).toBe('Login failed'); + expect(mockTransport.sentEntries[0].level).toBe('error'); + expect(mockTransport.sentEntries[0].source).toBe('auth-module'); + }); + + it('should handle empty filter arrays', () => { + const filteredLogger = new DebugLogger(mockTransport, 'debug', []); + + filteredLogger.info('test message'); + filteredLogger.warn('test warning'); + + expect(mockTransport.sentEntries).toHaveLength(2); + }); + + it('should handle undefined filters', () => { + const filteredLogger = new DebugLogger(mockTransport, 'debug', undefined); + + filteredLogger.info('test message'); + filteredLogger.warn('test warning'); + + expect(mockTransport.sentEntries).toHaveLength(2); + }); + }); + + describe('edge cases', () => { + it('should handle empty string patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: [''] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('any message'); + + expect(mockTransport.sentEntries).toHaveLength(1); + }); + + it('should handle empty RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: [/.*/] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('any message'); + + expect(mockTransport.sentEntries).toHaveLength(1); + }); + + it('should handle special RegExp characters in string patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: ['user.*login'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('user.*login attempt'); + filteredLogger.info('user login attempt'); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].message).toBe('user.*login attempt'); + }); + + it('should handle 
case-sensitive RegExp patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: [/ERROR/] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('Database ERROR'); + filteredLogger.info('Database error'); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].message).toBe('Database ERROR'); + }); + + it('should handle multiple include and exclude patterns', () => { + const filters: DebugLogFilter[] = [{ includePatterns: ['error', 'failed'], excludePatterns: ['debug'] }]; + const filteredLogger = new DebugLogger(mockTransport, 'debug', filters); + + filteredLogger.info('Login failed'); + filteredLogger.info('Database error debug info'); + filteredLogger.info('Operation completed successfully'); + + expect(mockTransport.sentEntries).toHaveLength(1); + expect(mockTransport.sentEntries[0].message).toBe('Login failed'); + }); + }); + }); + + describe('log entry structure', () => { + it('should generate proper log entry structure', () => { + const context = { userId: '123' }; + const source = 'test-module'; + + logger.info('test message', context, source); + + expect(mockTransport.sentEntries).toHaveLength(1); + const entry = mockTransport.sentEntries[0]; + + expect(entry).toHaveProperty('id'); + expect(entry).toHaveProperty('timestamp'); + expect(entry).toHaveProperty('level'); + expect(entry).toHaveProperty('message'); + expect(entry).toHaveProperty('context'); + expect(entry).toHaveProperty('source'); + + expect(typeof entry.id).toBe('string'); + expect(typeof entry.timestamp).toBe('number'); + expect(entry.level).toBe('info'); + expect(entry.message).toBe('test message'); + expect(entry.context).toEqual(context); + expect(entry.source).toBe(source); + }); + + it('should generate unique IDs for each log entry', () => { + logger.info('message 1'); + logger.info('message 2'); + + expect(mockTransport.sentEntries).toHaveLength(2); + expect(mockTransport.sentEntries[0].id).not.toBe(mockTransport.sentEntries[1].id); + }); + + it('should use current timestamp for log entries', () => { + const before = Date.now(); + logger.info('test message'); + const after = Date.now(); + + expect(mockTransport.sentEntries).toHaveLength(1); + const timestamp = mockTransport.sentEntries[0].timestamp; + expect(timestamp).toBeGreaterThanOrEqual(before); + expect(timestamp).toBeLessThanOrEqual(after); + }); + }); +}); diff --git a/packages/clerk-js/src/core/modules/debug/index.ts b/packages/clerk-js/src/core/modules/debug/index.ts new file mode 100644 index 00000000000..778aacfcf41 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/index.ts @@ -0,0 +1,259 @@ +import type { TelemetryCollector } from '@clerk/shared/telemetry'; + +import { DebugLogger } from './logger'; +import { CompositeTransport } from './transports/composite'; +import { ConsoleTransport } from './transports/console'; +import { TelemetryTransport } from './transports/telemetry'; +import type { DebugLogFilter, DebugLogLevel } from './types'; + +const DEFAULT_LOG_LEVEL: DebugLogLevel = 'info'; + +/** + * Validates logger options + */ +function validateLoggerOptions(options: T): void { + if (options.logLevel && typeof options.logLevel !== 'string') { + throw new Error('logLevel must be a string'); + } +} + +/** + * Options for configuring the debug logger + */ +export interface LoggerOptions { + endpoint?: string; + filters?: DebugLogFilter[]; + logLevel?: DebugLogLevel; + telemetryCollector?: TelemetryCollector; +} + +/** + * Options for 
console-only logger configuration + */ +export interface ConsoleLoggerOptions { + filters?: DebugLogFilter[]; + logLevel?: DebugLogLevel; +} + +/** + * Options for telemetry logger configuration + */ +export interface TelemetryLoggerOptions { + endpoint?: string; + logLevel?: DebugLogLevel; + filters?: DebugLogFilter[]; + telemetryCollector?: TelemetryCollector; +} + +/** + * Options for composite logger configuration + */ +export interface CompositeLoggerOptions { + transports: Array<{ transport: ConsoleTransport | TelemetryTransport }>; + logLevel?: DebugLogLevel; + filters?: DebugLogFilter[]; +} + +/** + * Singleton manager for the debug logger + */ +class DebugLoggerManager { + private static instance: DebugLoggerManager; + private initialized = false; + private logger: DebugLogger | null = null; + private initializationPromise: Promise | null = null; + + private constructor() {} + + /** + * Gets the singleton instance + */ + static getInstance(): DebugLoggerManager { + if (!DebugLoggerManager.instance) { + DebugLoggerManager.instance = new DebugLoggerManager(); + } + return DebugLoggerManager.instance; + } + + /** + * Initializes the debug logger with the given options + * @param options - Configuration options for the logger + * @returns Promise resolving to the debug logger instance + */ + async initialize(options: LoggerOptions = {}): Promise { + if (this.initialized) { + return this.logger; + } + + if (this.initializationPromise) { + return this.initializationPromise; + } + + this.initializationPromise = this.performInitialization(options); + return this.initializationPromise; + } + + /** + * Performs the actual initialization logic + * @param options - Configuration options for the logger + * @returns Promise resolving to the debug logger instance + */ + private async performInitialization(options: LoggerOptions): Promise { + try { + validateLoggerOptions(options); + const { logLevel, filters, telemetryCollector } = options; + const finalLogLevel = logLevel ?? 
DEFAULT_LOG_LEVEL; + + const transports = [ + { transport: new ConsoleTransport() }, + { transport: new TelemetryTransport(telemetryCollector) }, + ]; + + const transportInstances = transports.map(t => t.transport); + const compositeTransport = new CompositeTransport(transportInstances); + const logger = new DebugLogger(compositeTransport, finalLogLevel, filters); + + this.logger = logger; + this.initialized = true; + return this.logger; + } catch (error) { + console.error('Failed to initialize debug module:', error); + this.initializationPromise = null; + return null; + } + } + + /** + * Gets the current logger instance + */ + getLogger(): DebugLogger | null { + return this.logger; + } + + /** + * Checks if the debug logger is initialized + */ + isInitialized(): boolean { + return this.initialized; + } + + /** + * Resets the initialization state (for testing purposes) + */ + reset(): void { + this.initialized = false; + this.logger = null; + this.initializationPromise = null; + } +} + +/** + * Gets or initializes the debug logger + * @param options - Configuration options for the logger + * @returns Promise resolving to the debug logger instance + */ +export async function getDebugLogger(options: LoggerOptions = {}): Promise { + const manager = DebugLoggerManager.getInstance(); + return manager.initialize(options); +} + +/** + * Creates a composite logger with both console and telemetry transports + * @param options - Configuration options for the logger + * @returns Object containing the logger and composite transport + */ +export function createLogger(options: { + endpoint?: string; + logLevel?: DebugLogLevel; + filters?: DebugLogFilter[]; + telemetryCollector?: TelemetryCollector; +}): { logger: DebugLogger; transport: CompositeTransport } | null { + try { + validateLoggerOptions(options); + const { logLevel, filters, telemetryCollector } = options; + const finalLogLevel = logLevel ?? DEFAULT_LOG_LEVEL; + + return createCompositeLogger({ + transports: [{ transport: new ConsoleTransport() }, { transport: new TelemetryTransport(telemetryCollector) }], + logLevel: finalLogLevel, + filters, + }); + } catch (error) { + console.error('Failed to create logger:', error); + return null; + } +} + +/** + * Creates a console-only logger + * @param options - Configuration options for the console logger + * @returns Object containing the logger and console transport + */ +export function createConsoleLogger( + options: ConsoleLoggerOptions, +): { logger: DebugLogger; transport: ConsoleTransport } | null { + try { + validateLoggerOptions(options); + const { logLevel, filters } = options; + const finalLogLevel = logLevel ?? DEFAULT_LOG_LEVEL; + const transport = new ConsoleTransport(); + const logger = new DebugLogger(transport, finalLogLevel, filters); + return { logger, transport }; + } catch (error) { + console.error('Failed to create console logger:', error); + return null; + } +} + +/** + * Creates a telemetry-only logger + * @param options - Configuration options for the telemetry logger + * @returns Object containing the logger and telemetry transport + */ +export function createTelemetryLogger( + options: TelemetryLoggerOptions, +): { logger: DebugLogger; transport: TelemetryTransport } | null { + try { + validateLoggerOptions(options); + const { logLevel, filters, telemetryCollector } = options; + const finalLogLevel = logLevel ?? 
DEFAULT_LOG_LEVEL; + const transport = new TelemetryTransport(telemetryCollector); + const logger = new DebugLogger(transport, finalLogLevel, filters); + return { logger, transport }; + } catch (error) { + console.error('Failed to create telemetry logger:', error); + return null; + } +} + +/** + * Creates a composite logger with multiple transports + * @param options - Configuration options for the composite logger + * @returns Object containing the logger and composite transport + */ +export function createCompositeLogger( + options: CompositeLoggerOptions, +): { logger: DebugLogger; transport: CompositeTransport } | null { + try { + validateLoggerOptions(options); + const { transports, logLevel, filters } = options; + const finalLogLevel = logLevel ?? DEFAULT_LOG_LEVEL; + + const transportInstances = transports.map(t => t.transport); + const compositeTransport = new CompositeTransport(transportInstances); + const logger = new DebugLogger(compositeTransport, finalLogLevel, filters); + + return { logger, transport: compositeTransport }; + } catch (error) { + console.error('Failed to create composite logger:', error); + return null; + } +} + +/** + * Internal function to reset the debug logger (for testing purposes) + */ +export function __internal_resetDebugLogger(): void { + const manager = DebugLoggerManager.getInstance(); + manager.reset(); +} diff --git a/packages/clerk-js/src/core/modules/debug/logger.ts b/packages/clerk-js/src/core/modules/debug/logger.ts new file mode 100644 index 00000000000..ad91d3a157b --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/logger.ts @@ -0,0 +1,139 @@ +import type { DebugLogEntry, DebugLogFilter, DebugLogLevel, DebugTransport } from './types'; + +/** + * Default log level for debug logging + */ +const DEFAULT_LOG_LEVEL: DebugLogLevel = 'debug'; + +/** + * Minimal debug logger interface for engineers + */ +export class DebugLogger { + private readonly transport: DebugTransport; + private readonly logLevel: DebugLogLevel; + private readonly filters?: DebugLogFilter[]; + + constructor(transport: DebugTransport, logLevel?: DebugLogLevel, filters?: DebugLogFilter[]) { + this.transport = transport; + this.logLevel = logLevel ?? 
DEFAULT_LOG_LEVEL; + this.filters = filters; + } + + error(message: string, context?: Record, source?: string): void { + this.log('error', message, context, source); + } + + warn(message: string, context?: Record, source?: string): void { + this.log('warn', message, context, source); + } + + info(message: string, context?: Record, source?: string): void { + this.log('info', message, context, source); + } + + debug(message: string, context?: Record, source?: string): void { + this.log('debug', message, context, source); + } + + trace(message: string, context?: Record, source?: string): void { + this.log('trace', message, context, source); + } + + private log(level: DebugLogLevel, message: string, context?: Record, source?: string): void { + if (!this.shouldLogLevel(level)) { + return; + } + + if (!this.shouldLogFilters(level, message, source)) { + return; + } + + const entry: DebugLogEntry = { + id: crypto.randomUUID(), + timestamp: Date.now(), + level, + message, + context, + source, + }; + + this.transport.send(entry).catch(err => { + console.error('Failed to send log entry:', err); + }); + } + + private shouldLogLevel(level: DebugLogLevel): boolean { + const levels: DebugLogLevel[] = ['error', 'warn', 'info', 'debug', 'trace']; + const currentLevelIndex = levels.indexOf(this.logLevel); + const messageLevelIndex = levels.indexOf(level); + return messageLevelIndex <= currentLevelIndex; + } + + private shouldLogFilters(level: DebugLogLevel, message: string, source?: string): boolean { + if (!this.filters || this.filters.length === 0) { + return true; + } + + return this.filters.every(filter => { + if (filter.level && filter.level !== level) { + return false; + } + + if (filter.source && !this.matchesSource(filter.source, source)) { + return false; + } + + if ( + filter.includePatterns && + filter.includePatterns.length > 0 && + !this.shouldInclude(message, filter.includePatterns) + ) { + return false; + } + + if ( + filter.excludePatterns && + filter.excludePatterns.length > 0 && + this.shouldExclude(message, filter.excludePatterns) + ) { + return false; + } + + return true; + }); + } + + /** + * Checks if a source matches the given pattern (string or RegExp) + */ + private matchesSource(pattern: string | RegExp, source?: string): boolean { + if (typeof pattern === 'string') { + return source === pattern; + } + return source !== undefined && pattern.test(source); + } + + /** + * Checks if a message should be included based on the given patterns + */ + private shouldInclude(message: string, patterns: (string | RegExp)[]): boolean { + return patterns.some(pattern => this.matchesPattern(message, pattern)); + } + + /** + * Checks if a message should be excluded based on the given patterns + */ + private shouldExclude(message: string, patterns: (string | RegExp)[]): boolean { + return patterns.some(pattern => this.matchesPattern(message, pattern)); + } + + /** + * Checks if a message matches a given pattern (string or RegExp) + */ + private matchesPattern(message: string, pattern: string | RegExp): boolean { + if (typeof pattern === 'string') { + return message.includes(pattern); + } + return pattern.test(message); + } +} diff --git a/packages/clerk-js/src/core/modules/debug/transports/__tests__/telemetry.test.ts b/packages/clerk-js/src/core/modules/debug/transports/__tests__/telemetry.test.ts new file mode 100644 index 00000000000..89e6ed07f91 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/transports/__tests__/telemetry.test.ts @@ -0,0 +1,61 @@ +import type { TelemetryCollector } 
from '@clerk/shared/telemetry'; + +import type { DebugLogEntry } from '../../types'; +import { TelemetryTransport } from '../telemetry'; + +describe('TelemetryTransport', () => { + let mockCollector: jest.Mocked; + let transport: TelemetryTransport; + + beforeEach(() => { + mockCollector = { + recordLog: jest.fn(), + record: jest.fn(), + isEnabled: true, + isDebug: false, + } as jest.Mocked; + + transport = new TelemetryTransport(mockCollector); + }); + + it('should send debug log entries to the telemetry collector', async () => { + const logEntry: DebugLogEntry = { + id: 'test-id', + level: 'info', + message: 'Test message', + timestamp: Date.now(), + context: { test: 'value' }, + source: 'test', + userId: 'user-123', + sessionId: 'session-456', + organizationId: 'org-789', + }; + + await transport.send(logEntry); + + expect(mockCollector.recordLog).toHaveBeenCalledWith({ + id: 'test-id', + level: 'info', + message: 'Test message', + timestamp: logEntry.timestamp, + context: { test: 'value' }, + source: 'test', + userId: 'user-123', + sessionId: 'session-456', + organizationId: 'org-789', + }); + }); + + it('should handle missing telemetry collector gracefully', async () => { + const transportWithoutCollector = new TelemetryTransport(); + const logEntry: DebugLogEntry = { + id: 'test-id', + level: 'info', + message: 'Test message', + timestamp: Date.now(), + }; + + // Should not throw when no collector is provided + await expect(transportWithoutCollector.send(logEntry)).resolves.toBeUndefined(); + }); +}); diff --git a/packages/clerk-js/src/core/modules/debug/transports/composite.ts b/packages/clerk-js/src/core/modules/debug/transports/composite.ts new file mode 100644 index 00000000000..943c417f058 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/transports/composite.ts @@ -0,0 +1,27 @@ +import type { DebugLogEntry, DebugLogFilter, DebugTransport } from '../types'; + +export interface CompositeLoggerOptions { + transports: Array<{ + transport: DebugTransport; + options?: Record; + }>; + logLevel?: 'error' | 'warn' | 'info' | 'debug' | 'trace'; + filters?: DebugLogFilter[]; +} + +export class CompositeTransport implements DebugTransport { + private readonly transports: DebugTransport[]; + + constructor(transports: DebugTransport[]) { + this.transports = transports; + } + + async send(entry: DebugLogEntry): Promise { + const promises = this.transports.map(transport => + transport.send(entry).catch(err => { + console.error('Failed to send to transport:', err); + }), + ); + await Promise.allSettled(promises); + } +} diff --git a/packages/clerk-js/src/core/modules/debug/transports/console.ts b/packages/clerk-js/src/core/modules/debug/transports/console.ts new file mode 100644 index 00000000000..db847ded80c --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/transports/console.ts @@ -0,0 +1,69 @@ +import type { DebugLogEntry, DebugTransport } from '../types'; + +/** + * ANSI color codes for console output + */ +const colors = { + reset: '\x1b[0m', + bright: '\x1b[1m', + dim: '\x1b[2m', + red: '\x1b[31m', + green: '\x1b[32m', + yellow: '\x1b[33m', + blue: '\x1b[34m', + magenta: '\x1b[35m', + cyan: '\x1b[36m', + white: '\x1b[37m', + gray: '\x1b[90m', +} as const; + +/** + * Color mapping for different log levels + */ +const levelColors = { + error: colors.red, + warn: colors.yellow, + info: colors.blue, + debug: colors.green, + trace: colors.magenta, +} as const; + +export class ConsoleTransport implements DebugTransport { + async send(entry: DebugLogEntry): Promise { + 
const timestamp = new Date(entry.timestamp).toISOString(); + const level = entry.level.toUpperCase(); + const source = entry.source ? `[${entry.source}]` : ''; + const context = entry.context ? ` ${JSON.stringify(entry.context)}` : ''; + + const levelColor = levelColors[entry.level] || colors.white; + + const prefix = `${colors.bright}${colors.cyan}[Clerk Debug]${colors.reset}`; + const timestampColored = `${colors.dim}${timestamp}${colors.reset}`; + const levelColored = `${levelColor}${level}${colors.reset}`; + const sourceColored = source ? `${colors.gray}${source}${colors.reset}` : ''; + const messageColored = `${colors.white}${entry.message}${colors.reset}`; + const contextColored = context ? `${colors.dim}${context}${colors.reset}` : ''; + + const message = `${prefix} ${timestampColored} ${levelColored}${sourceColored}: ${messageColored}${contextColored}`; + + switch (entry.level) { + case 'error': + console.error(message); + break; + case 'warn': + console.warn(message); + break; + case 'info': + console.info(message); + break; + case 'debug': + console.debug(message); + break; + case 'trace': + console.trace(message); + break; + default: + console.log(message); + } + } +} diff --git a/packages/clerk-js/src/core/modules/debug/transports/telemetry.ts b/packages/clerk-js/src/core/modules/debug/transports/telemetry.ts new file mode 100644 index 00000000000..f968346c444 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/transports/telemetry.ts @@ -0,0 +1,35 @@ +import type { TelemetryCollector } from '@clerk/shared/telemetry'; + +import type { DebugLogEntry, DebugLogFilter, DebugLogLevel, DebugTransport } from '../types'; + +export interface TelemetryLoggerOptions { + endpoint?: string; + logLevel?: DebugLogLevel; + filters?: DebugLogFilter[]; +} + +export class TelemetryTransport implements DebugTransport { + private readonly collector?: TelemetryCollector; + + constructor(collector?: TelemetryCollector) { + this.collector = collector; + } + + async send(entry: DebugLogEntry): Promise { + if (!this.collector) { + return; + } + + this.collector.recordLog({ + context: entry.context, + id: entry.id, + level: entry.level, + message: entry.message, + organizationId: entry.organizationId, + sessionId: entry.sessionId, + source: entry.source, + timestamp: entry.timestamp, + userId: entry.userId, + }); + } +} diff --git a/packages/clerk-js/src/core/modules/debug/types.ts b/packages/clerk-js/src/core/modules/debug/types.ts new file mode 100644 index 00000000000..3260b625a74 --- /dev/null +++ b/packages/clerk-js/src/core/modules/debug/types.ts @@ -0,0 +1,155 @@ +/** + * Debug logging levels for different types of information + */ +export type DebugLogLevel = 'error' | 'warn' | 'info' | 'debug' | 'trace'; + +/** + * Valid debug log levels + */ +export const VALID_LOG_LEVELS: readonly DebugLogLevel[] = ['error', 'warn', 'info', 'debug', 'trace'] as const; + +/** + * Debug event types that can be tracked + */ +export type DebugEventType = 'navigation' | 'custom_event'; + +/** + * Base interface for all debug log entries + */ +export interface DebugLogEntry { + readonly context?: Record; + readonly id: string; + readonly level: DebugLogLevel; + readonly message: string; + readonly organizationId?: string; + readonly sessionId?: string; + readonly source?: string; + readonly timestamp: number; + readonly userId?: string; +} + +/** + * Debug data structure for sending debug information to endpoints + */ +export interface DebugData { + readonly error?: ErrorDetails; + readonly eventId: string; 
+ readonly eventType: DebugEventType; + readonly metadata?: Record; + readonly organizationId?: string; + readonly sessionId?: string; + readonly timestamp: number; + readonly userId?: string; +} + +/** + * Transport interface for sending debug log entries to different destinations + */ +export interface DebugTransport { + /** + * Send a single debug log entry + */ + send(entry: DebugLogEntry): Promise; +} + +/** + * Error details for debugging purposes + */ +export interface ErrorDetails { + readonly cause?: unknown; + readonly code?: string | number; + readonly columnNumber?: number; + readonly lineNumber?: number; + readonly message: string; + readonly name: string; + readonly stack?: string; + readonly url?: string; +} + +/** + * Configuration options for the debug logger + */ +export interface DebugLoggerConfig { + readonly bufferSize: number; + readonly filters?: DebugLogFilter[]; + readonly flushInterval: number; + readonly logLevel: DebugLogLevel; + readonly maxLogEntries: number; + readonly transport?: DebugTransport; +} + +/** + * Filter configuration for debug logs + */ +export interface DebugLogFilter { + readonly excludePatterns?: (string | RegExp)[]; + readonly includePatterns?: (string | RegExp)[]; + readonly level?: DebugLogLevel; + readonly sessionId?: string; + readonly source?: string | RegExp; + readonly userId?: string; +} + +/** + * Validates if a value is a valid debug log level + */ +export function isValidLogLevel(level: unknown): level is DebugLogLevel { + return typeof level === 'string' && VALID_LOG_LEVELS.includes(level as DebugLogLevel); +} + +/** + * Type guard for checking if an object is a DebugLogEntry + */ +export function isDebugLogEntry(obj: unknown): obj is DebugLogEntry { + return ( + typeof obj === 'object' && + obj !== null && + 'id' in obj && + 'timestamp' in obj && + 'level' in obj && + 'message' in obj && + typeof (obj as DebugLogEntry).id === 'string' && + typeof (obj as DebugLogEntry).timestamp === 'number' && + typeof (obj as DebugLogEntry).level === 'string' && + isValidLogLevel((obj as DebugLogEntry).level) && + typeof (obj as DebugLogEntry).message === 'string' + ); +} + +/** + * Type guard for checking if an object is DebugData + */ +export function isDebugData(obj: unknown): obj is DebugData { + const validEventTypes: DebugEventType[] = ['navigation', 'custom_event']; + + return ( + typeof obj === 'object' && + obj !== null && + 'eventType' in obj && + 'eventId' in obj && + 'timestamp' in obj && + typeof (obj as DebugData).eventType === 'string' && + validEventTypes.includes((obj as DebugData).eventType) && + typeof (obj as DebugData).eventId === 'string' && + typeof (obj as DebugData).timestamp === 'number' + ); +} + +/** + * Utility type for creating partial debug logger configurations + */ +export type PartialDebugLoggerConfig = Partial; + +/** + * Utility type for creating debug log entries without readonly constraint + */ +export type MutableDebugLogEntry = { + -readonly [K in keyof DebugLogEntry]: DebugLogEntry[K]; +}; + +/** + * Utility type for creating debug data without readonly constraint + */ +export type MutableDebugData = { + -readonly [K in keyof DebugData]: DebugData[K]; +}; diff --git a/packages/clerk-js/src/core/resources/Environment.ts b/packages/clerk-js/src/core/resources/Environment.ts index eadb9a9f698..8ea58a5f0b5 100644 --- a/packages/clerk-js/src/core/resources/Environment.ts +++ b/packages/clerk-js/src/core/resources/Environment.ts @@ -20,6 +20,7 @@ export class Environment extends BaseResource implements 
EnvironmentResource { authConfig: AuthConfigResource = new AuthConfig(); displayConfig: DisplayConfigResource = new DisplayConfig(); maintenanceMode: boolean = false; + clientDebugMode: boolean = false; pathRoot = '/environment'; userSettings: UserSettingsResource = new UserSettings(); organizationSettings: OrganizationSettingsResource = new OrganizationSettings(); @@ -48,6 +49,7 @@ export class Environment extends BaseResource implements EnvironmentResource { this.authConfig = new AuthConfig(data.auth_config); this.displayConfig = new DisplayConfig(data.display_config); this.maintenanceMode = this.withDefault(data.maintenance_mode, this.maintenanceMode); + this.clientDebugMode = this.withDefault(data.client_debug_mode, this.clientDebugMode); this.organizationSettings = new OrganizationSettings(data.organization_settings); this.userSettings = new UserSettings(data.user_settings); this.commerceSettings = new CommerceSettings(data.commerce_settings); @@ -88,6 +90,7 @@ export class Environment extends BaseResource implements EnvironmentResource { display_config: this.displayConfig.__internal_toSnapshot(), id: this.id ?? '', maintenance_mode: this.maintenanceMode, + client_debug_mode: this.clientDebugMode, organization_settings: this.organizationSettings.__internal_toSnapshot(), user_settings: this.userSettings.__internal_toSnapshot(), commerce_settings: this.commerceSettings.__internal_toSnapshot(), diff --git a/packages/shared/src/__tests__/telemetry.logs.test.ts b/packages/shared/src/__tests__/telemetry.logs.test.ts new file mode 100644 index 00000000000..6d8351f1618 --- /dev/null +++ b/packages/shared/src/__tests__/telemetry.logs.test.ts @@ -0,0 +1,164 @@ +import 'cross-fetch/polyfill'; + +import { TelemetryCollector } from '../telemetry'; + +jest.useFakeTimers(); + +const TEST_PK = 'pk_test_Zm9vLWJhci0xMy5jbGVyay5hY2NvdW50cy5kZXYk'; + +describe('TelemetryCollector.recordLog', () => { + let fetchSpy: jest.SpyInstance; + let windowSpy: jest.SpyInstance; + + beforeEach(() => { + fetchSpy = jest.spyOn(global, 'fetch'); + windowSpy = jest.spyOn(window, 'window', 'get'); + }); + + afterEach(() => { + windowSpy.mockRestore(); + fetchSpy.mockRestore(); + }); + + test('sends a valid log with normalized timestamp and sanitized context', () => { + const collector = new TelemetryCollector({ publishableKey: TEST_PK }); + + const ts = Date.now(); + collector.recordLog({ + id: 'abc123', + level: 'info', + message: 'Hello world', + timestamp: ts, + context: { a: 1, b: undefined, c: () => {} }, + } as any); + + jest.runAllTimers(); + + expect(fetchSpy).toHaveBeenCalled(); + const [url, init] = fetchSpy.mock.calls[0]; + expect(String(url)).toMatch('/v1/logs'); + + const initOptions = init as RequestInit; + expect(typeof initOptions.body).toBe('string'); + const body = JSON.parse(initOptions.body as string); + expect(Array.isArray(body)).toBe(true); + expect(body).toHaveLength(1); + + const log = body[0]; + expect(log.lvl).toBe('info'); + expect(log.msg).toBe('Hello world'); + expect(log.iid).toBe('abc123'); + expect(log.ts).toBe(new Date(ts).toISOString()); + expect(log.pk).toBe(TEST_PK); + // Function and undefined stripped out + expect(log.payload).toEqual({ a: 1 }); + }); + + test('nullifies context when missing, non-object, array, or circular', () => { + const collector = new TelemetryCollector({ publishableKey: TEST_PK }); + + const base = { + id: 'id1', + level: 'error' as const, + message: 'msg', + timestamp: Date.now(), + }; + + // undefined context + fetchSpy.mockClear(); + collector.recordLog({ 
...base, context: undefined } as any); + jest.runAllTimers(); + const initOptions1 = fetchSpy.mock.calls[0][1] as RequestInit; + expect(typeof initOptions1.body).toBe('string'); + let body = JSON.parse(initOptions1.body as string); + expect(body[0].payload).toBeNull(); + + // array context + fetchSpy.mockClear(); + collector.recordLog({ ...base, context: [1, 2, 3] } as any); + jest.runAllTimers(); + const initOptions2 = fetchSpy.mock.calls[0][1] as RequestInit; + expect(typeof initOptions2.body).toBe('string'); + body = JSON.parse(initOptions2.body as string); + expect(body[0].payload).toBeNull(); + + // circular context + fetchSpy.mockClear(); + const circular: any = { foo: 'bar' }; + circular.self = circular; + collector.recordLog({ ...base, context: circular } as any); + jest.runAllTimers(); + const initOptions3 = fetchSpy.mock.calls[0][1] as RequestInit; + expect(typeof initOptions3.body).toBe('string'); + body = JSON.parse(initOptions3.body as string); + expect(body[0].payload).toBeNull(); + }); + + test('drops invalid entries: missing id, invalid level, empty message, invalid timestamp', () => { + const collector = new TelemetryCollector({ publishableKey: TEST_PK }); + + // missing id + fetchSpy.mockClear(); + collector.recordLog({ + id: '' as unknown as string, // force invalid at runtime + level: 'info', + message: 'ok', + timestamp: Date.now(), + } as any); + jest.runAllTimers(); + expect(fetchSpy).not.toHaveBeenCalled(); + + // invalid level + fetchSpy.mockClear(); + collector.recordLog({ + id: 'id', + level: 'fatal' as unknown as any, + message: 'ok', + timestamp: Date.now(), + } as any); + jest.runAllTimers(); + expect(fetchSpy).not.toHaveBeenCalled(); + + // empty message + fetchSpy.mockClear(); + collector.recordLog({ + id: 'id', + level: 'debug', + message: '', + timestamp: Date.now(), + }); + jest.runAllTimers(); + expect(fetchSpy).not.toHaveBeenCalled(); + + // invalid timestamp (NaN) + fetchSpy.mockClear(); + collector.recordLog({ + id: 'id', + level: 'warn', + message: 'ok', + timestamp: Number.NaN, + }); + jest.runAllTimers(); + expect(fetchSpy).not.toHaveBeenCalled(); + }); + + test('accepts parsable timestamp strings', () => { + const collector = new TelemetryCollector({ publishableKey: TEST_PK }); + const tsString = new Date().toISOString(); + + collector.recordLog({ + id: 'abc', + level: 'trace', + message: 'ts string', + // @ts-expect-error testing runtime acceptance of string timestamps + timestamp: tsString, + }); + + jest.runAllTimers(); + expect(fetchSpy).toHaveBeenCalled(); + const initOptions4 = fetchSpy.mock.calls[0][1] as RequestInit; + expect(typeof initOptions4.body).toBe('string'); + const body = JSON.parse(initOptions4.body as string); + expect(body[0].ts).toBe(new Date(tsString).toISOString()); + }); +}); diff --git a/packages/shared/src/telemetry/collector.ts b/packages/shared/src/telemetry/collector.ts index ae9944e36bc..c260ba5909c 100644 --- a/packages/shared/src/telemetry/collector.ts +++ b/packages/shared/src/telemetry/collector.ts @@ -16,6 +16,7 @@ import type { TelemetryCollector as TelemetryCollectorInterface, TelemetryEvent, TelemetryEventRaw, + TelemetryLogEntry, } from '@clerk/types'; import { parsePublishableKey } from '../keys'; @@ -60,6 +61,33 @@ type TelemetryMetadata = Required< instanceType: InstanceType; }; +/** + * Structure of log data sent to the telemetry endpoint. + */ +type TelemetryLogData = { + /** Service that generated the log. */ + sdk: string; + /** The version of the SDK where the event originated from. 
*/ + sdkv: string; + /** The version of Clerk where the event originated from. */ + cv: string; + /** Log level (info, warn, error, debug, etc.). */ + lvl: string; + /** Log message. */ + msg: string; + /** Instance ID. */ + iid: string; + /** Timestamp when log was generated. */ + ts: string; + /** Primary key. */ + pk: string | null; + /** Additional payload for the log. */ + payload: Record | null; +}; + +// Accepted log levels for runtime validation +const VALID_LOG_LEVELS = new Set(['error', 'warn', 'info', 'debug', 'trace']); + const DEFAULT_CONFIG: Partial = { samplingRate: 1, maxBufferSize: 5, @@ -74,7 +102,9 @@ export class TelemetryCollector implements TelemetryCollectorInterface { #eventThrottler: TelemetryEventThrottler; #metadata: TelemetryMetadata = {} as TelemetryMetadata; #buffer: TelemetryEvent[] = []; - #pendingFlush: any; + #logBuffer: TelemetryLogData[] = []; + #pendingFlush: number | ReturnType | null = null; + #pendingLogFlush: number | ReturnType | null = null; constructor(options: TelemetryCollectorOptions) { this.#config = { @@ -159,10 +189,68 @@ export class TelemetryCollector implements TelemetryCollectorInterface { this.#scheduleFlush(); } + /** + * Records a telemetry log entry if logging is enabled and not in debug mode. + * + * @param entry - The telemetry log entry to record. + */ + recordLog(entry: TelemetryLogEntry): void { + if (!this.#shouldRecordLog(entry)) { + return; + } + + const idIsValid = typeof entry?.id === 'string' && entry.id.trim().length > 0; + const levelIsValid = typeof entry?.level === 'string' && VALID_LOG_LEVELS.has(entry.level); + const messageIsValid = typeof entry?.message === 'string' && entry.message.trim().length > 0; + + let normalizedTimestamp: Date | null = null; + const timestampInput: unknown = (entry as unknown as { timestamp?: unknown })?.timestamp; + if (typeof timestampInput === 'number' || typeof timestampInput === 'string') { + const candidate = new Date(timestampInput); + if (!Number.isNaN(candidate.getTime())) { + normalizedTimestamp = candidate; + } + } + + if (!idIsValid || !levelIsValid || !messageIsValid || normalizedTimestamp === null) { + if (this.isDebug && typeof console !== 'undefined') { + console.warn('[clerk/telemetry] Dropping invalid telemetry log entry', { + idIsValid, + levelIsValid, + messageIsValid, + timestampIsValid: normalizedTimestamp !== null, + }); + } + return; + } + + const sdkMetadata = this.#getSDKMetadata(); + + const logData: TelemetryLogData = { + sdk: sdkMetadata.name, + sdkv: sdkMetadata.version, + cv: this.#metadata.clerkVersion ?? '', + lvl: entry.level, + msg: entry.message, + iid: entry.id, + ts: normalizedTimestamp.toISOString(), + pk: this.#metadata.publishableKey || null, + payload: this.#sanitizeContext(entry.context), + }; + + this.#logBuffer.push(logData); + + this.#scheduleLogFlush(); + } + #shouldRecord(preparedPayload: TelemetryEvent, eventSamplingRate?: number) { return this.isEnabled && !this.isDebug && this.#shouldBeSampled(preparedPayload, eventSamplingRate); } + #shouldRecordLog(_entry: TelemetryLogEntry): boolean { + return this.isEnabled && !this.isDebug; + } + #shouldBeSampled(preparedPayload: TelemetryEvent, eventSamplingRate?: number) { const randomSeed = Math.random(); @@ -191,8 +279,11 @@ export class TelemetryCollector implements TelemetryCollectorInterface { // If the buffer is full, flush immediately to make sure we minimize the chance of event loss. 
// Cancel any pending flushes as we're going to flush immediately if (this.#pendingFlush) { - const cancel = typeof cancelIdleCallback !== 'undefined' ? cancelIdleCallback : clearTimeout; - cancel(this.#pendingFlush); + if (typeof cancelIdleCallback !== 'undefined') { + cancelIdleCallback(Number(this.#pendingFlush)); + } else { + clearTimeout(Number(this.#pendingFlush)); + } } this.#flush(); return; @@ -206,11 +297,49 @@ export class TelemetryCollector implements TelemetryCollectorInterface { if ('requestIdleCallback' in window) { this.#pendingFlush = requestIdleCallback(() => { this.#flush(); + this.#pendingFlush = null; }); } else { // This is not an ideal solution, but it at least waits until the next tick this.#pendingFlush = setTimeout(() => { this.#flush(); + this.#pendingFlush = null; + }, 0); + } + } + + #scheduleLogFlush(): void { + if (typeof window === 'undefined') { + this.#flushLogs(); + return; + } + + const isBufferFull = this.#logBuffer.length >= this.#config.maxBufferSize; + if (isBufferFull) { + if (this.#pendingLogFlush) { + if (typeof cancelIdleCallback !== 'undefined') { + cancelIdleCallback(Number(this.#pendingLogFlush)); + } else { + clearTimeout(Number(this.#pendingLogFlush)); + } + } + this.#flushLogs(); + return; + } + + if (this.#pendingLogFlush) { + return; + } + + if ('requestIdleCallback' in window) { + this.#pendingLogFlush = requestIdleCallback(() => { + this.#flushLogs(); + this.#pendingLogFlush = null; + }); + } else { + this.#pendingLogFlush = setTimeout(() => { + this.#flushLogs(); + this.#pendingLogFlush = null; }, 0); } } @@ -239,6 +368,27 @@ export class TelemetryCollector implements TelemetryCollectorInterface { }).catch(() => void 0); } + #flushLogs(): void { + // Capture the current buffer and clear it immediately to avoid closure references + const entriesToSend = [...this.#logBuffer]; + this.#logBuffer = []; + + this.#pendingLogFlush = null; + + if (entriesToSend.length === 0) { + return; + } + + fetch(new URL('/v1/logs', this.#config.endpoint), { + method: 'POST', + body: JSON.stringify(entriesToSend), + keepalive: true, + headers: { + 'Content-Type': 'application/json', + }, + }).catch(() => void 0); + } + /** * If running in debug mode, log the event and its payload to the console. */ @@ -276,7 +426,6 @@ export class TelemetryCollector implements TelemetryCollectorInterface { if (isWindowClerkWithMetadata(windowClerk) && windowClerk.constructor.sdkMetadata) { const { name, version } = windowClerk.constructor.sdkMetadata; - // Only update properties if they exist to avoid overwriting with undefined if (name !== undefined) { sdkMetadata.name = name; } @@ -307,4 +456,26 @@ export class TelemetryCollector implements TelemetryCollectorInterface { payload, }; } + + /** + * Best-effort sanitization of the context payload. Returns a plain object with JSON-serializable + * values or null when the input is missing or not serializable. Arrays are not accepted. 
+ */ + #sanitizeContext(context: unknown): Record | null { + if (context === null || typeof context === 'undefined') { + return null; + } + if (typeof context !== 'object') { + return null; + } + try { + const cleaned = JSON.parse(JSON.stringify(context)); + if (cleaned && typeof cleaned === 'object' && !Array.isArray(cleaned)) { + return cleaned as Record; + } + return null; + } catch { + return null; + } + } } diff --git a/packages/types/src/environment.ts b/packages/types/src/environment.ts index b8e2d23cbc2..fbe1725b412 100644 --- a/packages/types/src/environment.ts +++ b/packages/types/src/environment.ts @@ -19,5 +19,6 @@ export interface EnvironmentResource extends ClerkResource { isDevelopmentOrStaging: () => boolean; onWindowLocationHost: () => boolean; maintenanceMode: boolean; + clientDebugMode: boolean; __internal_toSnapshot: () => EnvironmentJSONSnapshot; } diff --git a/packages/types/src/json.ts b/packages/types/src/json.ts index 93c289b549b..e4a075464c9 100644 --- a/packages/types/src/json.ts +++ b/packages/types/src/json.ts @@ -71,13 +71,14 @@ export interface ImageJSON { } export interface EnvironmentJSON extends ClerkResourceJSON { - auth_config: AuthConfigJSON; api_keys_settings: APIKeysSettingsJSON; + auth_config: AuthConfigJSON; + client_debug_mode: boolean; commerce_settings: CommerceSettingsJSON; display_config: DisplayConfigJSON; - user_settings: UserSettingsJSON; - organization_settings: OrganizationSettingsJSON; maintenance_mode: boolean; + organization_settings: OrganizationSettingsJSON; + user_settings: UserSettingsJSON; } export interface ClientJSON extends ClerkResourceJSON { diff --git a/packages/types/src/telemetry.ts b/packages/types/src/telemetry.ts index 7cd0118e056..60725596271 100644 --- a/packages/types/src/telemetry.ts +++ b/packages/types/src/telemetry.ts @@ -43,8 +43,24 @@ export type TelemetryEventRaw = { payload: Payload; }; +/** + * Debug log entry interface for telemetry collector + */ +export interface TelemetryLogEntry { + readonly context?: Record; + readonly id: string; + readonly level: 'error' | 'warn' | 'info' | 'debug' | 'trace'; + readonly message: string; + readonly organizationId?: string; + readonly sessionId?: string; + readonly source?: string; + readonly timestamp: number; + readonly userId?: string; +} + export interface TelemetryCollector { isEnabled: boolean; isDebug: boolean; record(event: TelemetryEventRaw): void; + recordLog(entry: TelemetryLogEntry): void; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index eb6a9a11bb7..c9bd096e07e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -3412,82 +3412,66 @@ packages: '@miniflare/cache@2.14.4': resolution: {integrity: sha512-ayzdjhcj+4mjydbNK7ZGDpIXNliDbQY4GPcY2KrYw0v1OSUdj5kZUkygD09fqoGRfAks0d91VelkyRsAXX8FQA==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/core@2.14.4': resolution: {integrity: sha512-FMmZcC1f54YpF4pDWPtdQPIO8NXfgUxCoR9uyrhxKJdZu7M6n8QKopPVNuaxR40jcsdxb7yKoQoFWnHfzJD9GQ==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/d1@2.14.4': resolution: {integrity: sha512-pMBVq9XWxTDdm+RRCkfXZP+bREjPg1JC8s8C0JTovA9OGmLQXqGTnFxIaS9vf1d8k3uSUGhDzPTzHr0/AUW1gA==} engines: {node: '>=16.7'} - deprecated: Miniflare v2 is no longer supported. 
Please upgrade to Miniflare v4 '@miniflare/durable-objects@2.14.4': resolution: {integrity: sha512-+JrmHP6gHHrjxV8S3axVw5lGHLgqmAGdcO/1HJUPswAyJEd3Ah2YnKhpo+bNmV4RKJCtEq9A2hbtVjBTD2YzwA==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/html-rewriter@2.14.4': resolution: {integrity: sha512-GB/vZn7oLbnhw+815SGF+HU5EZqSxbhIa3mu2L5MzZ2q5VOD5NHC833qG8c2GzDPhIaZ99ITY+ZJmbR4d+4aNQ==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/kv@2.14.4': resolution: {integrity: sha512-QlERH0Z+klwLg0xw+/gm2yC34Nnr/I0GcQ+ASYqXeIXBwjqOtMBa3YVQnocaD+BPy/6TUtSpOAShHsEj76R2uw==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/queues@2.14.4': resolution: {integrity: sha512-aXQ5Ik8Iq1KGMBzGenmd6Js/jJgqyYvjom95/N9GptCGpiVWE5F0XqC1SL5rCwURbHN+aWY191o8XOFyY2nCUA==} engines: {node: '>=16.7'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/r2@2.14.4': resolution: {integrity: sha512-4ctiZWh7Ty7LB3brUjmbRiGMqwyDZgABYaczDtUidblo2DxX4JZPnJ/ZAyxMPNJif32kOJhcg6arC2hEthR9Sw==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/runner-vm@2.14.4': resolution: {integrity: sha512-Nog0bB9SVhPbZAkTWfO4lpLAUsBXKEjlb4y+y66FJw77mPlmPlVdpjElCvmf8T3VN/pqh83kvELGM+/fucMf4g==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/shared-test-environment@2.14.4': resolution: {integrity: sha512-FdU2/8wEd00vIu+MfofLiHcfZWz+uCbE2VTL85KpyYfBsNGAbgRtzFMpOXdoXLqQfRu6MBiRwWpb2FbMrBzi7g==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/shared@2.14.4': resolution: {integrity: sha512-upl4RSB3hyCnITOFmRZjJj4A72GmkVrtfZTilkdq5Qe5TTlzsjVeDJp7AuNUM9bM8vswRo+N5jOiot6O4PVwwQ==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/sites@2.14.4': resolution: {integrity: sha512-O5npWopi+fw9W9Ki0gy99nuBbgDva/iXy8PDC4dAXDB/pz45nISDqldabk0rL2t4W2+lY6LXKzdOw+qJO1GQTA==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/storage-file@2.14.4': resolution: {integrity: sha512-JxcmX0hXf4cB0cC9+s6ZsgYCq+rpyUKRPCGzaFwymWWplrO3EjPVxKCcMxG44jsdgsII6EZihYUN2J14wwCT7A==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/storage-memory@2.14.4': resolution: {integrity: sha512-9jB5BqNkMZ3SFjbPFeiVkLi1BuSahMhc/W1Y9H0W89qFDrrD+z7EgRgDtHTG1ZRyi9gIlNtt9qhkO1B6W2qb2A==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/watcher@2.14.4': resolution: {integrity: sha512-PYn05ET2USfBAeXF6NZfWl0O32KVyE8ncQ/ngysrh3hoIV7l3qGGH7ubeFx+D8VWQ682qYhwGygUzQv2j1tGGg==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. Please upgrade to Miniflare v4 '@miniflare/web-sockets@2.14.4': resolution: {integrity: sha512-stTxvLdJ2IcGOs76AnvGYAzGvx8JvQPRxC5DW0P5zdAAnhL33noqb5LKdPt3P37BKp9FzBKZHuihQI9oVqwm0g==} engines: {node: '>=16.13'} - deprecated: Miniflare v2 is no longer supported. 
Please upgrade to Miniflare v4 '@modelcontextprotocol/sdk@1.7.0': resolution: {integrity: sha512-IYPe/FLpvF3IZrd/f5p5ffmWhMc3aEMuM2wGJASDqC2Ge7qatVCdbfPx3n/5xFeb19xN0j/911M2AaFuircsWA==}
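
Usage sketch (illustrative, not part of the diff): a minimal example of how the debug logger introduced above might be consumed, assuming the relative import paths shown below (in clerk-js the module lives under `src/core/modules/debug`) and an async context for the `await`. It only uses APIs defined in this PR: `getDebugLogger`, `DebugLogFilter`, and the `DebugLogger` level methods.

```ts
import { getDebugLogger } from './core/modules/debug';
import type { DebugLogFilter } from './core/modules/debug/types';

// Only surface warnings and errors coming from auth-related sources,
// and drop anything mentioning "trace" in the message.
const filters: DebugLogFilter[] = [{ source: /auth-.*/, excludePatterns: [/trace/i] }];

// getDebugLogger initializes a singleton with console + telemetry transports
// and resolves to null if initialization fails, so guard before use.
const logger = await getDebugLogger({ logLevel: 'warn', filters });

logger?.warn('Token refresh took longer than expected', { durationMs: 1200 }, 'auth-module');
logger?.error('Sign-in attempt failed', { attempt: 3 }, 'auth-module');
```

Entries emitted this way flow through `CompositeTransport` to both `ConsoleTransport` and `TelemetryTransport`; the latter forwards them to `TelemetryCollector.recordLog`, which validates, sanitizes the context, and batches them to the `/v1/logs` endpoint.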