diff --git a/app/lib/utils/memoize.ts b/app/lib/utils/memoize.ts
new file mode 100644
index 0000000000..4a3fe21ff7
--- /dev/null
+++ b/app/lib/utils/memoize.ts
@@ -0,0 +1,240 @@
+/**
+ * Memoization Utilities for Performance Optimization
+ * Author: Keoma Wright
+ * Purpose: Cache expensive function results to improve performance
+ */
+
+interface MemoizeOptions {
+  maxSize?: number;
+  ttl?: number; // Time to live in milliseconds
+  keyGenerator?: (...args: any[]) => string;
+}
+
+interface CacheEntry<T> {
+  value: T;
+  timestamp: number;
+}
+
+/**
+ * Creates a memoized version of a function with configurable cache
+ */
+export function memoize<T extends (...args: any[]) => any>(fn: T, options: MemoizeOptions = {}): T {
+  const { maxSize = 100, ttl = 60000, keyGenerator = defaultKeyGenerator } = options;
+  const cache = new Map<string, CacheEntry<ReturnType<T>>>();
+
+  return ((...args: Parameters<T>): ReturnType<T> => {
+    const key = keyGenerator(...args);
+    const now = Date.now();
+
+    // Check if we have a valid cached result
+    if (cache.has(key)) {
+      const entry = cache.get(key)!;
+
+      if (!ttl || now - entry.timestamp < ttl) {
+        return entry.value;
+      }
+
+      // Remove expired entry
+      cache.delete(key);
+    }
+
+    // Compute result
+    const result = fn(...args);
+
+    // Cache result
+    cache.set(key, { value: result, timestamp: now });
+
+    // Enforce max size
+    if (cache.size > maxSize) {
+      const firstKey = cache.keys().next().value;
+
+      if (firstKey !== undefined) {
+        cache.delete(firstKey);
+      }
+    }
+
+    return result;
+  }) as T;
+}
+
+/**
+ * Creates a memoized async function
+ */
+export function memoizeAsync<T extends (...args: any[]) => Promise<any>>(fn: T, options: MemoizeOptions = {}): T {
+  const { maxSize = 100, ttl = 60000, keyGenerator = defaultKeyGenerator } = options;
+  const cache = new Map<string, CacheEntry<Promise<Awaited<ReturnType<T>>>>>();
+  const pendingCache = new Map<string, Promise<Awaited<ReturnType<T>>>>();
+
+  return (async (...args: Parameters<T>): Promise<Awaited<ReturnType<T>>> => {
+    const key = keyGenerator(...args);
+    const now = Date.now();
+
+    // Check if we have a pending request for this key
+    if (pendingCache.has(key)) {
+      return pendingCache.get(key)!;
+    }
+
+    // Check if we have a valid cached result
+    if (cache.has(key)) {
+      const entry = cache.get(key)!;
+
+      if (!ttl || now - entry.timestamp < ttl) {
+        return entry.value;
+      }
+
+      // Remove expired entry
+      cache.delete(key);
+    }
+
+    // Create promise for this computation
+    const promise = fn(...args);
+    pendingCache.set(key, promise);
+
+    try {
+      const result = await promise;
+
+      // Cache successful result
+      cache.set(key, { value: Promise.resolve(result), timestamp: now });
+
+      // Enforce max size
+      if (cache.size > maxSize) {
+        const firstKey = cache.keys().next().value;
+
+        if (firstKey !== undefined) {
+          cache.delete(firstKey);
+        }
+      }
+
+      return result;
+    } finally {
+      // Remove from pending cache
+      pendingCache.delete(key);
+    }
+  }) as T;
+}
+
+/**
+ * Default key generator for memoization
+ */
+function defaultKeyGenerator(...args: any[]): string {
+  return JSON.stringify(args, (_, value) => {
+    // Handle special types
+    if (value instanceof Set) {
+      return [...value];
+    }
+
+    if (value instanceof Map) {
+      return Object.fromEntries(value);
+    }
+
+    if (typeof value === 'function') {
+      return value.toString();
+    }
+
+    return value;
+  });
+}
+
+/**
+ * Weak memoization for object arguments
+ */
+export function weakMemoize<T extends (arg: object) => any>(fn: T): T {
+  const cache = new WeakMap<object, ReturnType<T>>();
+
+  return ((arg: object): ReturnType<T> => {
+    if (cache.has(arg)) {
+      return cache.get(arg)!;
+    }
+
+    const result = fn(arg);
+    cache.set(arg, result);
+
+    return result;
+  }) as T;
+}
+
+/**
+ * LRU (Least Recently Used) cache implementation
+ */
+export class LRUCache<K, V> {
+  private _cache = new Map<K, V>();
+  private _maxSize: number;
+
+  constructor(maxSize: number = 100) {
+    this._maxSize = maxSize;
+  }
+
+  get(key: K): V | undefined {
+    if (!this._cache.has(key)) {
+      return undefined;
+    }
+
+    // Move to end (most recently used)
+    const value = this._cache.get(key)!;
+    this._cache.delete(key);
+    this._cache.set(key, value);
+
+    return value;
+  }
+
+  set(key: K, value: V): void {
+    // Remove if exists (to update position)
+    if (this._cache.has(key)) {
+      this._cache.delete(key);
+    } else if (this._cache.size >= this._maxSize) {
+      // Remove least recently used (first item)
+      const firstKey = this._cache.keys().next().value;
+
+      if (firstKey !== undefined) {
+        this._cache.delete(firstKey);
+      }
+    }
+
+    this._cache.set(key, value);
+  }
+
+  has(key: K): boolean {
+    return this._cache.has(key);
+  }
+
+  clear(): void {
+    this._cache.clear();
+  }
+
+  get size(): number {
+    return this._cache.size;
+  }
+}
+
+/**
+ * Memoize with LRU cache
+ */
+export function memoizeLRU<T extends (...args: any[]) => any>(fn: T, maxSize: number = 100): T {
+  const cache = new LRUCache<string, ReturnType<T>>(maxSize);
+
+  return ((...args: Parameters<T>): ReturnType<T> => {
+    const key = JSON.stringify(args);
+
+    const cached = cache.get(key);
+
+    if (cached !== undefined) {
+      return cached;
+    }
+
+    const result = fn(...args);
+    cache.set(key, result);
+
+    return result;
+  }) as T;
+}
+
+/**
+ * Clear all memoization caches (useful for testing)
+ */
+export function clearAllCaches(): void {
+  /*
+   * This would need to track all created caches
+   * For now, it's a placeholder for manual cache management
+   */
+  console.log('Cache clearing requested');
+}
diff --git a/app/lib/utils/stream-buffer.ts b/app/lib/utils/stream-buffer.ts
new file mode 100644
index 0000000000..ee1f4f56d6
--- /dev/null
+++ b/app/lib/utils/stream-buffer.ts
@@ -0,0 +1,102 @@
+/**
+ * Stream Buffer Utility for Optimized Chunk Processing
+ * Author: Keoma Wright
+ * Purpose: Provides efficient buffering and batching for stream processing
+ */
+
+export class StreamBuffer {
+  private _buffer: string[] = [];
+  private _bufferSize = 0;
+  private readonly _maxBufferSize: number;
+  private readonly _flushInterval: number;
+  private _flushTimer: ReturnType<typeof setTimeout> | null = null;
+  private _onFlush: (data: string) => void;
+
+  constructor(options: { maxBufferSize?: number; flushInterval?: number; onFlush: (data: string) => void }) {
+    this._maxBufferSize = options.maxBufferSize || 4096; // 4KB default
+    this._flushInterval = options.flushInterval || 50; // 50ms default
+    this._onFlush = options.onFlush;
+  }
+
+  add(chunk: string): void {
+    this._buffer.push(chunk);
+    this._bufferSize += chunk.length;
+
+    // Flush if buffer size exceeds threshold
+    if (this._bufferSize >= this._maxBufferSize) {
+      this.flush();
+    } else if (!this._flushTimer) {
+      // Set timer for time-based flush
+      this._flushTimer = setTimeout(() => this.flush(), this._flushInterval);
+    }
+  }
+
+  flush(): void {
+    if (this._buffer.length === 0) {
+      return;
+    }
+
+    // Join all buffered chunks
+    const data = this._buffer.join('');
+
+    // Clear buffer
+    this._buffer = [];
+    this._bufferSize = 0;
+
+    // Clear timer
+    if (this._flushTimer) {
+      clearTimeout(this._flushTimer);
+      this._flushTimer = null;
+    }
+
+    // Call flush handler
+    this._onFlush(data);
+  }
+
+  destroy(): void {
+    this.flush();
+
+    if (this._flushTimer) {
+      clearTimeout(this._flushTimer);
+      this._flushTimer = null;
+    }
+  }
+}
+
+/**
+ * Creates an optimized transform stream with buffering
+ */
+export function createBufferedTransformStream(options?: {
+  maxBufferSize?: number;
+  flushInterval?: number;
+  transform?: (chunk: string) => string;
+}): TransformStream<string, Uint8Array> {
+  const encoder = new TextEncoder();
+  let buffer: StreamBuffer | null = null;
+
+  return new TransformStream({
+    start(controller) {
+      buffer = new StreamBuffer({
+        maxBufferSize: options?.maxBufferSize,
+        flushInterval: options?.flushInterval,
+        onFlush: (data) => {
+          const transformed = options?.transform ? options.transform(data) : data;
+          controller.enqueue(encoder.encode(transformed));
+        },
+      });
+    },
+
+    transform(chunk) {
+      if (buffer && typeof chunk === 'string') {
+        buffer.add(chunk);
+      }
+    },
+
+    flush() {
+      if (buffer) {
+        buffer.destroy();
+        buffer = null;
+      }
+    },
+  });
+}