diff --git a/package.json b/package.json index 1005c46..40e7056 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@eppo/js-client-sdk-common", - "version": "4.3.0", + "version": "4.4.0", "description": "Eppo SDK for client-side JavaScript applications (base for both web and react native)", "main": "dist/index.js", "files": [ diff --git a/src/cache/abstract-assignment-cache.ts b/src/cache/abstract-assignment-cache.ts index d5bb9d7..118fbe2 100644 --- a/src/cache/abstract-assignment-cache.ts +++ b/src/cache/abstract-assignment-cache.ts @@ -1,7 +1,5 @@ import { getMD5Hash } from '../obfuscation'; -import { LRUCache } from './lru-cache'; - /** * Assignment cache keys are only on the subject and flag level, while the entire value is used * for uniqueness checking. This way that if an assigned variation or bandit action changes for a @@ -60,7 +58,7 @@ export abstract class AbstractAssignmentCache> return this.get(entry) === assignmentCacheValueToString(entry); } - private get(key: AssignmentCacheKey): string | undefined { + get(key: AssignmentCacheKey): string | undefined { return this.delegate.get(assignmentCacheKeyToString(key)); } @@ -80,32 +78,3 @@ export abstract class AbstractAssignmentCache> return this.delegate.entries(); } } - -/** - * A cache that never expires. - * - * The primary use case is for client-side SDKs, where the cache is only used - * for a single user. - */ -export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache< - Map -> { - constructor(store = new Map()) { - super(store); - } -} - -/** - * A cache that uses the LRU algorithm to evict the least recently used items. - * - * It is used to limit the size of the cache. - * - * The primary use case is for server-side SDKs, where the cache is shared across - * multiple users. In this case, the cache size should be set to the maximum number - * of users that can be active at the same time. 
- */ -export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache { - constructor(maxSize: number) { - super(new LRUCache(maxSize)); - } -} diff --git a/src/cache/lru-cache.spec.ts b/src/cache/lru-cache.spec.ts index 9812a5e..b0e311a 100644 --- a/src/cache/lru-cache.spec.ts +++ b/src/cache/lru-cache.spec.ts @@ -61,4 +61,19 @@ describe('LRUCache', () => { expect(oneCache.get('a')).toBeFalsy(); expect(oneCache.get('b')).toBe('banana'); }); + + /** + This test case might be an overkill but in case Map() changes, + or we want to ditch it completely this will remind us that insertion + order is crucial for this cache to work properly + **/ + it('should preserve insertion order when inserting on capacity limit', () => { + cache.set('a', 'apple'); + cache.set('b', 'banana'); + cache.set('c', 'cherry'); + + const keys = Array.from(cache.keys()); + expect(keys[0]).toBe('b'); + expect(keys[1]).toBe('c'); + }); }); diff --git a/src/cache/lru-cache.ts b/src/cache/lru-cache.ts index d87d29c..507d905 100644 --- a/src/cache/lru-cache.ts +++ b/src/cache/lru-cache.ts @@ -12,12 +12,12 @@ * Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map */ export class LRUCache implements Map { - private readonly cache = new Map(); + protected readonly cache = new Map(); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore [Symbol.toStringTag]: string; - constructor(private readonly capacity: number) {} + constructor(protected readonly capacity: number) {} [Symbol.iterator](): IterableIterator<[string, string]> { return this.cache[Symbol.iterator](); diff --git a/src/cache/lru-in-memory-assignment-cache.ts b/src/cache/lru-in-memory-assignment-cache.ts new file mode 100644 index 0000000..2b51b72 --- /dev/null +++ b/src/cache/lru-in-memory-assignment-cache.ts @@ -0,0 +1,18 @@ +import { AbstractAssignmentCache } from './abstract-assignment-cache'; +import { LRUCache } from './lru-cache'; + +/** + * A cache that uses the LRU 
algorithm to evict the least recently used items. + * + * It is used to limit the size of the cache. + * + * The primary use case is for server-side SDKs, where the cache is shared across + * multiple users. In this case, the cache size should be set to the maximum number + * of users that can be active at the same time. + * @param {number} maxSize - Maximum cache size + */ +export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache { + constructor(maxSize: number) { + super(new LRUCache(maxSize)); + } +} diff --git a/src/cache/abstract-assignment-cache.spec.ts b/src/cache/non-expiring-in-memory-assignment-cache.ts similarity index 95% rename from src/cache/abstract-assignment-cache.spec.ts rename to src/cache/non-expiring-in-memory-assignment-cache.ts index 6482dd7..4668ade 100644 --- a/src/cache/abstract-assignment-cache.spec.ts +++ b/src/cache/non-expiring-in-memory-assignment-cache.ts @@ -1,8 +1,8 @@ import { assignmentCacheKeyToString, assignmentCacheValueToString, - NonExpiringInMemoryAssignmentCache, } from './abstract-assignment-cache'; +import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment'; describe('NonExpiringInMemoryAssignmentCache', () => { it('read and write variation entries', () => { diff --git a/src/cache/non-expiring-in-memory-cache-assignment.ts b/src/cache/non-expiring-in-memory-cache-assignment.ts new file mode 100644 index 0000000..10120f5 --- /dev/null +++ b/src/cache/non-expiring-in-memory-cache-assignment.ts @@ -0,0 +1,15 @@ +import { AbstractAssignmentCache } from './abstract-assignment-cache'; + +/** + * A cache that never expires. + * + * The primary use case is for client-side SDKs, where the cache is only used + * for a single user. 
+ */ +export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache< + Map +> { + constructor(store = new Map()) { + super(store); + } +} diff --git a/src/cache/tlru-cache.spec.ts b/src/cache/tlru-cache.spec.ts new file mode 100644 index 0000000..b4038a6 --- /dev/null +++ b/src/cache/tlru-cache.spec.ts @@ -0,0 +1,175 @@ +import { TLRUCache } from './tlru-cache'; + +describe('TLRU Cache', () => { + let cache: TLRUCache; + const expectedCacheTimeoutMs = 10; + + beforeEach(async () => { + cache = new TLRUCache(2, expectedCacheTimeoutMs); + }); + + afterEach(async () => { + jest.restoreAllMocks(); + jest.clearAllTimers(); + }); + + it('should evict cache after expiration', () => { + jest.useFakeTimers(); + + cache.set('a', 'apple'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + + expect(cache.get('a')).toBeUndefined(); + }); + + it('should not evict cache before expiration', () => { + jest.useFakeTimers(); + + cache.set('a', 'apple'); + jest.advanceTimersByTime(expectedCacheTimeoutMs - 1); + expect(cache.get('a')).toBe('apple'); + }); + + it('should evict all expired entries on .entries() call', () => { + jest.useFakeTimers(); + + cache.set('a', 'avocado'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + cache.set('b', 'banana'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + + const cacheEntries = []; + + for (const entry of cache.entries()) { + cacheEntries.push(entry); + } + + expect(cacheEntries.length).toBe(0); + }); + + it('should evict all expired entries on .keys() call', () => { + jest.useFakeTimers(); + + cache = new TLRUCache(3, expectedCacheTimeoutMs); + cache.set('a', 'avocado'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + cache.set('b', 'banana'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + cache.set('c', 'cherry'); + + const cacheKeys = []; + + for (const key of cache.keys()) { + cacheKeys.push(key); + } + + expect(cacheKeys.length).toBe(1); + expect(cache.get('c')).toBe('cherry'); + }); + + 
it('should evict all expired entries on .values() call', () => { + jest.useFakeTimers(); + cache = new TLRUCache(3, expectedCacheTimeoutMs); + + cache.set('a', 'avocado'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + cache.set('b', 'banana'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + cache.set('c', 'cherry'); + + const cacheValues = []; + + for (const value of cache.values()) { + cacheValues.push(value); + } + + expect(cacheValues.length).toBe(1); + expect(cache.get('c')).toBe('cherry'); + }); + + it('should overwrite existing cache entry', () => { + jest.useFakeTimers(); + + cache.set('a', 'apple'); + jest.advanceTimersByTime(expectedCacheTimeoutMs - 1); + cache.set('a', 'avocado'); + + // spin the clock by half the TTL; after that the cache entry should still be valid. + jest.advanceTimersByTime(expectedCacheTimeoutMs / 2); + + // setting assertion in a weird way because calling cache.get() + // will reset eviction timer which will mess up next assertion + let avocadoInCache = false; + cache.forEach((value, key) => { + if (key === 'a' && value === 'avocado') { + avocadoInCache = true; + } + }); + expect(avocadoInCache).toBe(true); + + // after another half-TTL spin the full TTL has elapsed, so the entry should evict itself + jest.advanceTimersByTime(expectedCacheTimeoutMs / 2); + expect(cache.get('a')).toBeUndefined(); + }); + + it('should check if a key exists', () => { + cache.set('a', 'apple'); + expect(cache.has('a')).toBeTruthy(); + expect(cache.has('b')).toBeFalsy(); + }); + + it('should handle the cache capacity of zero', () => { + const zeroCache = new TLRUCache(0, expectedCacheTimeoutMs); + zeroCache.set('a', 'apple'); + expect(zeroCache.get('a')).toBeFalsy(); + }); + + it('should handle the cache capacity of one', () => { + jest.useFakeTimers(); + const oneCache = new TLRUCache(1, expectedCacheTimeoutMs); + oneCache.set('a', 'apple'); + jest.advanceTimersByTime(expectedCacheTimeoutMs); + expect(oneCache.get('a')).toBeUndefined(); + + oneCache.set('a', 'avocado'); +
expect(oneCache.get('a')).toBe('avocado'); + oneCache.set('b', 'banana'); + expect(oneCache.get('a')).toBeFalsy(); + expect(oneCache.get('b')).toBe('banana'); + }); + + it('should evict oldest entry when capacity limit is reached', () => { + cache.set('a', 'apple'); + cache.set('b', 'banana'); + cache.set('c', 'cherry'); + + expect(cache.get('a')).toBeUndefined(); + expect(cache.has('b')).toBeTruthy(); + expect(cache.has('c')).toBeTruthy(); + }); + + /** + This test case might be an overkill but in case Map() changes, + or we want to ditch it completely this will remind us that insertion + order is crucial for this cache to work properly + **/ + it('should preserve insertion order when inserting on capacity limit', () => { + cache.set('a', 'apple'); + cache.set('b', 'banana'); + cache.set('c', 'cherry'); + + let keys = Array.from(cache.keys()); + expect(keys[0]).toBe('b'); + expect(keys[1]).toBe('c'); + + cache = new TLRUCache(2, expectedCacheTimeoutMs); + cache.set('a', 'apple'); + cache.set('b', 'banana'); + cache.get('a'); + cache.set('c', 'cherry'); + + keys = Array.from(cache.keys()); + expect(keys[0]).toBe('a'); + expect(keys[1]).toBe('c'); + }); +}); diff --git a/src/cache/tlru-cache.ts b/src/cache/tlru-cache.ts new file mode 100644 index 0000000..c16f14e --- /dev/null +++ b/src/cache/tlru-cache.ts @@ -0,0 +1,106 @@ +import { LRUCache } from './lru-cache'; + +/** + * Time-aware, least-recently-used cache (TLRU). Variant of LRU where entries have valid lifetime. 
+ @param {number} maxSize - Maximum cache size + * @param {number} ttl - Time in milliseconds after which cache entry will evict itself + * Expired entries are evicted lazily on access; there is no background eviction timer + **/ +export class TLRUCache extends LRUCache { + private readonly cacheEntriesTTLRegistry = new Map(); + constructor(readonly maxSize: number, readonly ttl: number) { + super(maxSize); + } + + private getCacheEntryEvictionTime(): Date { + return new Date(Date.now() + this.ttl); + } + + private clearCacheEntryEvictionTimeIfExists(key: string): void { + if (this.cacheEntriesTTLRegistry.has(key)) { + this.cacheEntriesTTLRegistry.delete(key); + } + } + + private isCacheEntryValid(key: string): boolean { + const now = new Date(Date.now()); + const evictionDate = this.cacheEntriesTTLRegistry.get(key); + return evictionDate !== undefined ? now < evictionDate : false; + } + + private setCacheEntryEvictionTime(key: string): void { + this.cacheEntriesTTLRegistry.set(key, this.getCacheEntryEvictionTime()); + } + + private resetCacheEntryEvictionTime(key: string): void { + this.clearCacheEntryEvictionTimeIfExists(key); + this.setCacheEntryEvictionTime(key); + } + + private evictExpiredCacheEntries() { + let cacheKey: string; + + // Not using this.cache.forEach so we can break the loop once + // we find the first non-expired entry.
Each entry after that + // is guaranteed to also be non-expired, because iteration happens + // in insertion order + for (cacheKey of this.cache.keys()) { + if (!this.isCacheEntryValid(cacheKey)) { + this.delete(cacheKey); + } else { + break; + } + } + } + + entries(): IterableIterator<[string, string]> { + this.evictExpiredCacheEntries(); + return super.entries(); + } + + keys(): IterableIterator { + this.evictExpiredCacheEntries(); + return super.keys(); + } + + values(): IterableIterator { + this.evictExpiredCacheEntries(); + return super.values(); + } + + delete(key: string): boolean { + this.clearCacheEntryEvictionTimeIfExists(key); + return super.delete(key); + } + + has(key: string): boolean { + if (!this.isCacheEntryValid(key)) { + this.delete(key); + return false; + } + return this.cache.has(key); + } + + get(key: string): string | undefined { + if (!this.isCacheEntryValid(key)) { + this.delete(key); + return undefined; + } + + const value = super.get(key); + if (value !== undefined) { + // Whenever we get a cache hit, we need to reset the timer + // for eviction, because it is now considered most recently + // accessed thus the timer should start over. 
Not doing that + // will cause a de-sync that will stop proper eviction + this.resetCacheEntryEvictionTime(key); + } + return value; + } + + set(key: string, value: string): this { + const cache = super.set(key, value); + this.resetCacheEntryEvictionTime(key); + return cache; + } +} diff --git a/src/cache/tlru-in-memory-assignment-cache.spec.ts b/src/cache/tlru-in-memory-assignment-cache.spec.ts new file mode 100644 index 0000000..886ca62 --- /dev/null +++ b/src/cache/tlru-in-memory-assignment-cache.spec.ts @@ -0,0 +1,43 @@ +import { DEFAULT_TLRU_TTL_MS } from '../constants'; + +import { TLRUInMemoryAssignmentCache } from './tlru-in-memory-assignment-cache'; + +describe('TLRUInMemoryAssignmentCache', () => { + let cache: TLRUInMemoryAssignmentCache; + const defaultTimeout = DEFAULT_TLRU_TTL_MS; // 10 minutes + + beforeAll(() => { + jest.useFakeTimers(); + cache = new TLRUInMemoryAssignmentCache(2); + }); + + afterAll(() => { + jest.clearAllTimers(); + }); + + it(`assignment cache's timeout should default to 10 minutes `, () => { + const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' }; + cache.set(cacheEntry); + jest.advanceTimersByTime(defaultTimeout); + expect(cache.has(cacheEntry)).toBeFalsy(); + }); + + it(`assignment cache's timeout value is used on construction`, () => { + const expectedTimeout = 88; + cache = new TLRUInMemoryAssignmentCache(2, expectedTimeout); + const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' }; + cache.set(cacheEntry); + jest.advanceTimersByTime(expectedTimeout); + expect(cache.has(cacheEntry)).toBeFalsy(); + }); + + it(`cache shouldn't be invalidated before timeout`, () => { + const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' }; + cache.set(cacheEntry); + + expect(cache.has(cacheEntry)).toBeTruthy(); + + jest.advanceTimersByTime(defaultTimeout); + expect(cache.has(cacheEntry)).toBeFalsy(); + }); +}); diff --git
a/src/cache/tlru-in-memory-assignment-cache.ts b/src/cache/tlru-in-memory-assignment-cache.ts new file mode 100644 index 0000000..854f76c --- /dev/null +++ b/src/cache/tlru-in-memory-assignment-cache.ts @@ -0,0 +1,19 @@ +import { DEFAULT_TLRU_TTL_MS } from '../constants'; + +import { AbstractAssignmentCache } from './abstract-assignment-cache'; +import { TLRUCache } from './tlru-cache'; + +/** + * Variation of LRU caching mechanism that will automatically evict items after + * set time of milliseconds. + * + * It is used to limit the size of the cache. + * + * @param {number} maxSize - Maximum cache size + * @param {number} ttl - Time in milliseconds after cache will expire. + */ +export class TLRUInMemoryAssignmentCache extends AbstractAssignmentCache { + constructor(maxSize: number, ttl = DEFAULT_TLRU_TTL_MS) { + super(new TLRUCache(maxSize, ttl)); + } +} diff --git a/src/client/eppo-client-with-bandits.spec.ts b/src/client/eppo-client-with-bandits.spec.ts index 5c71d33..23517ef 100644 --- a/src/client/eppo-client-with-bandits.spec.ts +++ b/src/client/eppo-client-with-bandits.spec.ts @@ -211,7 +211,7 @@ describe('EppoClient Bandits E2E test', () => { it('Flushed queued logging events when a logger is set', () => { client.useLRUInMemoryAssignmentCache(5); - client.useLRUInMemoryBanditAssignmentCache(5); + client.useExpiringInMemoryBanditAssignmentCache(5); client.setAssignmentLogger(null as unknown as IAssignmentLogger); client.setBanditLogger(null as unknown as IBanditLogger); const banditAssignment = client.getBanditAction( diff --git a/src/client/eppo-client.ts b/src/client/eppo-client.ts index 5c6e326..1c9c318 100644 --- a/src/client/eppo-client.ts +++ b/src/client/eppo-client.ts @@ -3,11 +3,10 @@ import { logger } from '../application-logger'; import { IAssignmentEvent, IAssignmentLogger } from '../assignment-logger'; import { BanditEvaluator } from '../bandit-evaluator'; import { IBanditEvent, IBanditLogger } from '../bandit-logger'; -import { - 
AssignmentCache, - LRUInMemoryAssignmentCache, - NonExpiringInMemoryAssignmentCache, -} from '../cache/abstract-assignment-cache'; +import { AssignmentCache } from '../cache/abstract-assignment-cache'; +import { LRUInMemoryAssignmentCache } from '../cache/lru-in-memory-assignment-cache'; +import { NonExpiringInMemoryAssignmentCache } from '../cache/non-expiring-in-memory-cache-assignment'; +import { TLRUInMemoryAssignmentCache } from '../cache/tlru-in-memory-assignment-cache'; import ConfigurationRequestor from '../configuration-requestor'; import { IConfigurationStore } from '../configuration-store/configuration-store'; import { @@ -1011,8 +1010,12 @@ export default class EppoClient { this.banditAssignmentCache = new NonExpiringInMemoryAssignmentCache(); } - useLRUInMemoryBanditAssignmentCache(maxSize: number) { - this.banditAssignmentCache = new LRUInMemoryAssignmentCache(maxSize); + /** + * @param {number} maxSize - Maximum cache size + * @param {number} timeout - TTL of cache entries + */ + useExpiringInMemoryBanditAssignmentCache(maxSize: number, timeout?: number) { + this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize, timeout); } // noinspection JSUnusedGlobalSymbols diff --git a/src/constants.ts b/src/constants.ts index 4546722..f26a13b 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -12,3 +12,4 @@ export const NULL_SENTINEL = 'EPPO_NULL'; // number of logging events that may be queued while waiting for initialization export const MAX_EVENT_QUEUE_SIZE = 100; export const BANDIT_ASSIGNMENT_SHARDS = 10000; +export const DEFAULT_TLRU_TTL_MS = 600_000; diff --git a/src/index.ts b/src/index.ts index a4b0eec..7bc3d9a 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,8 +6,6 @@ import { IBanditLogger, IBanditEvent } from './bandit-logger'; import { AbstractAssignmentCache, AssignmentCache, - NonExpiringInMemoryAssignmentCache, - LRUInMemoryAssignmentCache, AsyncMap, AssignmentCacheKey, AssignmentCacheValue, @@ -15,6 +13,8 @@ import { 
assignmentCacheKeyToString, assignmentCacheValueToString, } from './cache/abstract-assignment-cache'; +import { LRUInMemoryAssignmentCache } from './cache/lru-in-memory-assignment-cache'; +import { NonExpiringInMemoryAssignmentCache } from './cache/non-expiring-in-memory-cache-assignment'; import EppoClient, { FlagConfigurationRequestParameters, IAssignmentDetails,