2 changes: 1 addition & 1 deletion src/cache/abstract-assignment-cache.spec.ts
@@ -1,8 +1,8 @@
import {
assignmentCacheKeyToString,
assignmentCacheValueToString,
NonExpiringInMemoryAssignmentCache,
} from './abstract-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment';

describe('NonExpiringInMemoryAssignmentCache', () => {
it('read and write variation entries', () => {
33 changes: 1 addition & 32 deletions src/cache/abstract-assignment-cache.ts
@@ -1,7 +1,5 @@
import { getMD5Hash } from '../obfuscation';

import { LRUCache } from './lru-cache';

/**
* Assignment cache keys are only on the subject and flag level, while the entire value is used
for uniqueness checking. This way, if an assigned variation or bandit action changes for a
@@ -60,7 +58,7 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
return this.get(entry) === assignmentCacheValueToString(entry);
}

private get(key: AssignmentCacheKey): string | undefined {
get(key: AssignmentCacheKey): string | undefined {
return this.delegate.get(assignmentCacheKeyToString(key));
}

@@ -80,32 +78,3 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
return this.delegate.entries();
}
}

/**
* A cache that never expires.
*
* The primary use case is for client-side SDKs, where the cache is only used
* for a single user.
*/
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
Map<string, string>
> {
constructor(store = new Map<string, string>()) {
super(store);
}
}

/**
* A cache that uses the LRU algorithm to evict the least recently used items.
*
* It is used to limit the size of the cache.
*
* The primary use case is for server-side SDKs, where the cache is shared across
* multiple users. In this case, the cache size should be set to the maximum number
* of users that can be active at the same time.
*/
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
constructor(maxSize: number) {
super(new LRUCache(maxSize));
}
}
4 changes: 2 additions & 2 deletions src/cache/lru-cache.ts
@@ -12,12 +12,12 @@
* Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map
*/
export class LRUCache implements Map<string, string> {
private readonly cache = new Map<string, string>();
protected readonly cache = new Map<string, string>();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
[Symbol.toStringTag]: string;

constructor(private readonly capacity: number) {}
constructor(protected readonly capacity: number) {}

[Symbol.iterator](): IterableIterator<[string, string]> {
return this.cache[Symbol.iterator]();
18 changes: 18 additions & 0 deletions src/cache/lru-in-memory-assignment-cache.ts
@@ -0,0 +1,18 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { LRUCache } from './lru-cache';

/**
* A cache that uses the LRU algorithm to evict the least recently used items.
*
* It is used to limit the size of the cache.
*
* The primary use case is for server-side SDKs, where the cache is shared across
* multiple users. In this case, the cache size should be set to the maximum number
* of users that can be active at the same time.
* @param {number} maxSize - Maximum cache size
*/
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
constructor(maxSize: number) {
super(new LRUCache(maxSize));
}
}
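
For context, a brief usage sketch of the relocated class as a server-side SDK might use it; the import path, key values, and cache size are illustrative, and the entry shape mirrors the one exercised in the accompanying specs:

import { LRUInMemoryAssignmentCache } from './cache/lru-in-memory-assignment-cache';

// Size the cache to the expected maximum number of concurrently active subjects.
const assignmentCache = new LRUInMemoryAssignmentCache(10_000);

const entry = { subjectKey: 'user-123', flagKey: 'checkout-flag', banditKey: 'shoes-bandit', actionKey: 'nike' };
if (!assignmentCache.has(entry)) {
  assignmentCache.set(entry); // remember the assignment so it is only logged once
}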
15 changes: 15 additions & 0 deletions src/cache/non-expiring-in-memory-cache-assignment.ts
@@ -0,0 +1,15 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';

/**
* A cache that never expires.
*
* The primary use case is for client-side SDKs, where the cache is only used
* for a single user.
*/
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
Map<string, string>
> {
constructor(store = new Map<string, string>()) {
super(store);
}
}
91 changes: 91 additions & 0 deletions src/cache/tlru-cache.spec.ts
@@ -0,0 +1,91 @@
import { TLRUCache } from './tlru-cache';

describe('TLRU Cache', () => {
let cache: TLRUCache;
const expectedCacheTimeoutMs = 10;

beforeEach(async () => {
cache = new TLRUCache(2, expectedCacheTimeoutMs);
});

afterEach(async () => {
jest.restoreAllMocks();
jest.clearAllTimers();
});

it('should evict cache after expiration', () => {
jest.useFakeTimers();

cache.set('a', 'apple');
jest.advanceTimersByTime(expectedCacheTimeoutMs);

expect(cache.get('a')).toBeUndefined();
});

it('should evict all expired entries', () => {
jest.useFakeTimers();

cache.set('a', 'avocado');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
cache.set('b', 'banana');
jest.advanceTimersByTime(expectedCacheTimeoutMs);

expect(cache.get('b')).toBeUndefined();
expect(cache.get('a')).toBeUndefined();
});

/**
* This test relies on implementation details, which is not ideal, but that's
* the only way I know of to work around Jest's fake timers.
**/
it('should overwrite existing cache entry', () => {
jest.useFakeTimers();

cache.set('a', 'apple');
jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
cache.set('a', 'avocado');

// spin the clock forward by half the timeout; after that the cache entry should still be valid
jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);

// asserting via forEach() because calling cache.get() would reset the
// eviction timer and interfere with the next assertion
let avocadoInCache = false;
cache.forEach((value, key) => {
if (key === 'a' && value === 'avocado') {
avocadoInCache = true;
}
});
expect(avocadoInCache).toBe(true);

// after another half-timeout spin the full TTL has elapsed, so the cache entry should be evicted
jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
expect(cache.get('a')).toBeUndefined();
});

it('should check if a key exists', () => {
cache.set('a', 'apple');
expect(cache.has('a')).toBeTruthy();
expect(cache.has('b')).toBeFalsy();
});

it('should handle the cache capacity of zero', () => {
const zeroCache = new TLRUCache(0, expectedCacheTimeoutMs);
zeroCache.set('a', 'apple');
expect(zeroCache.get('a')).toBeFalsy();
});

it('should handle the cache capacity of one', () => {
jest.useFakeTimers();
const oneCache = new TLRUCache(1, expectedCacheTimeoutMs);
oneCache.set('a', 'apple');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
expect(oneCache.get('a')).toBeUndefined();

oneCache.set('a', 'avocado');
expect(oneCache.get('a')).toBe('avocado');
oneCache.set('b', 'banana');
expect(oneCache.get('a')).toBeFalsy();
expect(oneCache.get('b')).toBe('banana');
});
});
77 changes: 77 additions & 0 deletions src/cache/tlru-cache.ts
@@ -0,0 +1,77 @@
import { LRUCache } from './lru-cache';

/**
* Time-aware, least-recently-used cache (TLRU). A variant of LRU in which entries have a limited lifetime.
* @param {number} maxSize - Maximum cache size
* @param {number} ttl - Time in milliseconds after which a cache entry is evicted
**/
export class TLRUCache extends LRUCache {
private readonly cacheEntriesTTLRegistry = new Map<string, Date>();
constructor(readonly maxSize: number, readonly ttl: number) {
super(maxSize);
}

private getCacheEntryEvictionTime(): Date {
return new Date(Date.now() + this.ttl);
}

private clearCacheEntryEvictionTimeIfExists(key: string): void {
if (this.cacheEntriesTTLRegistry.has(key)) {
this.cacheEntriesTTLRegistry.delete(key);
}
}

private setCacheEntryEvictionTime(key: string): void {
this.cacheEntriesTTLRegistry.set(key, this.getCacheEntryEvictionTime());
}

private resetCacheEntryEvictionTime(key: string): void {
this.clearCacheEntryEvictionTimeIfExists(key);
this.setCacheEntryEvictionTime(key);
}

private evictExpiredCacheEntries() {
const now = new Date(Date.now());
let cacheKey: string;
let evictionDate: Date;

// Not using forEach() so we can break out of the loop as soon as
// we find the first non-expired entry. Every entry after it is
// guaranteed to be non-expired as well, because entries are ordered oldest -> newest
for ([cacheKey, evictionDate] of this.cacheEntriesTTLRegistry.entries()) {
if (now >= evictionDate) {
this.delete(cacheKey);
} else {
break;
}
}
}

delete(key: string): boolean {
this.clearCacheEntryEvictionTimeIfExists(key);
return super.delete(key);
}

get(key: string): string | undefined {
this.evictExpiredCacheEntries();

const value = super.get(key);
if (value !== undefined) {
// Whenever we get a cache hit, we need to reset the timer
// for eviction, because it is now considered most recently
// accessed thus the timer should start over. Not doing that
// will cause a de-sync that will stop proper eviction
this.resetCacheEntryEvictionTime(key);
}
return value;
}

set(key: string, value: string): this {
this.evictExpiredCacheEntries();

const cache = super.set(key, value);
this.resetCacheEntryEvictionTime(key);
return cache;
}
}
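
A brief sketch of the intended TLRUCache behavior; the import path, capacity, and TTL values are made up for illustration:

import { TLRUCache } from './cache/tlru-cache';

const cache = new TLRUCache(100, 5_000); // at most 100 entries, 5-second TTL
cache.set('flag-a', 'variant-1');
cache.get('flag-a'); // 'variant-1'; a hit also resets the entry's eviction time

// Eviction is lazy: once 5 seconds pass without another get/set of 'flag-a',
// the next get() or set() call sweeps the expired entry and
// cache.get('flag-a') returns undefined.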
43 changes: 43 additions & 0 deletions src/cache/tlru-in-memory-assignment-cache.spec.ts
@@ -0,0 +1,43 @@
import { DEFAULT_TLRU_TTL_MS } from '../constants';

import { TLRUInMemoryAssignmentCache } from './tlru-in-memory-assignment-cache';

describe('TLRUInMemoryAssignmentCache', () => {
let cache: TLRUInMemoryAssignmentCache;
const defaultTimeout = DEFAULT_TLRU_TTL_MS; // 10 minutes

beforeAll(() => {
jest.useFakeTimers();
cache = new TLRUInMemoryAssignmentCache(2);
});

afterAll(() => {
jest.clearAllTimers();
});

it(`assignment cache's timeout should default to 10 minutes`, () => {
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);
jest.advanceTimersByTime(defaultTimeout);
expect(cache.has(cacheEntry)).toBeFalsy();
});

it(`assignment cache's timeout value is used on construction`, () => {
const expectedTimeout = 88;
cache = new TLRUInMemoryAssignmentCache(2, expectedTimeout);
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);
jest.advanceTimersByTime(expectedTimeout);
expect(cache.has(cacheEntry)).toBeFalsy();
});

it(`cache shouldn't be invalidated before timeout`, () => {
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);

expect(cache.has(cacheEntry)).toBeTruthy();

jest.advanceTimersByTime(defaultTimeout);
expect(cache.has(cacheEntry)).toBeFalsy();
});
});
19 changes: 19 additions & 0 deletions src/cache/tlru-in-memory-assignment-cache.ts
@@ -0,0 +1,19 @@
import { DEFAULT_TLRU_TTL_MS } from '../constants';

import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { TLRUCache } from './tlru-cache';

/**
* A variant of the LRU cache that automatically evicts entries after a set
* number of milliseconds.
*
* It is used to limit the size of the cache.
*
* @param {number} maxSize - Maximum cache size
* @param {number} ttl - Time in milliseconds after which a cache entry expires.
*/
export class TLRUInMemoryAssignmentCache extends AbstractAssignmentCache<TLRUCache> {
constructor(maxSize: number, ttl = DEFAULT_TLRU_TTL_MS) {
super(new TLRUCache(maxSize, ttl));
}
}
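
For reference, a usage sketch with an explicit TTL; the import path and key values are illustrative, and the entry shape follows the spec above:

import { TLRUInMemoryAssignmentCache } from './cache/tlru-in-memory-assignment-cache';

// At most 500 entries; each entry expires 60 seconds after its last read or write.
// Omitting the second argument falls back to DEFAULT_TLRU_TTL_MS (10 minutes).
const banditCache = new TLRUInMemoryAssignmentCache(500, 60_000);

const entry = { subjectKey: 'user-123', flagKey: 'checkout-flag', banditKey: 'shoes-bandit', actionKey: 'adidas' };
if (!banditCache.has(entry)) {
  banditCache.set(entry); // log the bandit action at most once per TTL window
}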
4 changes: 2 additions & 2 deletions src/client/eppo-client-with-bandits.spec.ts
@@ -163,7 +163,7 @@ describe('EppoClient Bandits E2E test', () => {
expect(banditEvent.action).toBe('adidas');
expect(banditEvent.actionProbability).toBeCloseTo(0.099);
expect(banditEvent.optimalityGap).toBe(7.1);
expect(banditEvent.modelVersion).toBe('v123');
expect(banditEvent.modelVersion).toBe('123');
expect(banditEvent.subjectNumericAttributes).toStrictEqual({ age: 25 });
expect(banditEvent.subjectCategoricalAttributes).toStrictEqual({
country: 'USA',
@@ -206,7 +206,7 @@

it('Flushes queued logging events when a logger is set', () => {
client.useLRUInMemoryAssignmentCache(5);
client.useLRUInMemoryBanditAssignmentCache(5);
client.useExpiringInMemoryBanditAssignmentCache(5);
client.setAssignmentLogger(null as unknown as IAssignmentLogger);
client.setBanditLogger(null as unknown as IBanditLogger);
const banditAssignment = client.getBanditAction(
17 changes: 10 additions & 7 deletions src/client/eppo-client.ts
@@ -3,11 +3,10 @@ import { logger } from '../application-logger';
import { IAssignmentEvent, IAssignmentLogger } from '../assignment-logger';
import { BanditEvaluator } from '../bandit-evaluator';
import { IBanditEvent, IBanditLogger } from '../bandit-logger';
import {
AssignmentCache,
LRUInMemoryAssignmentCache,
NonExpiringInMemoryAssignmentCache,
} from '../cache/abstract-assignment-cache';
import { AssignmentCache } from '../cache/abstract-assignment-cache';
import { LRUInMemoryAssignmentCache } from '../cache/lru-in-memory-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from '../cache/non-expiring-in-memory-cache-assignment';
import { TLRUInMemoryAssignmentCache } from '../cache/tlru-in-memory-assignment-cache';
import ConfigurationRequestor from '../configuration-requestor';
import { IConfigurationStore } from '../configuration-store/configuration-store';
import {
@@ -983,8 +982,12 @@
this.banditAssignmentCache = new NonExpiringInMemoryAssignmentCache();
}

public useLRUInMemoryBanditAssignmentCache(maxSize: number) {
this.banditAssignmentCache = new LRUInMemoryAssignmentCache(maxSize);
/**
* Replaces the bandit assignment cache with a TLRU (time-aware LRU) in-memory cache.
* @param {number} maxSize - Maximum cache size
* @param {number} timeout - TTL of cache entries in milliseconds (defaults to DEFAULT_TLRU_TTL_MS)
*/
public useExpiringInMemoryBanditAssignmentCache(maxSize: number, timeout?: number) {
this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize, timeout);
}

public useCustomBanditAssignmentCache(cache: AssignmentCache) {
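
Finally, a sketch of the renamed client call; how the EppoClient instance is obtained is outside this diff, so the declaration below is only a placeholder that assumes nothing beyond the method signature shown above:

// Placeholder for however the SDK exposes its EppoClient instance; only the
// signature of useExpiringInMemoryBanditAssignmentCache from this diff is assumed.
declare const client: {
  useExpiringInMemoryBanditAssignmentCache(maxSize: number, timeout?: number): void;
};

// Replaces the previous useLRUInMemoryBanditAssignmentCache(maxSize):
client.useExpiringInMemoryBanditAssignmentCache(5); // default TTL (DEFAULT_TLRU_TTL_MS, 10 minutes)
client.useExpiringInMemoryBanditAssignmentCache(5, 30_000); // or an explicit 30-second TTL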