Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/cache/abstract-assignment-cache.spec.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import {
assignmentCacheKeyToString,
assignmentCacheValueToString,
NonExpiringInMemoryAssignmentCache,
} from './abstract-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment';

describe('NonExpiringInMemoryAssignmentCache', () => {
it('read and write variation entries', () => {
Expand Down
31 changes: 0 additions & 31 deletions src/cache/abstract-assignment-cache.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { getMD5Hash } from '../obfuscation';

import { LRUCache } from './lru-cache';

/**
* Assignment cache keys are only on the subject and flag level, while the entire value is used
* for uniqueness checking. This way that if an assigned variation or bandit action changes for a
Expand Down Expand Up @@ -80,32 +78,3 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
return this.delegate.entries();
}
}

/**
* A cache that never expires.
*
* The primary use case is for client-side SDKs, where the cache is only used
* for a single user.
*/
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
Map<string, string>
> {
constructor(store = new Map<string, string>()) {
super(store);
}
}

/**
* A cache that uses the LRU algorithm to evict the least recently used items.
*
* It is used to limit the size of the cache.
*
* The primary use case is for server-side SDKs, where the cache is shared across
* multiple users. In this case, the cache size should be set to the maximum number
* of users that can be active at the same time.
*/
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
constructor(maxSize: number) {
super(new LRUCache(maxSize));
}
}
18 changes: 18 additions & 0 deletions src/cache/lru-in-memory-assignment-cache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { LRUCache } from './lru-cache';

/**
 * A cache that uses the LRU algorithm to evict the least recently used items.
 *
 * It is used to limit the size of the cache.
 *
 * The primary use case is for server-side SDKs, where the cache is shared across
 * multiple users. In this case, the cache size should be set to the maximum number
 * of users that can be active at the same time.
 */
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
  /** @param maxSize - Maximum number of entries the cache may hold */
  constructor(maxSize: number) {
    super(new LRUCache(maxSize));
  }
}
15 changes: 15 additions & 0 deletions src/cache/non-expiring-in-memory-cache-assignment.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';

/**
 * An assignment cache whose entries never expire.
 *
 * The primary use case is for client-side SDKs, where the cache is only used
 * for a single user.
 */
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
  Map<string, string>
> {
  constructor(store: Map<string, string> = new Map()) {
    super(store);
  }
}
61 changes: 61 additions & 0 deletions src/cache/tlru-cache.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import { TLRUCache } from './tlru-cache';

describe('TLRU Cache', () => {
let cache: TLRUCache;
const expectedCacheTimeoutMs = 10;

beforeEach(async () => {
cache = new TLRUCache(2, expectedCacheTimeoutMs);
});

afterEach(async () => {
jest.restoreAllMocks();
jest.clearAllTimers();
});

it('should evict cache after timeout', () => {
jest.useFakeTimers();
jest.spyOn(global, 'setTimeout');

cache.set('a', 'apple');
jest.advanceTimersByTime(expectedCacheTimeoutMs);

expect(setTimeout).toHaveBeenCalledTimes(1);
expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), expectedCacheTimeoutMs);
expect(cache.get('a')).toBeUndefined();
});

/**
* This test assumes implementation which is not ideal, but that's
* the only way I know of how to go around timers in jest
**/
it('should overwrite existing cache entry', () => {
jest.useFakeTimers();
jest.spyOn(global, 'setTimeout');
jest.spyOn(global, 'clearTimeout');

cache.set('a', 'apple');
jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
cache.set('a', 'avocado');

expect(setTimeout).toHaveBeenCalledTimes(2);
expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), expectedCacheTimeoutMs);
expect(clearTimeout).toHaveBeenCalledTimes(1);
// spin the clock by 5sec. After that time cache entry should be still valid.
jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);

// setting assertion in a weird way because calling cache.get()
// will reset eviction timer which will mess up next assertion
let avocadoInCache = false;
cache.forEach((value, key) => {
if (key === 'a' && value === 'avocado') {
avocadoInCache = true;
}
});
expect(avocadoInCache).toBe(true);

// after another spin of 5 sec, cache entry should evict itself
jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
expect(cache.has('a')).toBeFalsy();
});
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we need another test that does:

cache.set('a', 'apple');
cache.set('b', 'banana');
cache.get('a');
cache.set('c', 'cherry');

Then makes sure its c and a (e.g., a got re-inserted at the end)

});
56 changes: 56 additions & 0 deletions src/cache/tlru-cache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { LRUCache } from './lru-cache';

/**
* Time-aware, least-recently-used (TLRU), variant of LRU where entries have valid lifetime.
* @param {number} maxSize - Maximum cache size
* @param {number} ttl - Time in milliseconds after which cache entry will evict itself
**/
export class TLRUCache extends LRUCache {
private readonly cacheEntriesTimoutIds = new Map<string, ReturnType<typeof setTimeout>>();
constructor(readonly maxSize: number, readonly ttl: number) {
super(maxSize);
}

private clearCacheEntryTimeout(key: string): void {
const timeoutId = this.cacheEntriesTimoutIds.get(key);
clearTimeout(timeoutId);
this.cacheEntriesTimoutIds.delete(key);
}

private setCacheEntryTimout(key: string): void {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

typo: setCacheEntryTimeout

const timeOutId = setTimeout(() => {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: timeoutId for consistentency with spelling of timeout in other places

this.delete(key);
}, this.ttl);

this.cacheEntriesTimoutIds.set(key, timeOutId);
}

delete(key: string): boolean {
this.clearCacheEntryTimeout(key);
return super.delete(key);
}

get(key: string): string | undefined {
const value = super.get(key);

// Whenever we get a cache hit, we need to reset the timer
// for eviction, because it is now considered most recently
// accessed thus the timer should start over. Not doing that
// will cause a de-sync that will stop proper eviction
this.clearCacheEntryTimeout(key);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we should move the statement above into the if scope below, since we should only clear the timeout if it was a cache hit

if (value) {
this.setCacheEntryTimout(key);
}
return value;
}

set(key: string, value: string): this {
const cache = super.set(key, value);
if (this.cacheEntriesTimoutIds.has(key)) {
this.clearCacheEntryTimeout(key);
}
this.setCacheEntryTimout(key);

return cache;
}
}
41 changes: 41 additions & 0 deletions src/cache/tlru-in-memory-assignment-cache.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import { TLRUInMemoryAssignmentCache } from './tlru-in-memory-assignment-cache';

describe('ExpiringLRUInMemoryAssignmentCache', () => {
let cache: TLRUInMemoryAssignmentCache;
const defaultTimout = 600_000; // 10 minutes

beforeAll(() => {
jest.useFakeTimers();
cache = new TLRUInMemoryAssignmentCache(2);
});

afterAll(() => {
jest.clearAllTimers();
});

it(`assignment cache's timeout should default to 10 minutes `, () => {
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);
jest.advanceTimersByTime(defaultTimout);
expect(cache.has(cacheEntry)).toBeFalsy();
});

it(`assignment cache's timeout value is used on construction`, () => {
const expectedTimout = 88;
cache = new TLRUInMemoryAssignmentCache(2, expectedTimout);
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);
jest.advanceTimersByTime(expectedTimout);
expect(cache.has(cacheEntry)).toBeFalsy();
});

it(`cache shouldn't be invalidated before timeout`, () => {
const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
cache.set(cacheEntry);

expect(cache.has(cacheEntry)).toBeTruthy();

jest.advanceTimersByTime(defaultTimout);
expect(cache.has(cacheEntry)).toBeFalsy();
});
});
17 changes: 17 additions & 0 deletions src/cache/tlru-in-memory-assignment-cache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { TLRUCache } from './tlru-cache';

/**
 * Variation of the LRU caching mechanism that automatically evicts entries
 * a fixed number of milliseconds after they are written or read, in addition
 * to limiting the overall size of the cache.
 */
export class TLRUInMemoryAssignmentCache extends AbstractAssignmentCache<TLRUCache> {
  /**
   * @param maxSize - Maximum number of entries the cache may hold
   * @param ttl - Time in milliseconds after which a cache entry expires;
   *   defaults to 600000 (10 minutes)
   */
  constructor(maxSize: number, ttl = 600_000) {
    super(new TLRUCache(maxSize, ttl));
  }
}
2 changes: 1 addition & 1 deletion src/client/eppo-client-with-bandits.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ describe('EppoClient Bandits E2E test', () => {
expect(banditEvent.action).toBe('adidas');
expect(banditEvent.actionProbability).toBeCloseTo(0.099);
expect(banditEvent.optimalityGap).toBe(7.1);
expect(banditEvent.modelVersion).toBe('v123');
expect(banditEvent.modelVersion).toBe('123');
expect(banditEvent.subjectNumericAttributes).toStrictEqual({ age: 25 });
expect(banditEvent.subjectCategoricalAttributes).toStrictEqual({
country: 'USA',
Expand Down
19 changes: 13 additions & 6 deletions src/client/eppo-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,10 @@ import { logger } from '../application-logger';
import { IAssignmentEvent, IAssignmentLogger } from '../assignment-logger';
import { BanditEvaluator } from '../bandit-evaluator';
import { IBanditEvent, IBanditLogger } from '../bandit-logger';
import {
AssignmentCache,
LRUInMemoryAssignmentCache,
NonExpiringInMemoryAssignmentCache,
} from '../cache/abstract-assignment-cache';
import { AssignmentCache } from '../cache/abstract-assignment-cache';
import { LRUInMemoryAssignmentCache } from '../cache/lru-in-memory-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from '../cache/non-expiring-in-memory-cache-assignment';
import { TLRUInMemoryAssignmentCache } from '../cache/tlru-in-memory-assignment-cache';
import ConfigurationRequestor from '../configuration-requestor';
import { IConfigurationStore } from '../configuration-store/configuration-store';
import {
Expand Down Expand Up @@ -984,7 +983,15 @@ export default class EppoClient {
}

public useLRUInMemoryBanditAssignmentCache(maxSize: number) {
this.banditAssignmentCache = new LRUInMemoryAssignmentCache(maxSize);
this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

did you mean to use TLRU in here as well? the method name says LRU

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

while "LRU" has meaning, I wonder if "TLRU" is too vauge and we should be expliti in our naming calling it `ExpiringLRUInMemoryAssignmentCache" or something

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Also we should allow the timeout to be optionally specified here

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was also not conviced by the naming at first. I used ExpiringLRUInMemoryAssignmentCache in the beginning but then while reading about different caching algorithms. stumbled upon this.
https://en.wikipedia.org/wiki/Cache_replacement_policies#Time-aware,_least-recently_used

@aarsilv I will change the name if that doesn't convince you. What do you think?

}

/**
 * Configures the bandit assignment cache as an in-memory time-aware LRU cache.
 *
 * @param maxSize - Maximum number of entries the cache may hold
 * @param timeout - TTL in milliseconds for each cache entry; when omitted,
 *   the cache's default TTL is used
 */
public useTLRUInMemoryAssignmentCache(maxSize: number, timeout?: number) {
  this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize, timeout);
}

public useCustomBanditAssignmentCache(cache: AssignmentCache) {
Expand Down
2 changes: 1 addition & 1 deletion src/configuration-requestor.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ describe('ConfigurationRequestor', () => {
const bannerBandit = banditModelStore.get('banner_bandit');
expect(bannerBandit?.banditKey).toBe('banner_bandit');
expect(bannerBandit?.modelName).toBe('falcon');
expect(bannerBandit?.modelVersion).toBe('v123');
expect(bannerBandit?.modelVersion).toBe('123');
const bannerModelData = bannerBandit?.modelData;
expect(bannerModelData?.gamma).toBe(1);
expect(bannerModelData?.defaultActionScore).toBe(0);
Expand Down
4 changes: 2 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@ import { IBanditLogger, IBanditEvent } from './bandit-logger';
import {
AbstractAssignmentCache,
AssignmentCache,
NonExpiringInMemoryAssignmentCache,
LRUInMemoryAssignmentCache,
AsyncMap,
AssignmentCacheKey,
AssignmentCacheValue,
AssignmentCacheEntry,
assignmentCacheKeyToString,
assignmentCacheValueToString,
} from './cache/abstract-assignment-cache';
import { LRUInMemoryAssignmentCache } from './cache/lru-in-memory-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from './cache/non-expiring-in-memory-cache-assignment';
import EppoClient, {
FlagConfigurationRequestParameters,
IAssignmentDetails,
Expand Down
Loading