Selki/ff3396 expiring bandit cache #128
Changes from 7 commits
New file — LRUInMemoryAssignmentCache:

@@ -0,0 +1,18 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { LRUCache } from './lru-cache';

/**
 * A cache that uses the LRU algorithm to evict the least recently used items
 * once the maximum size is reached.
 *
 * It is used to limit the size of the cache.
 *
 * The primary use case is for server-side SDKs, where the cache is shared across
 * multiple users. In this case, the cache size should be set to the maximum number
 * of users that can be active at the same time.
 * @param {number} maxSize - Maximum cache size
 */
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
  constructor(maxSize: number) {
    super(new LRUCache(maxSize));
  }
}
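For illustration, a minimal usage sketch. It assumes the entry shape used in the assignment-cache tests further down in this diff and the relative import path used elsewhere in this PR; the size of 1000 and the key values are arbitrary example data.

import { LRUInMemoryAssignmentCache } from './lru-in-memory-assignment-cache';

// Cap the cache at roughly the number of concurrently active users (example value).
const banditAssignmentCache = new LRUInMemoryAssignmentCache(1000);

const entry = { subjectKey: 'user-1', flagKey: 'checkout', banditKey: 'bandit-1', actionKey: 'show-banner' };

// Skip duplicate logging while an identical entry is still cached.
if (!banditAssignmentCache.has(entry)) {
  // ... log the assignment event ...
  banditAssignmentCache.set(entry);
}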
New file — NonExpiringInMemoryAssignmentCache:

@@ -0,0 +1,15 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';

/**
 * A cache whose entries never expire.
 *
 * The primary use case is for client-side SDKs, where the cache is only used
 * for a single user.
 */
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
  Map<string, string>
> {
  constructor(store = new Map<string, string>()) {
    super(store);
  }
}
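For illustration, a minimal sketch of the optional store parameter. The previouslySavedEntries variable is hypothetical; the key/value format is whatever AbstractAssignmentCache writes and should be treated as opaque.

// Default: start from an empty backing Map (typical client-side usage).
const cache = new NonExpiringInMemoryAssignmentCache();

// Optionally inject a pre-populated Map, e.g. entries restored from persistence.
const previouslySavedEntries: [string, string][] = []; // hypothetical restored data
const restoredCache = new NonExpiringInMemoryAssignmentCache(new Map(previouslySavedEntries));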
New file — TLRUCache tests:

@@ -0,0 +1,61 @@
import { TLRUCache } from './tlru-cache';

describe('TLRU Cache', () => {
  let cache: TLRUCache;
  const expectedCacheTimeoutMs = 10;

  beforeEach(async () => {
    cache = new TLRUCache(2, expectedCacheTimeoutMs);
  });

  afterEach(async () => {
    jest.restoreAllMocks();
    jest.clearAllTimers();
  });

  it('should evict cache after timeout', () => {
    jest.useFakeTimers();
    jest.spyOn(global, 'setTimeout');

    cache.set('a', 'apple');
    jest.advanceTimersByTime(expectedCacheTimeoutMs);

    expect(setTimeout).toHaveBeenCalledTimes(1);
    expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), expectedCacheTimeoutMs);
    expect(cache.get('a')).toBeUndefined();
  });

  /**
   * This test relies on implementation details, which is not ideal, but it is
   * the only way I know of to work around timers in Jest.
   **/
  it('should overwrite existing cache entry', () => {
    jest.useFakeTimers();
    jest.spyOn(global, 'setTimeout');
    jest.spyOn(global, 'clearTimeout');

    cache.set('a', 'apple');
    jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
    cache.set('a', 'avocado');

    expect(setTimeout).toHaveBeenCalledTimes(2);
    expect(setTimeout).toHaveBeenLastCalledWith(expect.any(Function), expectedCacheTimeoutMs);
    expect(clearTimeout).toHaveBeenCalledTimes(1);
    // Advance the clock by half the TTL; the entry should still be valid,
    // because the second set() restarted its eviction timer.
    jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);

    // Assert via forEach rather than cache.get(), because get() would reset
    // the eviction timer and break the next assertion.
    let avocadoInCache = false;
    cache.forEach((value, key) => {
      if (key === 'a' && value === 'avocado') {
        avocadoInCache = true;
      }
    });
    expect(avocadoInCache).toBe(true);

    // After another half-TTL, the entry should have evicted itself.
    jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
    expect(cache.has('a')).toBeFalsy();
  });
});

Review comment: I think we need another test that does: … Then makes sure its …
New file — TLRUCache:

@@ -0,0 +1,56 @@
import { LRUCache } from './lru-cache';

/**
 * Time-aware least-recently-used (TLRU) cache: a variant of LRU in which each
 * entry also has a limited lifetime.
 * @param {number} maxSize - Maximum cache size
 * @param {number} ttl - Time in milliseconds after which a cache entry evicts itself
 **/
export class TLRUCache extends LRUCache {
  private readonly cacheEntriesTimeoutIds = new Map<string, ReturnType<typeof setTimeout>>();

  constructor(readonly maxSize: number, readonly ttl: number) {
    super(maxSize);
  }

  private clearCacheEntryTimeoutIfExists(key: string): void {
    if (this.cacheEntriesTimeoutIds.has(key)) {
      const timeoutId = this.cacheEntriesTimeoutIds.get(key);
      clearTimeout(timeoutId);
      this.cacheEntriesTimeoutIds.delete(key);
    }
  }

  private setCacheEntryTimeout(key: string): void {
    const timeoutId = setTimeout(() => {
      this.delete(key);
    }, this.ttl);

    this.cacheEntriesTimeoutIds.set(key, timeoutId);
  }

  delete(key: string): boolean {
    this.clearCacheEntryTimeoutIfExists(key);
    return super.delete(key);
  }

  get(key: string): string | undefined {
    const value = super.get(key);

    if (value !== undefined) {
      // Whenever we get a cache hit, reset the eviction timer: the entry is now
      // the most recently accessed, so its lifetime should start over. Not doing
      // so would de-sync the timers and break proper eviction.
      this.clearCacheEntryTimeoutIfExists(key);
      this.setCacheEntryTimeout(key);
    }
    return value;
  }

  set(key: string, value: string): this {
    const cache = super.set(key, value);
    this.clearCacheEntryTimeoutIfExists(key);
    this.setCacheEntryTimeout(key);

    return cache;
  }
}
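For illustration, a minimal sketch of the TTL behavior; the key, value, and sizes are arbitrary example data.

const sessionCache = new TLRUCache(100, 5_000); // up to 100 entries, 5-second lifetime each

sessionCache.set('session:123', 'variant-a');

// A read is a cache hit, so the entry's 5-second lifetime starts over.
sessionCache.get('session:123'); // 'variant-a'

// If the entry is not read or overwritten again, the setTimeout scheduled for it
// fires after the TTL and deletes it, so a later get() returns undefined.

An alternative design would store an expiry timestamp per entry and check it lazily in get(); the timer-based approach here evicts proactively at the cost of keeping one pending timer per key.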
New file — TLRUInMemoryAssignmentCache tests:

@@ -0,0 +1,43 @@
import { DEFAULT_TLRU_TTL_MS } from '../constants';

import { TLRUInMemoryAssignmentCache } from './tlru-in-memory-assignment-cache';

describe('TLRUInMemoryAssignmentCache', () => {
  let cache: TLRUInMemoryAssignmentCache;
  const defaultTimeout = DEFAULT_TLRU_TTL_MS; // 10 minutes

  beforeAll(() => {
    jest.useFakeTimers();
    cache = new TLRUInMemoryAssignmentCache(2);
  });

  afterAll(() => {
    jest.clearAllTimers();
  });

  it(`assignment cache's timeout should default to 10 minutes`, () => {
    const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
    cache.set(cacheEntry);
    jest.advanceTimersByTime(defaultTimeout);
    expect(cache.has(cacheEntry)).toBeFalsy();
  });

  it(`assignment cache's timeout value is used on construction`, () => {
    const expectedTimeout = 88;
    cache = new TLRUInMemoryAssignmentCache(2, expectedTimeout);
    const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
    cache.set(cacheEntry);
    jest.advanceTimersByTime(expectedTimeout);
    expect(cache.has(cacheEntry)).toBeFalsy();
  });

  it(`cache shouldn't be invalidated before timeout`, () => {
    const cacheEntry = { subjectKey: 'a', flagKey: 'b', banditKey: 'c', actionKey: 'd' };
    cache.set(cacheEntry);

    expect(cache.has(cacheEntry)).toBeTruthy();

    jest.advanceTimersByTime(defaultTimeout);
    expect(cache.has(cacheEntry)).toBeFalsy();
  });
});
New file — TLRUInMemoryAssignmentCache:

@@ -0,0 +1,19 @@
import { DEFAULT_TLRU_TTL_MS } from '../constants';

import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { TLRUCache } from './tlru-cache';

/**
 * A variation of the LRU caching mechanism that automatically evicts entries
 * after a set number of milliseconds.
 *
 * It is used to limit the size of the cache.
 *
 * @param {number} maxSize - Maximum cache size
 * @param {number} ttl - Time in milliseconds after which a cache entry expires.
 */
export class TLRUInMemoryAssignmentCache extends AbstractAssignmentCache<TLRUCache> {
  constructor(maxSize: number, ttl = DEFAULT_TLRU_TTL_MS) {
    super(new TLRUCache(maxSize, ttl));
  }
}
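For illustration, a minimal sketch of constructing the cache with the default and an explicit TTL; the sizes and TTL are arbitrary example values.

// Up to 500 entries; each entry expires after the default 10 minutes.
const defaultTtlCache = new TLRUInMemoryAssignmentCache(500);

// Up to 500 entries; each entry expires after 30 seconds.
const shortTtlCache = new TLRUInMemoryAssignmentCache(500, 30_000);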
Changes to EppoClient:

@@ -3,11 +3,10 @@ import { logger } from '../application-logger';
 import { IAssignmentEvent, IAssignmentLogger } from '../assignment-logger';
 import { BanditEvaluator } from '../bandit-evaluator';
 import { IBanditEvent, IBanditLogger } from '../bandit-logger';
-import {
-  AssignmentCache,
-  LRUInMemoryAssignmentCache,
-  NonExpiringInMemoryAssignmentCache,
-} from '../cache/abstract-assignment-cache';
+import { AssignmentCache } from '../cache/abstract-assignment-cache';
+import { LRUInMemoryAssignmentCache } from '../cache/lru-in-memory-assignment-cache';
+import { NonExpiringInMemoryAssignmentCache } from '../cache/non-expiring-in-memory-cache-assignment';
+import { TLRUInMemoryAssignmentCache } from '../cache/tlru-in-memory-assignment-cache';
 import ConfigurationRequestor from '../configuration-requestor';
 import { IConfigurationStore } from '../configuration-store/configuration-store';
 import {

@@ -984,7 +983,15 @@ export default class EppoClient {
   }

   public useLRUInMemoryBanditAssignmentCache(maxSize: number) {
-    this.banditAssignmentCache = new LRUInMemoryAssignmentCache(maxSize);
+    this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize);
   }

+  /**
+   * @param {number} maxSize - Maximum cache size
+   * @param {number} timeout - TTL of cache entries
+   */
+  public useTLRUInMemoryAssignmentCache(maxSize: number, timeout?: number) {
+    this.banditAssignmentCache = new TLRUInMemoryAssignmentCache(maxSize, timeout);
+  }
+
   public useCustomBanditAssignmentCache(cache: AssignmentCache) {
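For illustration, a minimal sketch of how a consumer might enable the TTL-based bandit assignment cache, assuming an already-constructed EppoClient instance named client; the size and TTL are arbitrary example values.

// Bandit assignment cache capped at 10_000 entries, using the default 10-minute TTL.
client.useTLRUInMemoryAssignmentCache(10_000);

// Or with an explicit TTL of one minute.
client.useTLRUInMemoryAssignmentCache(10_000, 60_000);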
Changes to constants:

@@ -12,3 +12,4 @@ export const NULL_SENTINEL = 'EPPO_NULL';
 // number of logging events that may be queued while waiting for initialization
 export const MAX_EVENT_QUEUE_SIZE = 100;
 export const BANDIT_ASSIGNMENT_SHARDS = 10000;
+export const DEFAULT_TLRU_TTL_MS = 600_000;
Review comment: 🔥