
Commit 7f838c2

Selki/ff3396 expiring bandit cache (#128)
* Added Expiring LRU Cache implementation
* Reorganized caches into their own files
* fixed older unit tests
* renaming classes and file organizing
* Fixed cache eviction for TLRU cache entries that were overwritten. Changed default ttl of TLRU assignment cache.
* minor code cleanliness improvements
* memory optimization of tlru cache
* use better name for bandit cache method in EppoClient
* fixed tests
* added more test for tlru cache, changed get, set, entries, keys, values behaviour
* fixed get() behaviour of tlru cache
* sdk version bump
1 parent de9ae8b · commit 7f838c2

15 files changed: +410 −46 lines

package.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
   "name": "@eppo/js-client-sdk-common",
-  "version": "4.3.0",
+  "version": "4.4.0",
   "description": "Eppo SDK for client-side JavaScript applications (base for both web and react native)",
   "main": "dist/index.js",
   "files": [

src/cache/abstract-assignment-cache.ts

Lines changed: 1 addition & 32 deletions
@@ -1,7 +1,5 @@
 import { getMD5Hash } from '../obfuscation';
 
-import { LRUCache } from './lru-cache';
-
 /**
  * Assignment cache keys are only on the subject and flag level, while the entire value is used
  * for uniqueness checking. This way that if an assigned variation or bandit action changes for a
@@ -60,7 +58,7 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
     return this.get(entry) === assignmentCacheValueToString(entry);
   }
 
-  private get(key: AssignmentCacheKey): string | undefined {
+  get(key: AssignmentCacheKey): string | undefined {
     return this.delegate.get(assignmentCacheKeyToString(key));
   }
 
@@ -80,32 +78,3 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
     return this.delegate.entries();
   }
 }
-
-/**
- * A cache that never expires.
- *
- * The primary use case is for client-side SDKs, where the cache is only used
- * for a single user.
- */
-export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
-  Map<string, string>
-> {
-  constructor(store = new Map<string, string>()) {
-    super(store);
-  }
-}
-
-/**
- * A cache that uses the LRU algorithm to evict the least recently used items.
- *
- * It is used to limit the size of the cache.
- *
- * The primary use case is for server-side SDKs, where the cache is shared across
- * multiple users. In this case, the cache size should be set to the maximum number
- * of users that can be active at the same time.
- */
-export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
-  constructor(maxSize: number) {
-    super(new LRUCache(maxSize));
-  }
-}
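
The change above makes get() part of the public AbstractAssignmentCache API and moves the two concrete caches into their own modules. As a rough sketch of the delegate pattern these classes share (illustrative only; the type and field names below are assumptions, not the SDK's actual definitions), any Map<string, string> can serve as the backing store, and has() treats a changed value for the same subject/flag key as a miss:

// Simplified sketch of the delegate-backed assignment cache (illustrative only;
// the key/entry field names here are assumptions, not the SDK's types).
type CacheKey = { subjectKey: string; flagKey: string };
type CacheEntry = CacheKey & { cachedValue: string };

class MapBackedAssignmentCacheSketch {
  // Any Map<string, string> can act as the delegate: a plain Map, an LRU, or a TLRU.
  constructor(private readonly delegate: Map<string, string> = new Map()) {}

  private keyToString(key: CacheKey): string {
    // The real cache serializes and hashes its key (note the getMD5Hash import above).
    return `${key.subjectKey};${key.flagKey}`;
  }

  get(key: CacheKey): string | undefined {
    return this.delegate.get(this.keyToString(key));
  }

  set(entry: CacheEntry): void {
    this.delegate.set(this.keyToString(entry), entry.cachedValue);
  }

  // An entry counts as cached only while the stored value still matches, so a
  // changed variation or bandit action for the same subject/flag reads as a miss.
  has(entry: CacheEntry): boolean {
    return this.get(entry) === entry.cachedValue;
  }
}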

src/cache/lru-cache.spec.ts

Lines changed: 15 additions & 0 deletions
@@ -61,4 +61,19 @@ describe('LRUCache', () => {
     expect(oneCache.get('a')).toBeFalsy();
     expect(oneCache.get('b')).toBe('banana');
   });
+
+  /**
+   This test case might be an overkill but in case Map() changes,
+   or we want to ditch it completely this will remind us that insertion
+   order is crucial for this cache to work properly
+  **/
+  it('should preserve insertion order when inserting on capacity limit', () => {
+    cache.set('a', 'apple');
+    cache.set('b', 'banana');
+    cache.set('c', 'cherry');
+
+    const keys = Array.from(cache.keys());
+    expect(keys[0]).toBe('b');
+    expect(keys[1]).toBe('c');
+  });
 });
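
The new test pins down the property the LRU depends on: JavaScript's Map iterates keys in insertion order, so the first key is always the least recently used. A minimal sketch of that idiom (an illustration of the technique the test guards, not the SDK's LRUCache source):

// Minimal sketch of the Map-insertion-order LRU idiom the test is guarding.
class TinyLRU {
  private readonly store = new Map<string, string>();

  constructor(private readonly capacity: number) {}

  get(key: string): string | undefined {
    const value = this.store.get(key);
    if (value !== undefined) {
      // Re-inserting moves the key to the most-recently-used position.
      this.store.delete(key);
      this.store.set(key, value);
    }
    return value;
  }

  set(key: string, value: string): void {
    if (this.capacity === 0) return; // degenerate case covered by the specs
    if (this.store.has(key)) {
      this.store.delete(key);
    } else if (this.store.size >= this.capacity) {
      // The first key in iteration order is the least recently used one.
      const oldest = this.store.keys().next().value;
      if (oldest !== undefined) this.store.delete(oldest);
    }
    this.store.set(key, value);
  }

  keys(): IterableIterator<string> {
    return this.store.keys();
  }
}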

src/cache/lru-cache.ts

Lines changed: 2 additions & 2 deletions
@@ -12,12 +12,12 @@
  * Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map
  */
 export class LRUCache implements Map<string, string> {
-  private readonly cache = new Map<string, string>();
+  protected readonly cache = new Map<string, string>();
   // eslint-disable-next-line @typescript-eslint/ban-ts-comment
   // @ts-ignore
   [Symbol.toStringTag]: string;
 
-  constructor(private readonly capacity: number) {}
+  constructor(protected readonly capacity: number) {}
 
   [Symbol.iterator](): IterableIterator<[string, string]> {
     return this.cache[Symbol.iterator]();
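
Relaxing these members from private to protected lets a subclass reach the backing Map and the configured capacity directly, which the new time-aware cache (see the TLRUCache specs below) presumably needs. A tiny sketch, with a hypothetical class name:

// Sketch of what the protected visibility enables: a subclass can now read the
// backing Map and the capacity. TLRUCacheSketch is a hypothetical name for
// illustration, not the class this commit adds.
import { LRUCache } from './lru-cache';

class TLRUCacheSketch extends LRUCache {
  constructor(capacity: number, private readonly ttlMs: number) {
    super(capacity);
  }

  // Visible here only because `cache` and `capacity` are protected on LRUCache.
  isFull(): boolean {
    return this.cache.size >= this.capacity;
  }

  expiresAt(nowMs: number): number {
    return nowMs + this.ttlMs;
  }
}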
New file — Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+import { AbstractAssignmentCache } from './abstract-assignment-cache';
+import { LRUCache } from './lru-cache';
+
+/**
+ * A cache that uses the LRU algorithm to evict the least recently used items.
+ *
+ * It is used to limit the size of the cache.
+ *
+ * The primary use case is for server-side SDKs, where the cache is shared across
+ * multiple users. In this case, the cache size should be set to the maximum number
+ * of users that can be active at the same time.
+ * @param {number} maxSize - Maximum cache size
+ */
+export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
+  constructor(maxSize: number) {
+    super(new LRUCache(maxSize));
+  }
+}
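
Usage is unchanged by the move; only the import location differs. A short usage sketch (the module path below is an assumption, since this view shows the new file's contents but not its name):

// Usage sketch; the import path is assumed, not shown in the diff above.
import { LRUInMemoryAssignmentCache } from './lru-in-memory-assignment-cache';

// Server-side: bound memory by the maximum number of users expected to be
// active at the same time, as the doc comment advises.
const serverAssignmentCache = new LRUInMemoryAssignmentCache(10_000);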

src/cache/abstract-assignment-cache.spec.ts renamed to src/cache/non-expiring-in-memory-assignment-cache.ts

Lines changed: 1 addition & 1 deletion
@@ -1,8 +1,8 @@
 import {
   assignmentCacheKeyToString,
   assignmentCacheValueToString,
-  NonExpiringInMemoryAssignmentCache,
 } from './abstract-assignment-cache';
+import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment';
 
 describe('NonExpiringInMemoryAssignmentCache', () => {
   it('read and write variation entries', () => {
New file — Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+import { AbstractAssignmentCache } from './abstract-assignment-cache';
+
+/**
+ * A cache that never expires.
+ *
+ * The primary use case is for client-side SDKs, where the cache is only used
+ * for a single user.
+ */
+export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
+  Map<string, string>
+> {
+  constructor(store = new Map<string, string>()) {
+    super(store);
+  }
+}
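
For the client-side default, the optional store parameter means a pre-existing Map can be supplied as the backing store. A brief usage sketch (the module path matches the import in the renamed spec above; the map contents are placeholders):

import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment';

// Client-side: a single user's assignments stay small, so an unbounded Map is fine.
const clientAssignmentCache = new NonExpiringInMemoryAssignmentCache();

// The optional `store` argument also allows passing a pre-populated Map
// (keys and values below are placeholders, not the SDK's key format).
const restoredCache = new NonExpiringInMemoryAssignmentCache(
  new Map([['some-key', 'some-value']]),
);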

src/cache/tlru-cache.spec.ts

Lines changed: 175 additions & 0 deletions
@@ -0,0 +1,175 @@
+import { TLRUCache } from './tlru-cache';
+
+describe('TLRU Cache', () => {
+  let cache: TLRUCache;
+  const expectedCacheTimeoutMs = 10;
+
+  beforeEach(async () => {
+    cache = new TLRUCache(2, expectedCacheTimeoutMs);
+  });
+
+  afterEach(async () => {
+    jest.restoreAllMocks();
+    jest.clearAllTimers();
+  });
+
+  it('should evict cache after expiration', () => {
+    jest.useFakeTimers();
+
+    cache.set('a', 'apple');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+
+    expect(cache.get('a')).toBeUndefined();
+  });
+
+  it('should not evict cache before expiration', () => {
+    jest.useFakeTimers();
+
+    cache.set('a', 'apple');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
+    expect(cache.get('a')).toBe('apple');
+  });
+
+  it('should evict all expired entries on .entries() call', () => {
+    jest.useFakeTimers();
+
+    cache.set('a', 'avocado');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    cache.set('b', 'banana');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+
+    const cacheEntries = [];
+
+    for (const entry of cache.entries()) {
+      cacheEntries.push(entry);
+    }
+
+    expect(cacheEntries.length).toBe(0);
+  });
+
+  it('should evict all expired entries on .keys() call', () => {
+    jest.useFakeTimers();
+
+    cache = new TLRUCache(3, expectedCacheTimeoutMs);
+    cache.set('a', 'avocado');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    cache.set('b', 'banana');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    cache.set('c', 'cherry');
+
+    const cacheKeys = [];
+
+    for (const key of cache.keys()) {
+      cacheKeys.push(key);
+    }
+
+    expect(cacheKeys.length).toBe(1);
+    expect(cache.get('c')).toBe('cherry');
+  });
+
+  it('should evict all expired entries on .values() call', () => {
+    jest.useFakeTimers();
+    cache = new TLRUCache(3, expectedCacheTimeoutMs);
+
+    cache.set('a', 'avocado');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    cache.set('b', 'banana');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    cache.set('c', 'cherry');
+
+    const cacheValues = [];
+
+    for (const value of cache.values()) {
+      cacheValues.push(value);
+    }
+
+    expect(cacheValues.length).toBe(1);
+    expect(cache.get('c')).toBe('cherry');
+  });
+
+  it('should overwrite existing cache entry', () => {
+    jest.useFakeTimers();
+
+    cache.set('a', 'apple');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
+    cache.set('a', 'avocado');
+
+    // spin the clock by 5sec. After that time cache entry should be still valid.
+    jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
+
+    // setting assertion in a weird way because calling cache.get()
+    // will reset eviction timer which will mess up next assertion
+    let avocadoInCache = false;
+    cache.forEach((value, key) => {
+      if (key === 'a' && value === 'avocado') {
+        avocadoInCache = true;
+      }
+    });
+    expect(avocadoInCache).toBe(true);
+
+    // after another spin of 5 sec, cache entry should evict itself
+    jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
+    expect(cache.get('a')).toBeUndefined();
+  });
+
+  it('should check if a key exists', () => {
+    cache.set('a', 'apple');
+    expect(cache.has('a')).toBeTruthy();
+    expect(cache.has('b')).toBeFalsy();
+  });
+
+  it('should handle the cache capacity of zero', () => {
+    const zeroCache = new TLRUCache(0, expectedCacheTimeoutMs);
+    zeroCache.set('a', 'apple');
+    expect(zeroCache.get('a')).toBeFalsy();
+  });
+
+  it('should handle the cache capacity of one', () => {
+    jest.useFakeTimers();
+    const oneCache = new TLRUCache(1, expectedCacheTimeoutMs);
+    oneCache.set('a', 'apple');
+    jest.advanceTimersByTime(expectedCacheTimeoutMs);
+    expect(oneCache.get('a')).toBeUndefined();
+
+    oneCache.set('a', 'avocado');
+    expect(oneCache.get('a')).toBe('avocado');
+    oneCache.set('b', 'banana');
+    expect(oneCache.get('a')).toBeFalsy();
+    expect(oneCache.get('b')).toBe('banana');
+  });
+
+  it('should evict oldest entry when capacity limit is reached', () => {
+    cache.set('a', 'apple');
+    cache.set('b', 'banana');
+    cache.set('c', 'cherry');
+
+    expect(cache.get('a')).toBeUndefined();
+    expect(cache.has('b')).toBeTruthy();
+    expect(cache.has('c')).toBeTruthy();
+  });
+
+  /**
+   This test case might be an overkill but in case Map() changes,
+   or we want to ditch it completely this will remind us that insertion
+   order is crucial for this cache to work properly
+  **/
+  it('should preserve insertion order when inserting on capacity limit', () => {
+    cache.set('a', 'apple');
+    cache.set('b', 'banana');
+    cache.set('c', 'cherry');
+
+    let keys = Array.from(cache.keys());
+    expect(keys[0]).toBe('b');
+    expect(keys[1]).toBe('c');
+
+    cache = new TLRUCache(2, expectedCacheTimeoutMs);
+    cache.set('a', 'apple');
+    cache.set('b', 'banana');
+    cache.get('a');
+    cache.set('c', 'cherry');
+
+    keys = Array.from(cache.keys());
+    expect(keys[0]).toBe('a');
+    expect(keys[1]).toBe('c');
+  });
+});
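
Taken together, the specs define the TLRU contract: entries expire a fixed TTL after they were last written or read (per the overwrite test's comment, get() resets the eviction timer), expired entries are also swept lazily when entries(), keys(), or values() is called, and the plain LRU capacity rules still apply. Below is a minimal sketch of one way to meet that contract with per-key expiration timestamps; it is illustrative only, and judging by the jest.clearAllTimers() call above, the commit's TLRUCache appears to rely on timers rather than this approach.

// One way to satisfy the behaviour exercised above: store an expiration
// timestamp per key and sweep lazily on reads. Illustrative sketch only.
class TinyTLRU {
  private readonly store = new Map<string, string>();
  private readonly deadlines = new Map<string, number>();

  constructor(private readonly capacity: number, private readonly ttlMs: number) {}

  private isExpired(key: string, now = Date.now()): boolean {
    const deadline = this.deadlines.get(key);
    return deadline !== undefined && now >= deadline;
  }

  private evictExpired(): void {
    for (const key of [...this.store.keys()]) {
      if (this.isExpired(key)) {
        this.store.delete(key);
        this.deadlines.delete(key);
      }
    }
  }

  get(key: string): string | undefined {
    if (this.isExpired(key)) {
      this.store.delete(key);
      this.deadlines.delete(key);
      return undefined;
    }
    const value = this.store.get(key);
    if (value !== undefined) {
      // Reading refreshes both recency (LRU order) and the expiration clock.
      this.store.delete(key);
      this.store.set(key, value);
      this.deadlines.set(key, Date.now() + this.ttlMs);
    }
    return value;
  }

  set(key: string, value: string): void {
    if (this.capacity === 0) return;
    this.evictExpired();
    if (this.store.has(key)) {
      this.store.delete(key);
    } else if (this.store.size >= this.capacity) {
      const oldest = this.store.keys().next().value;
      if (oldest !== undefined) {
        this.store.delete(oldest);
        this.deadlines.delete(oldest);
      }
    }
    this.store.set(key, value);
    this.deadlines.set(key, Date.now() + this.ttlMs);
  }

  keys(): IterableIterator<string> {
    this.evictExpired();
    return this.store.keys();
  }
}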
