Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@eppo/js-client-sdk-common",
"version": "4.3.0",
"version": "4.4.0",
"description": "Eppo SDK for client-side JavaScript applications (base for both web and react native)",
"main": "dist/index.js",
"files": [
Expand Down
33 changes: 1 addition & 32 deletions src/cache/abstract-assignment-cache.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import { getMD5Hash } from '../obfuscation';

import { LRUCache } from './lru-cache';

/**
* Assignment cache keys are only on the subject and flag level, while the entire value is used
* for uniqueness checking. This way that if an assigned variation or bandit action changes for a
Expand Down Expand Up @@ -60,7 +58,7 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
return this.get(entry) === assignmentCacheValueToString(entry);
}

private get(key: AssignmentCacheKey): string | undefined {
get(key: AssignmentCacheKey): string | undefined {
return this.delegate.get(assignmentCacheKeyToString(key));
}

Expand All @@ -80,32 +78,3 @@ export abstract class AbstractAssignmentCache<T extends Map<string, string>>
return this.delegate.entries();
}
}

/**
* A cache that never expires.
*
* The primary use case is for client-side SDKs, where the cache is only used
* for a single user.
*/
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
Map<string, string>
> {
constructor(store = new Map<string, string>()) {
super(store);
}
}

/**
* A cache that uses the LRU algorithm to evict the least recently used items.
*
* It is used to limit the size of the cache.
*
* The primary use case is for server-side SDKs, where the cache is shared across
* multiple users. In this case, the cache size should be set to the maximum number
* of users that can be active at the same time.
*/
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
constructor(maxSize: number) {
super(new LRUCache(maxSize));
}
}
15 changes: 15 additions & 0 deletions src/cache/lru-cache.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,4 +61,19 @@ describe('LRUCache', () => {
expect(oneCache.get('a')).toBeFalsy();
expect(oneCache.get('b')).toBe('banana');
});

/**
 This test case might be an overkill but in case Map() changes,
 or we want to ditch it completely this will remind us that insertion
 order is crucial for this cache to work properly
 **/
it('should preserve insertion order when inserting on capacity limit', () => {
  // Fill the two-slot cache, then add a third entry to force an eviction.
  for (const [key, value] of [
    ['a', 'apple'],
    ['b', 'banana'],
    ['c', 'cherry'],
  ]) {
    cache.set(key, value);
  }

  // 'a' was evicted; the survivors must still appear in insertion order.
  const remainingKeys = Array.from(cache.keys());
  expect(remainingKeys[0]).toBe('b');
  expect(remainingKeys[1]).toBe('c');
});
});
4 changes: 2 additions & 2 deletions src/cache/lru-cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,12 @@
* Source: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map
*/
export class LRUCache implements Map<string, string> {
private readonly cache = new Map<string, string>();
protected readonly cache = new Map<string, string>();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
[Symbol.toStringTag]: string;

constructor(private readonly capacity: number) {}
constructor(protected readonly capacity: number) {}

[Symbol.iterator](): IterableIterator<[string, string]> {
return this.cache[Symbol.iterator]();
Expand Down
18 changes: 18 additions & 0 deletions src/cache/lru-in-memory-assignment-cache.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';
import { LRUCache } from './lru-cache';

/**
 * A cache that uses the LRU algorithm to evict the least recently used items.
 *
 * It is used to limit the size of the cache.
 *
 * The primary use case is for server-side SDKs, where the cache is shared across
 * multiple users. In this case, the cache size should be set to the maximum number
 * of users that can be active at the same time.
 */
export class LRUInMemoryAssignmentCache extends AbstractAssignmentCache<LRUCache> {
  /** @param maxSize - Maximum number of entries the underlying {@link LRUCache} may hold. */
  constructor(maxSize: number) {
    super(new LRUCache(maxSize));
  }
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import {
assignmentCacheKeyToString,
assignmentCacheValueToString,
NonExpiringInMemoryAssignmentCache,
} from './abstract-assignment-cache';
import { NonExpiringInMemoryAssignmentCache } from './non-expiring-in-memory-cache-assignment';

describe('NonExpiringInMemoryAssignmentCache', () => {
it('read and write variation entries', () => {
Expand Down
15 changes: 15 additions & 0 deletions src/cache/non-expiring-in-memory-cache-assignment.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { AbstractAssignmentCache } from './abstract-assignment-cache';

/**
 * An assignment cache whose entries live for the lifetime of the process —
 * nothing is ever evicted or expired.
 *
 * The primary use case is for client-side SDKs, where the cache is only used
 * for a single user.
 */
export class NonExpiringInMemoryAssignmentCache extends AbstractAssignmentCache<
  Map<string, string>
> {
  /** @param store - Backing map; a fresh empty `Map` is created when omitted. */
  constructor(store: Map<string, string> = new Map()) {
    super(store);
  }
}
175 changes: 175 additions & 0 deletions src/cache/tlru-cache.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
import { TLRUCache } from './tlru-cache';

describe('TLRU Cache', () => {
// Cache under test; recreated before every case so tests stay independent.
let cache: TLRUCache;
// Time-to-live applied to every cache built in this suite, in milliseconds.
const expectedCacheTimeoutMs = 10;

beforeEach(async () => {
cache = new TLRUCache(2, expectedCacheTimeoutMs);
});

afterEach(async () => {
// Undo any fake timers / spies installed by individual test cases.
jest.restoreAllMocks();
jest.clearAllTimers();
});

it('should evict cache after expiration', () => {
  jest.useFakeTimers();

  cache.set('a', 'apple');
  // Advance the clock exactly to the TTL boundary — the entry must be gone.
  jest.advanceTimersByTime(expectedCacheTimeoutMs);
  const evicted = cache.get('a');

  expect(evicted).toBeUndefined();
});

it('should not evict cache before expiration', () => {
  jest.useFakeTimers();

  cache.set('a', 'apple');
  // One tick shy of the TTL: the entry must still be readable.
  jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
  const stillCached = cache.get('a');

  expect(stillCached).toBe('apple');
});

it('should evict all expired entries on .entries() call', () => {
  jest.useFakeTimers();

  cache.set('a', 'avocado');
  jest.advanceTimersByTime(expectedCacheTimeoutMs);
  cache.set('b', 'banana');
  jest.advanceTimersByTime(expectedCacheTimeoutMs);

  // Both entries have outlived the TTL, so iteration should yield nothing.
  const cacheEntries = Array.from(cache.entries());

  expect(cacheEntries.length).toBe(0);
});

it('should evict all expired entries on .keys() call', () => {
jest.useFakeTimers();

cache = new TLRUCache(3, expectedCacheTimeoutMs);
cache.set('a', 'avocado');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
cache.set('b', 'banana');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
cache.set('c', 'cherry');

const cacheKeys = [];

for (const key of cache.keys()) {
cacheKeys.push(key);
}

expect(cacheKeys.length).toBe(1);
expect(cache.get('c')).toBe('cherry');
Comment on lines +66 to +67
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍

});

it('should evict all expired entries on .values() call', () => {
jest.useFakeTimers();
cache = new TLRUCache(3, expectedCacheTimeoutMs);

cache.set('a', 'avocado');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
cache.set('b', 'banana');
jest.advanceTimersByTime(expectedCacheTimeoutMs);
cache.set('c', 'cherry');

const cacheValues = [];

for (const value of cache.values()) {
cacheValues.push(value);
}

expect(cacheValues.length).toBe(1);
expect(cache.get('c')).toBe('cherry');
Comment on lines +86 to +87
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍

});

it('should overwrite existing cache entry', () => {
  jest.useFakeTimers();

  cache.set('a', 'apple');
  jest.advanceTimersByTime(expectedCacheTimeoutMs - 1);
  // Overwriting just before expiry should restart the entry's TTL.
  cache.set('a', 'avocado');

  // Half a TTL later the overwritten entry must still be present.
  jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);

  // Deliberately avoid cache.get() here: a read refreshes the eviction
  // timer, which would invalidate the final assertion below.
  let avocadoInCache = false;
  cache.forEach((value, key) => {
    if (key === 'a' && value === 'avocado') {
      avocadoInCache = true;
    }
  });
  expect(avocadoInCache).toBe(true);

  // After the remaining half of the TTL the entry should evict itself.
  jest.advanceTimersByTime(expectedCacheTimeoutMs / 2);
  expect(cache.get('a')).toBeUndefined();
});

it('should check if a key exists', () => {
  // A stored key reports present; an unknown key does not.
  cache.set('a', 'apple');
  expect(cache.has('b')).toBeFalsy();
  expect(cache.has('a')).toBeTruthy();
});

it('should handle the cache capacity of zero', () => {
  // With no capacity, writes are silently dropped.
  const emptyCapacityCache = new TLRUCache(0, expectedCacheTimeoutMs);
  emptyCapacityCache.set('a', 'apple');
  expect(emptyCapacityCache.get('a')).toBeFalsy();
});

it('should handle the cache capacity of one', () => {
  jest.useFakeTimers();
  const singleSlotCache = new TLRUCache(1, expectedCacheTimeoutMs);

  // The sole entry expires once the TTL elapses.
  singleSlotCache.set('a', 'apple');
  jest.advanceTimersByTime(expectedCacheTimeoutMs);
  expect(singleSlotCache.get('a')).toBeUndefined();

  // Re-inserting works, and a second key pushes the first one out.
  singleSlotCache.set('a', 'avocado');
  expect(singleSlotCache.get('a')).toBe('avocado');
  singleSlotCache.set('b', 'banana');
  expect(singleSlotCache.get('a')).toBeFalsy();
  expect(singleSlotCache.get('b')).toBe('banana');
});

it('should evict oldest entry when capacity limit is reached', () => {
  // A third insert exceeds the capacity of two, evicting the oldest key.
  cache.set('a', 'apple');
  cache.set('b', 'banana');
  cache.set('c', 'cherry');

  expect(cache.get('a')).toBeUndefined();
  expect(cache.has('b')).toBeTruthy();
  expect(cache.has('c')).toBeTruthy();
});

/**
This test case might be an overkill but in case Map() changes,
or we want to ditch it completely this will remind us that insertion
order is crucial for this cache to work properly
**/
it('should preserve insertion order when inserting on capacity limit', () => {
cache.set('a', 'apple');
cache.set('b', 'banana');
cache.set('c', 'cherry');

let keys = Array.from(cache.keys());
expect(keys[0]).toBe('b');
expect(keys[1]).toBe('c');

cache = new TLRUCache(2, expectedCacheTimeoutMs);
cache.set('a', 'apple');
cache.set('b', 'banana');
cache.get('a');
cache.set('c', 'cherry');

keys = Array.from(cache.keys());
expect(keys[0]).toBe('a');
expect(keys[1]).toBe('c');
Comment on lines +171 to +173
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💪

});
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we need another test that does:

cache.set('a', 'apple');
cache.set('b', 'banana');
cache.get('a');
cache.set('c', 'cherry');

Then makes sure its c and a (e.g., a got re-inserted at the end)

});
Loading
Loading