Skip to content

Commit 0fb9658

Browse files
authored
Merge pull request #16 from oslabs-beta/tests
Tests & bugfix
2 parents 51917a4 + b2cadc2 commit 0fb9658

File tree

5 files changed

+321
-13
lines changed

5 files changed

+321
-13
lines changed

README.md

Lines changed: 31 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -64,20 +64,36 @@ const GraphQLRouter =
6464
ObsRouter >
6565
{
6666
Router,
67-
typeDefs: types,
68-
resolvers: resolvers,
69-
redisPort: 6379, //Desired redis port
70-
useCache: true, //Boolean to toggle all cache functionality
71-
usePlayground: true, //Boolean to allow for graphQL playground
72-
persistQueries: true, //Boolean to toggle the use of persistant queries
73-
searchTerms: [] //Optional array to allow board queries to store according to search fields so individual searches are found in cache
74-
customIdentifier: ['id', '__typename'],
75-
mutationTableMap = {}, //Object where keys are add mutation types and value is an array of affected tables (e.g. {addPlants: ['plants'], addMovie: ['movies']})
67+
typeDefs: types, // graphQL typeDefs
68+
resolvers: resolvers, // graphQL resolvers
7669
};
7770

7871
// attach the graphql routers routes to our app
7972
app.use(GraphQLRouter.routes(), GraphQLRouter.allowedMethods());
8073
```
74+
## Selecting options for the Router
75+
```javascript
76+
const GraphQLRouter =
77+
(await ObsidianRouter) <
78+
ObsRouter >
79+
{
80+
Router, // Router that is initialized by server.
81+
path: '/graphql', // endpoint for graphQL queries, default to '/graphql'
82+
typeDefs: types, // graphQL typeDefs
83+
resolvers: resolvers, // graphQL resolvers
84+
usePlayground: true, // Boolean to allow for graphQL playground, default to false
85+
useCache: true, // Boolean to toggle all cache functionality, default to true
86+
redisPort: 6379, // Desired redis port, default to 6379
87+
policy: 'allkeys-lru', // Option to select your Redis policy, default to allkeys-lru
88+
maxmemory: '2000mb', // Option to select Redis capacity, default to 2000mb
89+
searchTerms: [], // Optional array to allow board queries to store according to search fields so individual searches are found in cache
90+
persistQueries: true, // Boolean to toggle the use of persistent queries, default to false
91+
hashTableSize: 16, // Size of hash table for persistent queries, default to 16
92+
maxQueryDepth: 0, // Maximum depth of query, default to 0
93+
customIdentifier: ['__typename', '_id'], // keys to be used to identify and normalize object
94+
mutationTableMap: {}, // Object where keys are add mutation types and value is an array of affected tables (e.g. {addPlants: ['plants'], addMovie: ['movies']})
95+
};
96+
```
8197

8298
## Creating the Wrapper
8399

@@ -93,10 +109,10 @@ const App = () => {
93109
};
94110
```
95111

96-
## Selecting useCache, LFU/LRU/WTinyLFU, capacity, and searchTerms (if any); default (if not provided) true, LFU, 2000
112+
## Selecting useCache, LFU/LRU/WTinyLFU, capacity, persistQueries, and searchTerms (if any); default (if not provided): true, LFU, 2000, false
97113

98114
```javascript
99-
<ObsidianWrapper useCache={true} algo='LRU' capacity='5000' searchTerms={[title, author, ISBN]}>
115+
<ObsidianWrapper useCache={true} algo='LRU' capacity='5000' persistQueries={true} searchTerms={['title', 'director', 'genre']}>
100116
<MovieApp />
101117
</ObsidianWrapper>
102118
```
@@ -185,8 +201,12 @@ Working demo to install locally in docker:
185201

186202
## Features In Progress
187203

204+
- Server-side caching improvements
205+
- More comprehensive mutation support
206+
- searchTerms option optimization
188207
- Ability to store/read only the whole query
189208
- Hill Climber optimization for W-TinyLFU cache size allocation
209+
- Developer Tool server-side cache integration
190210
- Developer Tool View Cache component, and Playground component
191211

192212
## Authors

src/Browser/wTinyLFU Sub-Caches/slruSub-cache.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,5 +54,5 @@ SLRUCache.prototype.has = function (key) {
5454
SLRUCache.prototype.putAndDemote = function (key, value) {
5555
// if adding an item to the protectedLRU results in ejection, demote ejected node
5656
const demoted = this.protectedLRU.put(key, value);
57-
if (demoted) this.probationaryLRU.put(demoted.key, demoted);
57+
if (demoted) this.probationaryLRU.put(demoted.key, demoted.value);
5858
}

src/Obsidian.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,6 @@ export interface ObsidianRouterOptions<T> {
2828
usePlayground?: boolean;
2929
useCache?: boolean;
3030
redisPort?: number;
31-
redisURI?: string;
3231
policy?: string;
3332
maxmemory?: string;
3433
searchTerms?: string[];
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
// Unit tests for WTinyLFUCache: a 1% LRU admission window in front of a
// segmented (probationary/protected) main cache with TinyLFU admission.
// Runs under Deno with the Rhum test framework.
// NOTE(review): the rhum version specifier below was mangled by the page's
// email obfuscation ("[email protected]") — restore the original pinned version.
import WTinyLFUCache from "../test_variables/wTinyLFU_variables.js";
import { Rhum } from 'https://deno.land/x/[email protected]/mod.ts';

Rhum.testPlan('WTinyLFU cache functionality', () => {
  Rhum.testSuite('WTinyLFU Initialization', () => {
    Rhum.testCase('should initialize with corect capacities', () => {
      // capacity 1000 → window 1% (10), main 99% split 20/80 (198 / 792)
      const cache = new WTinyLFUCache(1000);
      Rhum.asserts.assertEquals(cache.capacity, 1000);
      Rhum.asserts.assertEquals(cache.WLRU.capacity, 10);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.capacity, 198);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.capacity, 792);
    });
  })
  Rhum.testSuite('Window cache functionality', () => {
    Rhum.testCase('should add new item to the windowLRU when adding to WTLFU cache', () => {
      const cache = new WTinyLFUCache(100);
      cache.putAndPromote('one', 1);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), 1);
    });
    Rhum.testCase('should move items ejected from windowLRU into the probationaryLRU cache', async () => {
      // window capacity is 1 (100 * .01), so the second put ejects 'one'
      const cache = new WTinyLFUCache(100);
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.peek('one'), 1);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), 2);
    })
    Rhum.testCase('should promote items from probationaryLRU to the protectedLRU when accessed', async () => {
      const cache = new WTinyLFUCache(100);
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      // SLRU.get on a probationary hit promotes the item into protected
      Rhum.asserts.assertEquals(cache.SLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.peek('one'), 1);
    })
    Rhum.testCase('should demote items ejected from protectedLRU to probationary LRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.protectedLRU.capacity = 1;
      cache.SLRU.protectedLRU.put('one', 1);
      // putAndDemote over capacity ejects 'one' down into probationary
      await cache.SLRU.putAndDemote('two', 2);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('two'), 2);
    })
    Rhum.testCase('should move highest frequency item into full probationary cache', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.probationaryLRU.capacity = 1;
      await cache.putAndPromote('one', 1);
      await cache.putAndPromote('two', 2);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      // bias the frequency sketch so 'one' beats 'two' in the TinyLFU duel
      cache.sketch['one'] = 3;
      cache.sketch['two'] = 2;
      await cache.putAndPromote('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), 1);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('two'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('three'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('three'), 3);
    })
    Rhum.testCase('should evict least recently used item from WLRU', async () => {
      // capacity 200 → window capacity 2; third put evicts the LRU ('one')
      const cache = new WTinyLFUCache(200);
      await cache.WLRU.put('one', 1);
      await cache.WLRU.put('two', 2);
      await cache.WLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.WLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.WLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.WLRU.get('three'), 3);
    })
    Rhum.testCase('should evict least recently used item from ProbationaryLRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.probationaryLRU.capacity = 2;
      await cache.SLRU.probationaryLRU.put('one', 1);
      await cache.SLRU.probationaryLRU.put('two', 2);
      await cache.SLRU.probationaryLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.SLRU.probationaryLRU.get('three'), 3);
    })
    Rhum.testCase('should evict least recently used item from ProtectedLRU', async () => {
      const cache = new WTinyLFUCache(100);
      cache.SLRU.protectedLRU.capacity = 2;
      await cache.SLRU.protectedLRU.put('one', 1);
      await cache.SLRU.protectedLRU.put('two', 2);
      await cache.SLRU.protectedLRU.put('three', 3);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('one'), null);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('two'), 2);
      Rhum.asserts.assertEquals(cache.SLRU.protectedLRU.get('three'), 3);
    })
  })
});

Rhum.run();
Lines changed: 196 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,196 @@
1+
// import { FrequencySketch } from '../../src/Browser/FrequencySketch.js'
2+
3+
/*****
4+
* Overall w-TinyLFU Cache
5+
*****/
6+
// w-TinyLFU cache: a small LRU "window" in front of a segmented LRU main
// cache, all sharing a single frequency sketch used for admission decisions.
export default function WTinyLFUCache (capacity) {
  this.capacity = capacity;
  this.sketch = {}; // frequency sketch shared by every segment

  // 1% of total capacity forms the admission window
  this.WLRU = new LRUCache(capacity * 0.01);
  this.WLRU.sketch = this.sketch;

  // the remaining 99% forms the segmented (probationary/protected) main cache
  this.SLRU = new SLRUCache(capacity * 0.99);
  this.SLRU.probationaryLRU.sketch = this.sketch;
  this.SLRU.protectedLRU.sketch = this.sketch;
}
18+
19+
// Write through the admission window. If the window evicts a node, admit
// either that eviction or the probationary segment's LRU candidate into the
// probationary cache — whichever the TinyLFU frequency duel favors.
WTinyLFUCache.prototype.putAndPromote = async function (key, value) {
  const evicted = this.WLRU.put(key, value);
  // the window absorbed the write without ejecting anything — done
  if (!evicted) return;

  let admitted = evicted;
  const probationary = this.SLRU.probationaryLRU;
  if (probationary.nodeHash.size >= Math.floor(probationary.capacity)) {
    // probationary segment is full: pull its least-recently-used entry and
    // keep whichever of the two items should improve the hit ratio more
    const challenger = probationary.getCandidate();
    admitted = await this.TinyLFU(evicted, challenger);
  }
  // the duel winner (or the window eviction outright) enters probationary
  probationary.put(admitted.key, admitted.value);
}
35+
36+
// TinyLFU admission filter: compare the sketch frequencies of the two
// candidates and return the higher-frequency one (window item wins ties).
WTinyLFUCache.prototype.TinyLFU = function (WLRUCandidate, SLRUCandidate) {
  const windowFreq = this.sketch[WLRUCandidate.key];
  const mainFreq = this.sketch[SLRUCandidate.key];
  if (windowFreq >= mainFreq) return WLRUCandidate;
  return SLRUCandidate;
}
43+
44+
/*****
45+
* Main SLRU Cache
46+
*****/
47+
// Segmented LRU: new arrivals live in a probationary segment (20% of
// capacity); re-accessed items graduate to a protected segment (80%).
function SLRUCache(capacity) {
  this.probationaryLRU = new LRUCache(capacity * 0.20);
  this.protectedLRU = new LRUCache(capacity * 0.80);
}
53+
54+
// Get item from cache, updates last access,
55+
// and promotes existing items to protected
56+
// Look a key up across both segments, updating recency on a hit.
// A probationary hit is promoted into the protected segment.
SLRUCache.prototype.get = function (key) {
  // protected hit: recency already refreshed by the inner get
  const fromProtected = this.protectedLRU.get(key);
  if (fromProtected !== null) return fromProtected;

  // peek (no recency update) so a miss here has no side effects
  const fromProbation = this.probationaryLRU.peek(key);
  if (fromProbation === null) return; // in neither segment → undefined

  // probationary hit: move it up to protected (which may demote another node)
  this.probationaryLRU.delete(key);
  this.putAndDemote(key, fromProbation);
  return fromProbation;
}
74+
75+
// add or update item in cache
76+
// Add or update an item in the segmented cache:
// - key already protected → refresh it in place (may demote another node)
// - key in probationary   → promote it into protected
// - key in neither        → new entries always start in probationary
SLRUCache.prototype.put = function (key, node) {
  if (this.protectedLRU.nodeHash.get(key)) {
    // already protected — update and keep it protected
    this.putAndDemote(key, node);
  } else if (this.probationaryLRU.nodeHash.get(key)) {
    // BUGFIX: the original called this.probationaryLRU.nodeHash(key) —
    // nodeHash is a Map, not a function, so updating any probationary key
    // threw a TypeError. Use .get(key) as the sibling has() method does.
    // promote and update it
    this.probationaryLRU.delete(key);
    this.putAndDemote(key, node);
  } else {
    // if in neither, add item to the probationary segment
    this.probationaryLRU.put(key, node);
  }
}
88+
89+
// Check to see if the item exists in the cache without updating access
90+
// Membership check across both segments without touching recency order.
SLRUCache.prototype.has = function (key) {
  const inProtected = this.protectedLRU.nodeHash.get(key);
  return inProtected || this.probationaryLRU.nodeHash.get(key);
}
93+
94+
// Adds a node to the protectedLRU
95+
// Write into the protected segment; if that write ejects a node, the
// ejected entry is demoted into probationary instead of being dropped.
SLRUCache.prototype.putAndDemote = function (key, value) {
  const ejected = this.protectedLRU.put(key, value);
  if (ejected) {
    this.probationaryLRU.put(ejected.key, ejected.value);
  }
}
100+
101+
// Doubly-linked-list node pairing a cache key with its value.
class Node {
  constructor (key, value) {
    this.key = key;
    this.value = value;
    this.prev = null;
    this.next = null;
  }
}
108+
109+
// Classic LRU cache: Map for O(1) lookup plus a sentinel-bounded
// doubly-linked list ordered from least- (head side) to most-recent (tail).
function LRUCache(capacity) {
  this.capacity = capacity;
  this.currentSize = 0;
  this.nodeHash = new Map(); // key -> Node

  const head = new Node('head', null);
  const tail = new Node('tail', null);
  head.next = tail;
  tail.prev = head;
  this.head = head;
  this.tail = tail;
}
121+
122+
// Unlink a node from the recency list; the hash is left untouched.
LRUCache.prototype.removeNode = function (node) {
  node.prev.next = node.next;
  node.next.prev = node.prev;
};
128+
129+
130+
// Splice a node in just before the tail sentinel (most-recent position).
LRUCache.prototype.addNode = function (node) {
  const last = this.tail.prev;
  last.next = node;
  node.prev = last;
  node.next = this.tail;
  this.tail.prev = node;
}
138+
139+
// Like get, but doesn't update anything
140+
// Read a value without updating recency; null when the key is absent.
LRUCache.prototype.peek = function(key) {
  const node = this.nodeHash.get(key);
  return node ? node.value : null;
}
145+
146+
// Like removeNode, but takes key and deletes from hash
147+
// Remove a key from both the recency list and the hash.
// ROBUSTNESS FIX: the original dereferenced node.prev without checking the
// lookup, so deleting an absent key threw a TypeError; now it is a no-op,
// matching the null-guard style of get() and peek(). Also reuses removeNode
// instead of duplicating the unlink logic.
LRUCache.prototype.delete = function (key) {
  const node = this.nodeHash.get(key);
  if (!node) return; // key not cached — nothing to unlink
  this.removeNode(node);
  this.nodeHash.delete(key);
}
155+
156+
// Fetch a value and mark the key most-recently-used; null on a miss.
LRUCache.prototype.get = function(key) {
  const node = this.nodeHash.get(key);
  if (node === undefined) return null;

  // re-link at the tail end to refresh recency
  this.removeNode(node);
  this.addNode(node);
  return node.value;
}
166+
167+
// used by wTinyLFU to get SLRU eviction candidates for TinyLFU decision
168+
// Evict the least-recently-used entry and hand it back as a plain
// {key, value} pair — used by w-TinyLFU to pick SLRU eviction candidates.
LRUCache.prototype.getCandidate = function () {
  const lru = this.head.next;
  this.removeNode(lru);
  this.nodeHash.delete(lru.key);
  return { key: lru.key, value: lru.value };
}
174+
175+
// Insert or update a key at the most-recent position. When the write pushes
// the cache past capacity, the LRU entry is evicted and returned as a plain
// {key, value} pair (consumed by w-TinyLFU's SLRU admission path);
// otherwise the return value is undefined.
LRUCache.prototype.put = function (key, value) {
  // drop any existing node for this key from the recency list first
  const existing = this.nodeHash.get(key);
  if (existing) this.removeNode(existing);

  // link a fresh node at the most-recent end and index it
  const fresh = new Node(key, value);
  this.addNode(fresh);
  this.nodeHash.set(key, fresh);

  // over capacity → evict from the least-recent end and report the eviction
  if (this.nodeHash.size > this.capacity) {
    const lru = this.head.next;
    this.removeNode(lru);
    this.nodeHash.delete(lru.key);
    return { key: lru.key, value: lru.value };
  }
}
196+

0 commit comments

Comments
 (0)