|
| 1 | +/* eslint-disable no-undef */ |
| 2 | + |
| 3 | +import { MemoryCache } from '@/lib/cache/MemoryCache'; |
| 4 | +import { getResolver } from '@/resolvers/jobs'; |
| 5 | +const hash = require('object-hash'); |
| 6 | + |
| 7 | +let cache: MemoryCache; |
| 8 | +let resolver: any; |
| 9 | +const storyIds: number[] = []; |
| 10 | +const stories: Record<string, any>[] = []; |
| 11 | + |
| 12 | +process.env.HACKERNEWS_API_URL = 'http://127.0.0.1:3031'; |
| 13 | + |
// Re-create the cache and resolver before every test so entries cached by
// one test can never influence another.
beforeEach(() => {
  cache = new MemoryCache();
  // The resolver closes over the module-level storyIds/stories arrays
  // together with the freshly created cache.
  resolver = getResolver(storyIds, stories, cache);
});
| 18 | + |
| 19 | +it('it retrieves the story data and returns it', async () => { |
| 20 | + const req = { body: { first: 2, skipText: false } }; |
| 21 | + const data = await resolver({}, { first: 2, skipText: false }, { req }); |
| 22 | + |
| 23 | + for (const item of data) { |
| 24 | + item.expires = 0; |
| 25 | + } |
| 26 | + |
| 27 | + expect(data).toMatchSnapshot(); |
| 28 | +}); |
| 29 | + |
| 30 | +it('it retrieves the job ids and caches them', async () => { |
| 31 | + const req = { body: { query: 'abc', vars: 'bbb' } }; |
| 32 | + const reqHash = hash(req.body); |
| 33 | + |
| 34 | + await resolver({}, { first: 2, skipText: false }, { req }); |
| 35 | + |
| 36 | + expect(await cache.has(`jobstoryids:2:${reqHash}`)).toBeTruthy(); |
| 37 | +}); |
| 38 | + |
| 39 | +it('it retrieves each job story and caches it', async () => { |
| 40 | + const req = { body: { first: 2, skipText: false, a: 1, b: 2 } }; |
| 41 | + const data = await resolver({}, { first: 2, skipText: false }, { req }); |
| 42 | + |
| 43 | + data.forEach(item => { |
| 44 | + expect(cache.has(`jobstory:${item.id}`)).toBeTruthy(); |
| 45 | + }); |
| 46 | + |
| 47 | + for (const item in cache.map) { |
| 48 | + cache.map[item].ts = 0; |
| 49 | + cache.map[item].expires = 0; |
| 50 | + } |
| 51 | + |
| 52 | + expect(cache.map).toMatchSnapshot(); |
| 53 | +}); |
0 commit comments