@@ -55,19 +55,29 @@ export async function insertForUrl(url: string, data: mixed) {
     responseForCachePolicy(resp),
   )
 
-  return cacheItem({key, response: resp, policy})
+  return cacheItem({key, response: resp, policy, bundled: true})
 }
 
 // Does the magic: stores a Request into AsyncStorage
-type CacheItemArgs = {key: string, response: Response, policy: CachePolicy}
-async function cacheItem({key, response, policy}: CacheItemArgs) {
+type CacheItemArgs = {
+  key: string,
+  response: Response,
+  policy: CachePolicy,
+  bundled?: boolean,
+}
+async function cacheItem({key, response, policy, bundled}: CacheItemArgs) {
   response = await serializeResponse(response)
 
+  let strResponse = JSON.stringify(response)
   await AsyncStorage.multiSet([
-    [`${ROOT}:${key}:response`, JSON.stringify(response)],
+    [`${ROOT}:${key}:response`, strResponse],
     [`${ROOT}:${key}:policy`, JSON.stringify(policy.toObject())],
     [`${ROOT}:${key}:ttl`, JSON.stringify(policy.timeToLive())],
   ])
+
+  if (bundled) {
+    await AsyncStorage.setItem(`${ROOT}:${key}:bundled`, strResponse)
+  }
 }
 
 // Does more magic: gets a Request from AsyncStorage
@@ -90,50 +100,79 @@ async function getItem(key: string): Promise<GetItemResult> {
   }
 }
 
-// Requests a URL and retrieves it from the cache if possible
-export async function cachedFetch(request: Request): Promise<Response> {
-  let {url} = request
+// Handles the case of no-data-yet-cached
+async function handleInitialFetch(args: {request: Request, key: string}) {
+  let {request, key} = args
+
+  debug && console.log(`fetch(${request.url}): no policy cached; fetching`)
+
+  // I explicitly want errors here to propagate. Why? Bundled data will have
+  // an expired policy stored, so it won't hit this branch. Thus, the only
+  // requests in here will have nothing to fall back to, so we need some way
+  // to signal that an error happened.
+  let response = await fetch(request)
 
   let cachePolicyRequest = requestForCachePolicy(request)
+  let cachePolicyResponse = responseForCachePolicy(response)
 
-  let key = `urlcache:${url}`
-  let {response: oldResponse, policy: oldPolicy} = await getItem(key)
+  let policy = new CachePolicy(cachePolicyRequest, cachePolicyResponse)
 
-  // If nothing has ever been cached, go fetch it
-  if (!oldPolicy) {
-    debug && console.log(`fetch(${request.url}): no policy cached; fetching`)
+  if (policy.storable()) {
+    debug && console.log(`fetch(${request.url}): caching`)
+    await cacheItem({key, response, policy})
+  } else {
+    debug && console.log(`fetch(${request.url}): not cachable`)
+  }
 
-    let response = await fetch(request)
-    let cachePolicyResponse = responseForCachePolicy(response)
+  return response
+}
 
-    let policy = new CachePolicy(cachePolicyRequest, cachePolicyResponse)
+type HandlePartialFetchArgs = {
+  request: Request,
+  oldResponse: Response,
+  oldPolicy: CachePolicy,
+  key: string,
+}
 
-    if (policy.storable()) {
-      debug && console.log(`fetch(${request.url}): caching`)
-      await cacheItem({key, response, policy})
-    } else {
-      debug && console.log(`fetch(${request.url}): not cachable`)
-    }
+// Handles the case of cached-and-fresh data
+function handleCachedButStillFresh(args: HandlePartialFetchArgs) {
+  let {request, oldResponse, oldPolicy} = args
 
-    return response
-  }
+  debug && console.log(`fetch(${request.url}): fresh; returning`)
+  oldResponse.headers = new Headers(oldPolicy.responseHeaders())
+  return oldResponse
+}
 
-  // If we can re-use the cached data, return it; otherwise, revalidate it with the server
-  if (oldPolicy.satisfiesWithoutRevalidation(cachePolicyRequest)) {
-    debug && console.log(`fetch(${request.url}): fresh; returning`)
+// Handles the case of cached-but-stale data
+async function handleStale(args: HandlePartialFetchArgs) {
+  let {request, oldResponse, oldPolicy, key} = args
+
+  debug && console.log(`fetch(${request.url}): stale; validating`)
+
+  let cachePolicyRequest = requestForCachePolicy(request)
+
+  let newResponse = null
+  try {
+    // Update the request to ask the origin server if the cached response can be used
+    let newHeaders = oldPolicy.revalidationHeaders(cachePolicyRequest)
+    request.headers = new Headers(newHeaders)
+
+    // Send request to the origin server. The server may respond with status 304.
+    newResponse = await fetch(request)
+  } catch (error) {
+    // "A fetch() promise only rejects when a network error is encountered [...] not on HTTP errors such as 404"
+    // - https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch
+
+    // We know there's data in the cache, or we wouldn't have hit this spot.
+    // We've made the decision to return "stale" data if we're offline, so if
+    // we have a network error, we just do an early return with the cached
+    // data.
+
+    debug && console.log(`fetch(${request.url}): offline; returning stale data`)
     oldResponse.headers = new Headers(oldPolicy.responseHeaders())
     return oldResponse
   }
 
-  // Update the request to ask the origin server if the cached response can be used
-  request.headers = new Headers(
-    oldPolicy.revalidationHeaders(cachePolicyRequest),
-  )
-
-  debug && console.log(`fetch(${request.url}): stale; validating`)
-
-  // Send request to the origin server. The server may respond with status 304
-  let newResponse = await fetch(request)
   let newCachePolicyResponse = responseForCachePolicy(newResponse)
 
   // Create updated policy and combined response from the old and new data
@@ -162,3 +201,39 @@ export async function cachedFetch(request: Request): Promise<Response> {
 
   return response
 }
+
+// Returns the bundled response when in development
+function handleBundledInDev(request: Request, bundledResponse: string) {
+  debug &&
+    console.log(`fetch(${request.url}): in dev mode; returning bundled data`)
+  let {body, ...init} = JSON.parse(bundledResponse)
+  return new Response(body, init)
+}
+
+// Requests a URL and retrieves it from the cache if possible
+export async function cachedFetch(request: Request): Promise<Response> {
+  let {url} = request
+
+  let key = `urlcache:${url}`
+  let {response: oldResponse, policy: oldPolicy} = await getItem(key)
+
+  // If we're in dev and there's bundled data, return it
+  if (process.env.NODE_ENV === 'development') {
+    let bundledResponse = await AsyncStorage.getItem(`${ROOT}:${key}:bundled`)
+    if (bundledResponse) {
+      return handleBundledInDev(request, bundledResponse)
+    }
+  }
+
+  // If nothing has ever been cached, go fetch it
+  if (!oldPolicy) {
+    return handleInitialFetch({request, key})
+  }
+
+  // If we can re-use the cached data, return it; otherwise, revalidate it with the server
+  if (oldPolicy.satisfiesWithoutRevalidation(requestForCachePolicy(request))) {
+    return handleCachedButStillFresh({request, oldResponse, oldPolicy, key})
+  }
+
+  return handleStale({request, oldResponse, oldPolicy, key})
+}
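
For context, a minimal sketch of how the two exported entry points might be used together; the module path, URL, and the `require`-d JSON file are hypothetical and not part of this diff:

import {insertForUrl, cachedFetch} from './cached-fetch'

async function loadMenu() {
  // Seed the cache with the data shipped inside the app bundle. With this
  // change, insertForUrl also stores a copy under the extra `:bundled` key.
  await insertForUrl(
    'https://example.com/menu.json',
    require('../docs/menu.json'),
  )

  // Later requests go through cachedFetch, which returns the bundled copy in
  // development, fresh cached data when the policy allows it, and otherwise
  // revalidates with the server (falling back to stale data when offline).
  let response = await cachedFetch(new Request('https://example.com/menu.json'))
  return response.json()
}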