From 6c637bba9c4c21e2db895507afdd8baee234f54a Mon Sep 17 00:00:00 2001 From: Mark Stacey Date: Tue, 15 Dec 2020 16:51:13 -0330 Subject: [PATCH] Fix `fetch-with-cache` handling of interwoven requests (#10079) A data race was introduced in #9919 when the old synchronous storage API was replaced with an async storage API. The problem arises when `fetchWithCache` is called a second time while it's still processing another call. In this case, the `cachedFetch` object can become stale while blocked waiting for a fetch response, and result in a cache being overwritten unintentionally. See this example (options omitted for simplicity, and assuming an empty initial cache): ``` await Promise.all([ fetchWithCache('https://metamask.io/foo'), fetchWithCache('https://metamask.io/bar'), ]) ``` The order of events could be as follows: 1. Empty cache retrieved for `/foo` route 2. Empty cache retrieved for `/bar` route 3. Call made to `/foo` route 4. Call made to `/bar` route 5. `/foo` response is added to the empty cache object retrieved in step 1, then is saved in the cache. 6. `/bar` response is added to the empty cache object retrieved in step 2, then is saved in the cache. In step 6, the cache object saved would not contain the `/foo` response set in step 5. As a result, `/foo` would never be cached. This problem was resolved by embedding the URL being cached directly in the cache key. This prevents simultaneous responses from overwriting each other's caches. Technically a data race still exists when handling simultaneous responses to the same route, but the result would be that the last call to finish would overwrite the previous. This seems acceptable. 
--- ui/app/helpers/utils/fetch-with-cache.js | 9 ++-- ui/app/helpers/utils/fetch-with-cache.test.js | 51 ++++++++++++++++--- 2 files changed, 47 insertions(+), 13 deletions(-) diff --git a/ui/app/helpers/utils/fetch-with-cache.js b/ui/app/helpers/utils/fetch-with-cache.js index 351ee4ab9..ca2bbd7f0 100644 --- a/ui/app/helpers/utils/fetch-with-cache.js +++ b/ui/app/helpers/utils/fetch-with-cache.js @@ -16,7 +16,6 @@ const fetchWithCache = async ( fetchOptions.headers = new window.Headers(fetchOptions.headers) } if ( - fetchOptions.headers && fetchOptions.headers.has('Content-Type') && fetchOptions.headers.get('Content-Type') !== 'application/json' ) { @@ -24,8 +23,8 @@ const fetchWithCache = async ( } const currentTime = Date.now() - const cachedFetch = (await getStorageItem('cachedFetch')) || {} - const { cachedResponse, cachedTime } = cachedFetch[url] || {} + const cacheKey = `cachedFetch:${url}` + const { cachedResponse, cachedTime } = (await getStorageItem(cacheKey)) || {} if (cachedResponse && currentTime - cachedTime < cacheRefreshTime) { return cachedResponse } @@ -48,8 +47,8 @@ const fetchWithCache = async ( cachedResponse: responseJson, cachedTime: currentTime, } - cachedFetch[url] = cacheEntry - await setStorageItem('cachedFetch', cachedFetch) + + await setStorageItem(cacheKey, cacheEntry) return responseJson } diff --git a/ui/app/helpers/utils/fetch-with-cache.test.js b/ui/app/helpers/utils/fetch-with-cache.test.js index 43336a85e..3c98cadba 100644 --- a/ui/app/helpers/utils/fetch-with-cache.test.js +++ b/ui/app/helpers/utils/fetch-with-cache.test.js @@ -37,10 +37,8 @@ describe('Fetch with cache', function () { .reply(200, '{"average": 2}') fakeStorage.getStorageItem.returns({ - 'https://fetchwithcache.metamask.io/price': { - cachedResponse: { average: 1 }, - cachedTime: Date.now(), - }, + cachedResponse: { average: 1 }, + cachedTime: Date.now(), }) const response = await fetchWithCache( @@ -57,10 +55,8 @@ describe('Fetch with cache', function () { 
.reply(200, '{"average": 3}') fakeStorage.getStorageItem.returns({ - 'https://fetchwithcache.metamask.io/cached': { - cachedResponse: { average: 1 }, - cachedTime: Date.now() - 1000, - }, + cachedResponse: { average: 1 }, + cachedTime: Date.now() - 1000, }) const response = await fetchWithCache( @@ -135,4 +131,43 @@ describe('Fetch with cache', function () { { message: 'fetchWithCache only supports JSON responses' }, ) }) + + it('should correctly cache responses from interwoven requests', async function () { + nock('https://fetchwithcache.metamask.io') + .get('/foo') + .reply(200, '{"average": 9}') + nock('https://fetchwithcache.metamask.io') + .get('/bar') + .reply(200, '{"average": 9}') + + const testCache = {} + fakeStorage.getStorageItem.callsFake((key) => testCache[key]) + fakeStorage.setStorageItem.callsFake((key, value) => { + testCache[key] = value + }) + + await Promise.all([ + fetchWithCache( + 'https://fetchwithcache.metamask.io/foo', + {}, + { cacheRefreshTime: 123 }, + ), + fetchWithCache( + 'https://fetchwithcache.metamask.io/bar', + {}, + { cacheRefreshTime: 123 }, + ), + ]) + + assert.deepStrictEqual( + testCache['cachedFetch:https://fetchwithcache.metamask.io/foo'] + .cachedResponse, + { average: 9 }, + ) + assert.deepStrictEqual( + testCache['cachedFetch:https://fetchwithcache.metamask.io/bar'] + .cachedResponse, + { average: 9 }, + ) + }) })