Mirror of https://github.com/kremalicious/metamask-extension.git (synced 2024-11-22 09:57:02 +01:00)
Fix fetch-with-cache handling of interwoven requests (#10079)
A data race was introduced in #9919 when the old synchronous storage API was replaced with an async storage API. The problem arises when `fetchWithCache` is called a second time while it is still processing another call. In that case, the `cachedFetch` object can become stale while the first call is blocked waiting for a fetch response, and the cache can be overwritten unintentionally. Consider this example (options omitted for simplicity, and assuming an empty initial cache):

```
await Promise.all([
  fetchWithCache('https://metamask.io/foo'),
  fetchWithCache('https://metamask.io/bar'),
])
```

The order of events could be as follows:

1. Empty cache retrieved for `/foo` route
2. Empty cache retrieved for `/bar` route
3. Call made to `/foo` route
4. Call made to `/bar` route
5. `/foo` response is added to the empty cache object retrieved in step 1, then saved in the cache.
6. `/bar` response is added to the empty cache object retrieved in step 2, then saved in the cache.

In step 6, the saved cache object does not contain the `/foo` response set in step 5. As a result, `/foo` is never cached.

This problem was resolved by embedding the URL being cached directly in the cache key. This prevents simultaneous responses from overwriting each other's caches. Technically a data race still exists when handling simultaneous requests to the same route, but in that case the last call to finish simply overwrites the previous one, which seems acceptable.
parent 1b19f5e7ad · commit 6c637bba9c
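To make the race described above concrete, here is a minimal, self-contained sketch of the two caching strategies. It uses an in-memory async store in place of the extension's real storage helpers; only the helper names and the `cachedFetch:<url>` key format mirror the diff below, and everything else is illustrative rather than the extension's actual code.

```js
// In-memory stand-ins for the async storage helpers (illustrative only).
const store = new Map()
const getStorageItem = async (key) => store.get(key)
const setStorageItem = async (key, value) => {
  store.set(key, value)
}

// Racy pattern (before the fix): every caller reads the shared 'cachedFetch'
// object, mutates its own copy, and writes the whole object back.
async function cacheResponseShared(url, response) {
  const cachedFetch = (await getStorageItem('cachedFetch')) || {}
  await Promise.resolve() // stands in for awaiting the network response
  cachedFetch[url] = { cachedResponse: response, cachedTime: Date.now() }
  await setStorageItem('cachedFetch', cachedFetch)
}

// Fixed pattern (after the fix): each URL gets its own storage key, so
// concurrent writes for different URLs cannot clobber each other.
async function cacheResponseKeyed(url, response) {
  const cacheKey = `cachedFetch:${url}`
  await Promise.resolve() // stands in for awaiting the network response
  await setStorageItem(cacheKey, {
    cachedResponse: response,
    cachedTime: Date.now(),
  })
}

async function demo() {
  await Promise.all([
    cacheResponseShared('https://metamask.io/foo', { average: 1 }),
    cacheResponseShared('https://metamask.io/bar', { average: 2 }),
  ])
  // Last writer wins with a stale copy, so one of the two entries is lost.
  console.log(Object.keys(store.get('cachedFetch'))) // e.g. [ 'https://metamask.io/bar' ]

  store.clear()
  await Promise.all([
    cacheResponseKeyed('https://metamask.io/foo', { average: 1 }),
    cacheResponseKeyed('https://metamask.io/bar', { average: 2 }),
  ])
  // Both entries survive, each under its own key.
  console.log([...store.keys()])
}

demo()
```

The keyed version accepts the remaining benign race noted in the commit message: two simultaneous calls for the same URL simply overwrite each other's entry for that one key.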
@@ -16,7 +16,6 @@ const fetchWithCache = async (
     fetchOptions.headers = new window.Headers(fetchOptions.headers)
   }
   if (
-    fetchOptions.headers &&
     fetchOptions.headers.has('Content-Type') &&
     fetchOptions.headers.get('Content-Type') !== 'application/json'
   ) {
@@ -24,8 +23,8 @@ const fetchWithCache = async (
   }
 
   const currentTime = Date.now()
-  const cachedFetch = (await getStorageItem('cachedFetch')) || {}
-  const { cachedResponse, cachedTime } = cachedFetch[url] || {}
+  const cacheKey = `cachedFetch:${url}`
+  const { cachedResponse, cachedTime } = (await getStorageItem(cacheKey)) || {}
   if (cachedResponse && currentTime - cachedTime < cacheRefreshTime) {
     return cachedResponse
   }
@@ -48,8 +47,8 @@ const fetchWithCache = async (
     cachedResponse: responseJson,
     cachedTime: currentTime,
   }
-  cachedFetch[url] = cacheEntry
-  await setStorageItem('cachedFetch', cachedFetch)
+
+  await setStorageItem(cacheKey, cacheEntry)
   return responseJson
 }
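Read together, the three hunks above give the fetch-with-cache flow sketched below. The `cacheKey` format, the storage calls, and the shape of `cacheEntry` come straight from the diff; the function signature, the default refresh time, the bare `fetch` call, and the in-memory storage stand-ins are simplifying assumptions, not the module's exact code (which, for example, also applies a request timeout).

```js
// Simplified sketch of the post-fix flow; not the extension's exact module.
const store = new Map()
const getStorageItem = async (key) => store.get(key)
const setStorageItem = async (key, value) => {
  store.set(key, value)
}

const fetchWithCache = async (
  url,
  fetchOptions = {},
  { cacheRefreshTime = 360000 } = {},
) => {
  const currentTime = Date.now()
  // Per-URL key: concurrent calls for different URLs read and write
  // independent entries instead of one shared 'cachedFetch' object.
  const cacheKey = `cachedFetch:${url}`
  const { cachedResponse, cachedTime } = (await getStorageItem(cacheKey)) || {}
  if (cachedResponse && currentTime - cachedTime < cacheRefreshTime) {
    return cachedResponse
  }

  const response = await fetch(url, fetchOptions)
  const responseJson = await response.json()

  const cacheEntry = {
    cachedResponse: responseJson,
    cachedTime: currentTime,
  }
  // Write back only this URL's entry.
  await setStorageItem(cacheKey, cacheEntry)
  return responseJson
}
```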
@@ -37,10 +37,8 @@ describe('Fetch with cache', function () {
       .reply(200, '{"average": 2}')
 
     fakeStorage.getStorageItem.returns({
-      'https://fetchwithcache.metamask.io/price': {
-        cachedResponse: { average: 1 },
-        cachedTime: Date.now(),
-      },
+      cachedResponse: { average: 1 },
+      cachedTime: Date.now(),
     })
 
     const response = await fetchWithCache(
@@ -57,10 +55,8 @@ describe('Fetch with cache', function () {
       .reply(200, '{"average": 3}')
 
     fakeStorage.getStorageItem.returns({
-      'https://fetchwithcache.metamask.io/cached': {
-        cachedResponse: { average: 1 },
-        cachedTime: Date.now() - 1000,
-      },
+      cachedResponse: { average: 1 },
+      cachedTime: Date.now() - 1000,
     })
 
     const response = await fetchWithCache(
@@ -135,4 +131,43 @@ describe('Fetch with cache', function () {
       { message: 'fetchWithCache only supports JSON responses' },
     )
   })
+
+  it('should correctly cache responses from interwoven requests', async function () {
+    nock('https://fetchwithcache.metamask.io')
+      .get('/foo')
+      .reply(200, '{"average": 9}')
+    nock('https://fetchwithcache.metamask.io')
+      .get('/bar')
+      .reply(200, '{"average": 9}')
+
+    const testCache = {}
+    fakeStorage.getStorageItem.callsFake((key) => testCache[key])
+    fakeStorage.setStorageItem.callsFake((key, value) => {
+      testCache[key] = value
+    })
+
+    await Promise.all([
+      fetchWithCache(
+        'https://fetchwithcache.metamask.io/foo',
+        {},
+        { cacheRefreshTime: 123 },
+      ),
+      fetchWithCache(
+        'https://fetchwithcache.metamask.io/bar',
+        {},
+        { cacheRefreshTime: 123 },
+      ),
+    ])
+
+    assert.deepStrictEqual(
+      testCache['cachedFetch:https://fetchwithcache.metamask.io/foo']
+        .cachedResponse,
+      { average: 9 },
+    )
+    assert.deepStrictEqual(
+      testCache['cachedFetch:https://fetchwithcache.metamask.io/bar']
+        .cachedResponse,
+      { average: 9 },
+    )
+  })
 })
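A note on the test changes: because lookups are now per-key, the single-request tests stub `getStorageItem` to return one cache entry directly instead of a URL-keyed map, and the new interwoven-request test backs both stubs with a plain object so that each `cachedFetch:<url>` key round-trips independently. The hunks assume a `fakeStorage` object of sinon stubs substituted for the real storage helpers; a minimal sketch of that setup follows, where the stub object itself and its wiring are assumptions and only the `.returns`/`.callsFake` usage comes from the diff.

```js
const sinon = require('sinon')

// Sinon stubs standing in for the async storage helpers; the real test
// injects an object like this in place of the storage-helpers module.
const fakeStorage = {
  getStorageItem: sinon.stub(),
  setStorageItem: sinon.stub(),
}

// Single-request tests: the stub returns one cache entry directly,
// because lookups are now per-key rather than via one shared object.
fakeStorage.getStorageItem.returns({
  cachedResponse: { average: 1 },
  cachedTime: Date.now(),
})

// Interwoven-request test: a plain object backs both stubs so that each
// 'cachedFetch:<url>' key is stored and read independently.
const testCache = {}
fakeStorage.getStorageItem.callsFake((key) => testCache[key])
fakeStorage.setStorageItem.callsFake((key, value) => {
  testCache[key] = value
})
```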