make cache get used even if you make a bunch of identical requests immediately
commit dae57d2604 (parent 6cd42272c4)
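The behavior this commit changes, as an illustration only (not part of the commit): `cachedFetch` stands for the cached fetch instance the tests below construct. Before this change, two identical requests started at the same time could both miss the cache and both go to the network; with the locking added here, one request does the fetch and the other is served from cache once the first finishes.

    // Illustration only: fire two identical requests at once.
    const [res1, res2] = await Promise.all([
      cachedFetch('http://httpbin.org/status/200'),
      cachedFetch('http://httpbin.org/status/200'),
    ]);
    // Before this commit both tended to report fromCache === false;
    // after it, exactly one of the two comes from the cache.
    console.log(res1.fromCache, res2.fromCache);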
classes/response.js
@@ -33,6 +33,10 @@ export class NFCResponse extends Response {
   }

   static fromCachedResponse(bodyStream, rawMetaData, ejectSelfFromCache) {
+    if (bodyStream.readableEnded) {
+      throw new Error('Cache returned a body stream that has already been read to end.');
+    }
+
     return new NFCResponse(bodyStream, rawMetaData, ejectSelfFromCache, true);
   }
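For context, a standalone sketch (not from the commit) of the condition the new guard detects: once a Node.js Readable has been fully consumed, its `readableEnded` flag is true and it cannot serve a second reader, so handing it out as a cached body would produce an empty response.

    import { Readable } from 'stream';

    const body = Readable.from(['cached body']);
    // First consumer drains the stream to the end.
    for await (const chunk of body) { /* read and discard */ }
    console.log(body.readableEnded); // true -- fromCachedResponse would now throw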
index.js
@@ -2,6 +2,7 @@ import fetch from 'node-fetch';
 import fs from 'fs';
 import { URLSearchParams } from 'url';
 import crypto from 'crypto';
+import locko from 'locko';
 import { NFCResponse } from './classes/response.js';
 import { MemoryCache } from './classes/caching/memory_cache.js';

@@ -73,15 +74,11 @@ function getCacheKey(requestArguments) {

 async function getResponse(cache, requestArguments) {
   const cacheKey = getCacheKey(requestArguments);
-  const cachedValue = await cache.get(cacheKey);
+  let cachedValue = await cache.get(cacheKey);

   const ejectSelfFromCache = () => cache.remove(cacheKey);

   if (cachedValue) {
-    if (cachedValue.bodyStream.readableEnded) {
-      throw new Error('Cache returned a body stream that has already been read to end.');
-    }
-
     return NFCResponse.fromCachedResponse(
       cachedValue.bodyStream,
       cachedValue.metaData,
@@ -89,19 +86,33 @@ async function getResponse(cache, requestArguments) {
     );
   }

-  const fetchResponse = await fetch(...requestArguments);
-  const nfcResponse = NFCResponse.fromNodeFetchResponse(fetchResponse, ejectSelfFromCache);
-  const contentLength = Number.parseInt(nfcResponse.headers.get('content-length'), 10) || 0;
-  const nfcResponseSerialized = nfcResponse.serialize();
-
-  await cache.set(
-    cacheKey,
-    nfcResponseSerialized.bodyStream,
-    nfcResponseSerialized.metaData,
-    contentLength,
-  );
-
-  return nfcResponse;
+  await locko.lock(cacheKey);
+  try {
+    cachedValue = await cache.get(cacheKey);
+    if (cachedValue) {
+      return NFCResponse.fromCachedResponse(
+        cachedValue.bodyStream,
+        cachedValue.metaData,
+        ejectSelfFromCache,
+      );
+    }
+
+    const fetchResponse = await fetch(...requestArguments);
+    const nfcResponse = NFCResponse.fromNodeFetchResponse(fetchResponse, ejectSelfFromCache);
+    const contentLength = Number.parseInt(nfcResponse.headers.get('content-length'), 10) || 0;
+    const nfcResponseSerialized = nfcResponse.serialize();
+
+    await cache.set(
+      cacheKey,
+      nfcResponseSerialized.bodyStream,
+      nfcResponseSerialized.metaData,
+      contentLength,
+    );
+
+    return nfcResponse;
+  } finally {
+    locko.unlock(cacheKey);
+  }
 }

 function createFetchWithCache(cache) {
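The getResponse change above is a per-key double-checked pattern: check the cache, take a lock on the cache key, check again (another caller may have populated the cache while we waited), and only then fetch and store. A minimal sketch of that flow, using a plain Map and a placeholder fetchAndSerialize() in place of the node-fetch and serialization steps; those names are illustrative, not the library's:

    import locko from 'locko';

    const cache = new Map();

    async function getOrFetch(key, fetchAndSerialize) {
      const hit = cache.get(key);
      if (hit) return hit;                 // fast path: a cache hit needs no lock

      await locko.lock(key);               // serialize concurrent misses on this key
      try {
        const raced = cache.get(key);      // a concurrent caller may have won the race
        if (raced) return raced;

        const value = await fetchAndSerialize();
        cache.set(key, value);
        return value;
      } finally {
        locko.unlock(key);                 // always release, even if the fetch throws
      }
    }

The finally block mirrors the diff: without it, a failed fetch would leave the key locked and every later identical request would hang.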
package-lock.json (generated)
@@ -2323,6 +2323,11 @@
         "path-exists": "^3.0.0"
       }
     },
+    "locko": {
+      "version": "0.0.3",
+      "resolved": "https://registry.npmjs.org/locko/-/locko-0.0.3.tgz",
+      "integrity": "sha512-ekhPWcejAum9WHN2ClkFA8RAUTDyYDlRRb4dSq1wCEPhIS6IMsdSKoWHl1qineCrlMEMbeD1/o2uautG4QEc7w=="
+    },
     "lodash": {
       "version": "4.17.21",
       "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
package.json
@@ -39,6 +39,7 @@
   },
   "dependencies": {
     "cacache": "^15.2.0",
+    "locko": "0.0.3",
     "node-fetch": "2.6.1"
   },
   "husky": {
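locko 0.0.3 is the new dependency providing the per-key lock. The sketch below uses only the two calls the diff itself uses, lock(key) and unlock(key); the serialization behavior is inferred from how the commit uses them, not from locko's documentation:

    import locko from 'locko';

    async function critical(id) {
      await locko.lock('some-cache-key');  // resolves once no other caller holds this key
      try {
        console.log(`${id} entered`);
        await new Promise((resolve) => setTimeout(resolve, 100));
      } finally {
        locko.unlock('some-cache-key');
        console.log(`${id} released`);
      }
    }

    // Expected to run one at a time: "a entered", "a released", "b entered", "b released".
    await Promise.all([critical('a'), critical('b')]);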
test file
@@ -404,6 +404,16 @@ describe('Data tests', function() {
       assert(err.message.includes('Unsupported body type'));
     }
   });
+
+  it('Uses cache even if you make multiple requests at the same time', async function() {
+    const [res1, res2] = await Promise.all([
+      cachedFetch('http://httpbin.org/status/200'),
+      cachedFetch('http://httpbin.org/status/200'),
+    ]);
+
+    // One should be false, the other should be true
+    assert(res1.fromCache !== res2.fromCache);
+  });
 }).timeout(10000);

 describe('Memory cache tests', function() {
@@ -466,3 +476,4 @@ describe('File system cache tests', function() {
     assert.strictEqual(res.fromCache, true);
   });
 });
+console.log(process.cwd())