Compare commits

22 Commits

SHA1
0c8c0d4520
2f14847a01
e3ecf3b981
05a146fef1
dba1e2fbd3
ad572f3f29
6023b56164
675ed9bbf2
998d18ae84
d5ab4d4b18
847b330489
dae57d2604
6cd42272c4
e8ad8da0bb
45ca35f057
af1e977620
ef5939f571
e356e485b6
a11a135234
524308aa61
bffaa2aadc
b8da556091

1 .eslintignore (Normal file)

@@ -0,0 +1 @@
test

.eslintrc.js → .eslintrc.json

@@ -1,4 +1,4 @@
module.exports = {
{
  "env": {
    "commonjs": true,
    "es6": true,
@@ -15,5 +15,7 @@ module.exports = {
    "ecmaVersion": 2018
  },
  "rules": {
    "import/extensions": "off",
    "import/prefer-default-export": "off"
  }
};
}

17 .github/workflows/ci.yml (vendored, Normal file)

@@ -0,0 +1,17 @@
name: CI Pipeline

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Use Node.js 14.x
      uses: actions/setup-node@v1
      with:
        node-version: 14.x
    - run: npm ci
    - run: npm run lint
    - run: npm run coverage

5 .gitignore (vendored)

@@ -102,3 +102,8 @@ dist

# TernJS port file
.tern-port

# Other
.cache

commonjs

10 .npmignore

@@ -1,2 +1,8 @@
.eslintrc.js
.vscode
.eslintrc.json
test
.cache
.nyc_output
.github
.eslintignore
.vscode
coverage

22 .vscode/launch.json (vendored, Normal file)

@@ -0,0 +1,22 @@
{
  // Use IntelliSense to learn about possible attributes.
  // Hover to view descriptions of existing attributes.
  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
  "version": "0.2.0",
  "configurations": [
    {
      "args": [
        "--colors",
        "${workspaceFolder}/test"
      ],
      "internalConsoleOptions": "openOnSessionStart",
      "name": "Mocha Tests",
      "program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
      "request": "launch",
      "skipFiles": [
        "<node_internals>/**"
      ],
      "type": "pwa-node"
    }
  ]
}

109 README.md

@@ -1,45 +1,120 @@
# node-fetch-cache

node-fetch with caching to a directory on disk.
node-fetch with caching of responses.

The first usage with any given arguments will result in an HTTP request and any subsequent usage with the same arguments and body function (text, json, buffer, or textConverted) will read the response body from the cache on disk.
The first fetch with any given arguments will result in an HTTP request and any subsequent fetch with the same arguments will read the response from the cache.

By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache. See the **Cache Customization** section for more info.

## Usage

Require it with a directory path to cache in, and then use it the same way you would use fetch.
Require it and use it the same way you would use node-fetch:

```js
const fetch = require('node-fetch-cache')('./path/to/cache/dir');
const fetch = require('node-fetch-cache');

fetch('http://google.com')
  .then(response => response.text())
  .then(text => console.log(text));
```

The next time you `fetch('http://google.com')`, the response will be returned from the cache. No HTTP request will be made.

## API

Note that this does not support the full fetch API. Headers and some other things are not accessible.
This module's fetch function has almost the exact same API as node-fetch, and you should consult [the node-fetch documentation](https://www.npmjs.com/package/node-fetch) for how to use it.

### async fetch(resource [, init])
This module just adds one extra function to the response object:

Same arguments as [browser fetch](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch).
### res.ejectFromCache(): Promise\<void\>

Returns a **CachedResponse**.
This function can be used to eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.

### async CachedResponse.text()
This module caches ALL responses, even those with 4xx and 5xx response statuses. You can use this function to uncache such responses if desired. For example:

Returns the body as a string.
```js
const fetch = require('node-fetch-cache');

### async CachedResponse.json()
fetch('http://google.com')
  .then(async response => {
    if (!response.ok) {
      await response.ejectFromCache();
      throw new Error('Non-okay response from google.com');
    } else {
      return response.text();
    }
  }).then(text => console.log(text));
```

Returns the body as a JavaScript object, parsed from JSON.
## Streaming

### async CachedResponse.buffer()
This module does not support Stream request bodies, except for fs.ReadStream. And when using fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**.

Returns the body as a Buffer.
Streams don't quite play nice with the concept of caching based on request characteristics, because we would have to read the stream to the end to find out what's in it and hash it into a proper cache key.
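
To make that caveat concrete, here is a minimal sketch (the URL and file path are placeholders, not part of this module): the second request is served from the cache because the key is derived from the stream's path, even if the file changed on disk in between.

```js
const fs = require('fs');
const fetch = require('node-fetch-cache');

async function postImageTwice() {
  // The cache key for an fs.ReadStream body is derived from the stream's
  // path ('./image.png' here), not from the bytes in the file.
  const first = await fetch('https://example.com/upload', {
    method: 'POST',
    body: fs.createReadStream('./image.png'),
  });
  console.log(first.fromCache); // false: a real HTTP request was made

  // Served from the cache even if image.png changed on disk in between,
  // because the path (and therefore the cache key) is unchanged.
  const second = await fetch('https://example.com/upload', {
    method: 'POST',
    body: fs.createReadStream('./image.png'),
  });
  console.log(second.fromCache); // true
}

postImageTwice();
```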

### async CachedResponse.textConverted()
## Cache Customization

Identical to CachedResponse.text(), except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible.
By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache.

(textConverted requires an optional dependency on [npm package encoding](https://www.npmjs.com/package/encoding), which you need to install manually.)
### MemoryCache

This is the default cache delegate. It caches responses in-process in a POJO.

Usage:

```js
const { fetchBuilder, MemoryCache } = require('node-fetch-cache');
const fetch = fetchBuilder.withCache(new MemoryCache(options));
```

Options:

```js
{
  ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
}
```

Note that by default (if you don't use `withCache()`) a **shared** MemoryCache will be used (you can import this module in multiple files and they will all share the same cache). If you instantiate and provide a `new MemoryCache()` as shown above however, the cache is *NOT* shared unless you explicitly pass it around and pass it into `withCache()` in each of your source files.
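
For example, one way to share an explicitly created cache between files is to build the cached fetch once and export it; this is a sketch, and the file names are hypothetical.

```js
// cache.js (hypothetical file): build the cached fetch once and export it.
const { fetchBuilder, MemoryCache } = require('node-fetch-cache');
const sharedCache = new MemoryCache({ ttl: 60000 });

module.exports = fetchBuilder.withCache(sharedCache);

// elsewhere.js (hypothetical file): require the wrapper above. Calling
// fetchBuilder.withCache(new MemoryCache()) here instead would create a
// second, independent cache.
const fetch = require('./cache.js');
```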

### FileSystemCache

Cache to a directory on disk. This allows the cache to survive the process exiting.

Usage:

```js
const { fetchBuilder, FileSystemCache } = require('node-fetch-cache');
const fetch = fetchBuilder.withCache(new FileSystemCache(options));
```

Options:

```js
{
  cacheDirectory: '/my/cache/directory/path', // Specify where to keep the cache. If undefined, '.cache' is used by default. If this directory does not exist, it will be created.
  ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
}
```

### Provide Your Own

You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, bodyStream, bodyMeta)`, `get(key)`, and `remove(key)` functions.

Check the built-in [MemoryCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/memory_cache.js) and [FileSystemCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/file_system_cache.js) for examples.

The set function must accept a key (which will be a string), a body stream, and a metadata object (which will be a JSON-serializable JS object). It must store these, and then return an object with a `bodyStream` property, containing a fresh, unread stream of the body content, as well as a `metaData` property, containing the same metaData that was passed in.

The get function should accept a key and return undefined if no cached value is found, or else an object with a `bodyStream` property, containing a stream of the body content, as well as a `metaData` property, containing the metadata that was stored via the `set(key, bodyStream, bodyMeta)` function.

The remove function should accept a key and remove the cached value associated with that key, if any. It is also safe for your caching delegate to remove values from the cache arbitrarily if desired (for example if you want to implement a TTL in the caching delegate).

All three functions may be async.
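
As an illustration of that contract (a sketch, not something shipped with the module), a minimal delegate can buffer the body so that `get(key)` can return a fresh, unread stream every time:

```js
const { Readable } = require('stream');

// Minimal sketch of a custom caching delegate satisfying the contract above.
class SimpleCache {
  constructor() {
    this.entries = new Map();
  }

  async set(key, bodyStream, bodyMeta) {
    // Buffer the body so that get() can hand out a fresh stream every time.
    const chunks = [];
    for await (const chunk of bodyStream) {
      chunks.push(Buffer.from(chunk));
    }
    this.entries.set(key, { bodyBuffer: Buffer.concat(chunks), metaData: bodyMeta });
    return this.get(key);
  }

  async get(key) {
    const entry = this.entries.get(key);
    if (!entry) {
      return undefined;
    }
    return {
      bodyStream: Readable.from(entry.bodyBuffer),
      metaData: entry.metaData,
    };
  }

  async remove(key) {
    this.entries.delete(key);
  }
}

// Usage: const fetch = fetchBuilder.withCache(new SimpleCache());
```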

## Bugs / Help / Feature Requests / Contributing

For feature requests or help, please visit [the discussions page on GitHub](https://github.com/mistval/node-fetch-cache/discussions).

For bug reports, please file an issue on [the issues page on GitHub](https://github.com/mistval/node-fetch-cache/issues).

Contributions welcome! Please open a [pull request on GitHub](https://github.com/mistval/node-fetch-cache/pulls) with your changes. You can run them by me first on [the discussions page](https://github.com/mistval/node-fetch-cache/discussions) if you'd like.

4 commonjs/wrapper.cjs (Normal file)

@@ -0,0 +1,4 @@
const mod = require('./index.cjs');

module.exports = mod.default;
Object.assign(module.exports, mod);

75 index.js

@@ -1,75 +0,0 @@
const fetch = require('node-fetch');
const fs = require('fs');
const crypto = require('crypto');
const path = require('path');

function md5(str) {
  return crypto.createHash('md5').update(str).digest('hex');
}

async function getResponse(cacheDirPath, requestArguments, bodyFunctionName) {
  const [url, requestInit, ...rest] = requestArguments;
  const requestParams = requestInit && requestInit.body
    ? ({ ...requestInit, body: typeof requestInit.body === 'object' ? requestInit.body.toString() : requestInit.body })
    : requestInit;

  const cacheHash = md5(JSON.stringify([url, requestParams, ...rest]) + bodyFunctionName);
  const cachedFilePath = path.join(cacheDirPath, `${cacheHash}.json`);

  try {
    const body = JSON.parse(await fs.promises.readFile(cachedFilePath));
    if (bodyFunctionName === 'buffer') {
      return Buffer.from(body);
    }

    return body;
  } catch (err) {
    const fetchResponse = await fetch(...requestArguments);
    const bodyResponse = await fetchResponse[bodyFunctionName]();
    await fs.promises.writeFile(cachedFilePath, JSON.stringify(bodyResponse));
    return bodyResponse;
  }
}

class ResponseWrapper {
  constructor(cacheDirPath, requestArguments) {
    this.cacheDirPath = cacheDirPath;
    this.requestArguments = requestArguments;
  }

  text() {
    return getResponse(this.cacheDirPath, this.requestArguments, this.text.name);
  }

  json() {
    return getResponse(this.cacheDirPath, this.requestArguments, this.json.name);
  }

  buffer() {
    return getResponse(this.cacheDirPath, this.requestArguments, this.buffer.name);
  }

  textConverted() {
    return getResponse(this.cacheDirPath, this.requestArguments, this.textConverted.name);
  }
}

function createFetch(cacheDirPath) {
  let madeDir = false;

  return async (...args) => {
    if (!madeDir) {
      try {
        await fs.promises.mkdir(cacheDirPath, { recursive: true });
      } catch (err) {
        // Ignore.
      }

      madeDir = true;
    }

    return new ResponseWrapper(cacheDirPath, args);
  };
}

module.exports = createFetch;

2388 package-lock.json (generated)

File diff suppressed because it is too large.

33 package.json

@@ -1,12 +1,20 @@
{
  "name": "node-fetch-cache",
  "version": "1.0.6",
  "version": "3.0.0",
  "description": "node-fetch with a persistent cache.",
  "main": "index.js",
  "main": "src/index.js",
  "type": "module",
  "exports": {
    "import": "./src/index.js",
    "require": "./commonjs/wrapper.cjs"
  },
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "buildcjs": "rollup src/index.js --file commonjs/index.cjs --format cjs",
    "test": "npm run buildcjs && mocha --timeout 10000 --exit",
    "coverage": "nyc --reporter=lcov --reporter=text npm test",
    "lint": "./node_modules/.bin/eslint .",
    "lintfix": "./node_modules/.bin/eslint . --fix"
    "lintfix": "./node_modules/.bin/eslint . --fix",
    "prepublishOnly": "npm test"
  },
  "repository": {
    "type": "git",
@@ -28,9 +36,22 @@
  "devDependencies": {
    "eslint": "^6.8.0",
    "eslint-config-airbnb-base": "^14.1.0",
    "eslint-plugin-import": "^2.20.2"
    "eslint-plugin-import": "^2.20.2",
    "form-data": "^3.0.0",
    "husky": "^4.3.0",
    "mocha": "^8.2.1",
    "nyc": "^15.1.0",
    "rimraf": "^3.0.2",
    "rollup": "^2.53.0"
  },
  "dependencies": {
    "node-fetch": "*"
    "cacache": "^15.2.0",
    "locko": "0.0.3",
    "node-fetch": "2.6.1"
  },
  "husky": {
    "hooks": {
      "pre-commit": "npm run lint && npm test"
    }
  }
}

84 src/classes/caching/file_system_cache.js (Normal file)

@@ -0,0 +1,84 @@
import cacache from 'cacache';
import { Readable } from 'stream';
import { KeyTimeout } from './key_timeout.js';

function getBodyAndMetaKeys(key) {
  return [`${key}body`, `${key}meta`];
}

export class FileSystemCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.keyTimeout = new KeyTimeout();
    this.cacheDirectory = options.cacheDirectory || '.cache';
  }

  async get(key) {
    const [, metaKey] = getBodyAndMetaKeys(key);

    const metaInfo = await cacache.get.info(this.cacheDirectory, metaKey);

    if (!metaInfo) {
      return undefined;
    }

    const metaBuffer = await cacache.get.byDigest(this.cacheDirectory, metaInfo.integrity);
    const metaData = JSON.parse(metaBuffer);
    const { bodyStreamIntegrity, empty } = metaData;
    delete metaData.bodyStreamIntegrity;
    delete metaData.empty;

    const bodyStream = empty
      ? Readable.from(Buffer.alloc(0))
      : cacache.get.stream.byDigest(this.cacheDirectory, bodyStreamIntegrity);

    return {
      bodyStream,
      metaData,
    };
  }

  remove(key) {
    const [bodyKey, metaKey] = getBodyAndMetaKeys(key);

    this.keyTimeout.clearTimeout(key);

    return Promise.all([
      cacache.rm.entry(this.cacheDirectory, bodyKey),
      cacache.rm.entry(this.cacheDirectory, metaKey),
    ]);
  }

  async set(key, bodyStream, metaData) {
    const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
    const metaCopy = { ...metaData };

    this.keyTimeout.clearTimeout(key);

    try {
      metaCopy.bodyStreamIntegrity = await new Promise((fulfill, reject) => {
        bodyStream.pipe(cacache.put.stream(this.cacheDirectory, bodyKey))
          .on('integrity', (i) => fulfill(i))
          .on('error', (e) => {
            reject(e);
          });
      });
    } catch (err) {
      if (err.code !== 'ENODATA') {
        throw err;
      }

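      // ENODATA from cacache is tolerated here: the entry is recorded as
      // empty, and get() returns a zero-length body stream for it.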
      metaCopy.empty = true;
    }

    const metaBuffer = Buffer.from(JSON.stringify(metaCopy));
    await cacache.put(this.cacheDirectory, metaKey, metaBuffer);
    const cachedData = await this.get(key);

    if (typeof this.ttl === 'number') {
      this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
    }

    return cachedData;
  }
}

16 src/classes/caching/key_timeout.js (Normal file)

@@ -0,0 +1,16 @@
export class KeyTimeout {
  constructor() {
    this.timeoutHandleForKey = {};
  }

  clearTimeout(key) {
    clearTimeout(this.timeoutHandleForKey[key]);
  }

  updateTimeout(key, durationMs, callback) {
    this.clearTimeout(key);
    this.timeoutHandleForKey[key] = setTimeout(() => {
      callback();
    }, durationMs);
  }
}

47 src/classes/caching/memory_cache.js (Normal file)

@@ -0,0 +1,47 @@
import { Readable } from 'stream';
import { KeyTimeout } from './key_timeout.js';

function streamToBuffer(stream) {
  const chunks = [];
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
    stream.on('error', (err) => reject(err));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
}

export class MemoryCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.keyTimeout = new KeyTimeout();
    this.cache = {};
  }

  get(key) {
    const cachedValue = this.cache[key];
    if (cachedValue) {
      return {
        bodyStream: Readable.from(cachedValue.bodyBuffer),
        metaData: cachedValue.metaData,
      };
    }

    return undefined;
  }

  remove(key) {
    this.keyTimeout.clearTimeout(key);
    delete this.cache[key];
  }

  async set(key, bodyStream, metaData) {
    const bodyBuffer = await streamToBuffer(bodyStream);
    this.cache[key] = { bodyBuffer, metaData };

    if (typeof this.ttl === 'number') {
      this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
    }

    return this.get(key);
  }
}

29 src/classes/response.js (Normal file)

@@ -0,0 +1,29 @@
import { Response } from 'node-fetch';

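// node-fetch stores internal response state behind a symbol; it is fetched
// here so that the redirect counter can be copied into the cache metadata
// by serializeMetaFromNodeFetchResponse() below.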
const responseInternalSymbol = Object.getOwnPropertySymbols(new Response())[1];

export class NFCResponse extends Response {
  constructor(bodyStream, metaData, ejectFromCache, fromCache) {
    super(bodyStream, metaData);
    this.ejectFromCache = ejectFromCache;
    this.fromCache = fromCache;
  }

  static serializeMetaFromNodeFetchResponse(res) {
    const metaData = {
      url: res.url,
      status: res.status,
      statusText: res.statusText,
      headers: res.headers.raw(),
      size: res.size,
      timeout: res.timeout,
      counter: res[responseInternalSymbol].counter,
    };

    return metaData;
  }

  ejectFromCache() {
    return this.ejectSelfFromCache();
  }
}

150 src/index.js (Normal file)

@@ -0,0 +1,150 @@
import fetch, { Request } from 'node-fetch';
import fs from 'fs';
import { URLSearchParams } from 'url';
import crypto from 'crypto';
import locko from 'locko';
import { NFCResponse } from './classes/response.js';
import { MemoryCache } from './classes/caching/memory_cache.js';

const CACHE_VERSION = 3;

function md5(str) {
  return crypto.createHash('md5').update(str).digest('hex');
}

// Since the boundary in FormData is random,
// we ignore it for purposes of calculating
// the cache key.
function getFormDataCacheKey(formData) {
  const cacheKey = { ...formData };
  const boundary = formData.getBoundary();

  // eslint-disable-next-line no-underscore-dangle
  delete cacheKey._boundary;

  const boundaryReplaceRegex = new RegExp(boundary, 'g');

  // eslint-disable-next-line no-underscore-dangle
  cacheKey._streams = cacheKey._streams.map((s) => {
    if (typeof s === 'string') {
      return s.replace(boundaryReplaceRegex, '');
    }

    return s;
  });

  return cacheKey;
}

function getBodyCacheKeyJson(body) {
  if (!body) {
    return body;
  } if (typeof body === 'string') {
    return body;
  } if (body instanceof URLSearchParams) {
    return body.toString();
  } if (body instanceof fs.ReadStream) {
    return body.path;
  } if (body.toString && body.toString() === '[object FormData]') {
    return getFormDataCacheKey(body);
  } if (body instanceof Buffer) {
    return body.toString();
  }

  throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
}

function getRequestCacheKey(req) {
  return {
    cache: req.cache,
    credentials: req.credentials,
    destination: req.destination,
    headers: req.headers,
    integrity: req.integrity,
    method: req.method,
    redirect: req.redirect,
    referrer: req.referrer,
    referrerPolicy: req.referrerPolicy,
    url: req.url,
    body: getBodyCacheKeyJson(req.body),
  };
}

function getCacheKey(requestArguments) {
  const resource = requestArguments[0];
  const init = requestArguments[1] || {};

  const resourceCacheKeyJson = resource instanceof Request
    ? getRequestCacheKey(resource)
    : { url: resource };

  const initCacheKeyJson = { ...init };

  resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
  initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);

  delete initCacheKeyJson.agent;

  return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
}

async function getResponse(cache, requestArguments) {
  const cacheKey = getCacheKey(requestArguments);
  let cachedValue = await cache.get(cacheKey);

  const ejectSelfFromCache = () => cache.remove(cacheKey);

  if (cachedValue) {
    return new NFCResponse(
      cachedValue.bodyStream,
      cachedValue.metaData,
      ejectSelfFromCache,
      true,
    );
  }

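  // Cache miss: take a per-key lock, then check the cache once more after
  // acquiring it, so concurrent requests for the same key perform only one
  // real HTTP request (the others get the newly cached response).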
  await locko.lock(cacheKey);
  try {
    cachedValue = await cache.get(cacheKey);
    if (cachedValue) {
      return new NFCResponse(
        cachedValue.bodyStream,
        cachedValue.metaData,
        ejectSelfFromCache,
        true,
      );
    }

    const fetchResponse = await fetch(...requestArguments);
    const serializedMeta = NFCResponse.serializeMetaFromNodeFetchResponse(fetchResponse);

    const newlyCachedData = await cache.set(
      cacheKey,
      fetchResponse.body,
      serializedMeta,
    );

    return new NFCResponse(
      newlyCachedData.bodyStream,
      newlyCachedData.metaData,
      ejectSelfFromCache,
      false,
    );
  } finally {
    locko.unlock(cacheKey);
  }
}

function createFetchWithCache(cache) {
  const fetchCache = (...args) => getResponse(cache, args);
  fetchCache.withCache = createFetchWithCache;

  return fetchCache;
}

const defaultFetch = createFetchWithCache(new MemoryCache());

export default defaultFetch;
export const fetchBuilder = defaultFetch;
export { MemoryCache } from './classes/caching/memory_cache.js';
export { FileSystemCache } from './classes/caching/file_system_cache.js';

BIN test/expected_png.png (Normal file)

Binary file not shown. After size: 7.9 KiB

18 test/tests.cjs (Normal file)

@@ -0,0 +1,18 @@
const assert = require('assert');
const fetch = require('../commonjs/wrapper.cjs');

const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';

describe('Commonjs module tests', function() {
  it('Can make a request', async function() {
    const res = await fetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.status, 200);
  });

  it('Has expected properties', function() {
    assert(typeof fetch === 'function');
    assert(fetch.MemoryCache);
    assert(fetch.FileSystemCache);
    assert(fetch.fetchBuilder);
  });
});

478 test/tests.js (Normal file)

@@ -0,0 +1,478 @@
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
import FormData from 'form-data';
import assert from 'assert';
import rimraf from 'rimraf';
import path from 'path';
import { URLSearchParams } from 'url';
import standardFetch from 'node-fetch';
import FetchCache, { MemoryCache, FileSystemCache } from '../src/index.js';
import { Agent } from 'http';

const __dirname = dirname(fileURLToPath(import.meta.url));

const CACHE_PATH = path.join(__dirname, '..', '.cache');
const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));

const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';
const FOUR_HUNDRED_URL = 'https://httpbin.org/status/400';
const THREE_HUNDRED_TWO_URL = 'https://httpbin.org/status/302';
const TEXT_BODY_URL = 'https://httpbin.org/robots.txt';
const JSON_BODY_URL = 'https://httpbin.org/json';
const PNG_BODY_URL = 'https://httpbin.org/image/png';

const TEXT_BODY_EXPECTED = 'User-agent: *\nDisallow: /deny\n';

let cachedFetch;
let body;

function post(body) {
  return { method: 'POST', body };
}

function removeDates(arrOrObj) {
  if (arrOrObj.date) {
    const copy = { ...arrOrObj };
    delete copy.date;
    return copy;
  }

  if (Array.isArray(arrOrObj)) {
    if (Array.isArray(arrOrObj[0])) {
      return arrOrObj.filter(e => e[0] !== 'date');
    }

    return arrOrObj.filter(e => !Date.parse(e));
  }

  return arrOrObj;
}

function wait(ms) {
  return new Promise((fulfill) => setTimeout(fulfill, ms));
}

async function dualFetch(...args) {
  const [cachedFetchResponse, standardFetchResponse] = await Promise.all([
    cachedFetch(...args),
    standardFetch(...args),
  ]);

  return { cachedFetchResponse, standardFetchResponse };
}

beforeEach(async function() {
  rimraf.sync(CACHE_PATH);
  cachedFetch = FetchCache.withCache(new MemoryCache());
});

let res;

describe('Basic property tests', function() {
  it('Has a status property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
  });

  it('Has a statusText property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
  });

  it('Has a url property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
  });

  it('Has an ok property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(FOUR_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);

    cachedFetchResponse = await cachedFetch(FOUR_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
  });

  it('Has a redirected property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(THREE_HUNDRED_TWO_URL);
    assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);

    cachedFetchResponse = await cachedFetch(THREE_HUNDRED_TWO_URL);
    assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
  });
}).timeout(10000);

describe('Header tests', function() {
  it('Gets correct raw headers', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );
  });

  it('Gets correct header keys', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
  });

  it('Gets correct header values', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.values()]),
      removeDates([...standardFetchResponse.headers.values()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.values()]),
      removeDates([...standardFetchResponse.headers.values()]),
    );
  });

  it('Gets correct header entries', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.entries()]),
      removeDates([...standardFetchResponse.headers.entries()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.entries()]),
      removeDates([...standardFetchResponse.headers.entries()]),
    );
  });

  it('Can get a header by value', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert(standardFetchResponse.headers.get('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));
  });

  it('Returns undefined for non-existent header', async function() {
    const headerName = 'zzzz';
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert(!standardFetchResponse.headers.get(headerName));
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));
  });

  it('Can get whether a header is present', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert(standardFetchResponse.headers.has('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));
  });
}).timeout(10000);

describe('Cache tests', function() {
  it('Uses cache', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can eject from cache', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    await res.ejectFromCache();

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Does not error if ejecting from cache twice', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    await res.ejectFromCache();
    await res.ejectFromCache();
  });

  it('Gives different string bodies different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post('b'));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same string bodies same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different URLSearchParams different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=b')));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same URLSearchParams same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different read streams different cache keys', async function() {
    const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
    const s2 = fs.createReadStream(path.join(__dirname, '..', 'src', 'index.js'));

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s2));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives the same read streams the same cache key', async function() {
    const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different form data different cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('b', 'b');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same form data same cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('a', 'a');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, true);
  });

  it('Does not error with custom agent with circular properties', async function() {
    const agent = new Agent();
    agent.agent = agent;

    await cachedFetch('http://httpbin.org/status/200', { agent });
  })
}).timeout(10000);

describe('Data tests', function() {
  it('Supports request objects', async function() {
    let request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
    res = await cachedFetch(request);
    assert.strictEqual(res.fromCache, false);

    request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
    res = await cachedFetch(request);
    assert.strictEqual(res.fromCache, true);
  });

  it('Refuses to consume body twice', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    await res.text();

    try {
      await res.text();
      throw new Error('The above line should have thrown.');
    } catch (err) {
      assert(err.message.includes('body used already for:'));
    }
  });

  it('Can get text body', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get JSON body', async function() {
    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get PNG buffer body', async function() {
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can stream a body', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, true);
  });

  it('Errors if the body type is not supported', async function() {
    try {
      await cachedFetch(TEXT_BODY_URL, { body: {} });
      throw new Error('It was supposed to throw');
    } catch (err) {
      assert(err.message.includes('Unsupported body type'));
    }
  });

  it('Uses cache even if you make multiple requests at the same time', async function() {
    const [res1, res2] = await Promise.all([
      cachedFetch('http://httpbin.org/status/200'),
      cachedFetch('http://httpbin.org/status/200'),
    ]);

    // One should be false, the other should be true
    assert(res1.fromCache !== res2.fromCache);
  });
}).timeout(10000);

describe('Memory cache tests', function() {
  it('Supports TTL', async function() {
    cachedFetch = FetchCache.withCache(new MemoryCache({ ttl: 100 }));
    let res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    await wait(200);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
  });
}).timeout(10000);

describe('File system cache tests', function() {
  it('Supports TTL', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache({ ttl: 100 }));
    let res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    await wait(200);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
  });

  it('Can get PNG buffer body', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache());
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can eject from cache', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache());

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    await res.ejectFromCache();

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });
});