Version 2.0
This commit is contained in:
parent
dea019c42c
commit
b8da556091
1
.eslintignore
Normal file
1
.eslintignore
Normal file
@ -0,0 +1 @@
|
|||||||
|
test
|
17
.github/workspaces/ci.yml
vendored
Normal file
17
.github/workspaces/ci.yml
vendored
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
name: CI Pipeline
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- name: Use Node.js 14.x
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 14.x
|
||||||
|
- run: npm ci
|
||||||
|
- run: npm run lint
|
||||||
|
- run: npm run coverage
|
3
.gitignore
vendored
3
.gitignore
vendored
@ -102,3 +102,6 @@ dist
|
|||||||
|
|
||||||
# TernJS port file
|
# TernJS port file
|
||||||
.tern-port
|
.tern-port
|
||||||
|
|
||||||
|
# Other
|
||||||
|
.cache
|
||||||
|
@ -1,2 +1,8 @@
|
|||||||
.eslintrc.js
|
.eslintrc.js
|
||||||
|
test
|
||||||
|
.cache
|
||||||
|
.nyc_output
|
||||||
|
.github
|
||||||
|
.eslintignore
|
||||||
.vscode
|
.vscode
|
||||||
|
coverage
|
||||||
|
22
.vscode/launch.json
vendored
Normal file
22
.vscode/launch.json
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
// Use IntelliSense to learn about possible attributes.
|
||||||
|
// Hover to view descriptions of existing attributes.
|
||||||
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"args": [
|
||||||
|
"--colors",
|
||||||
|
"${workspaceFolder}/test"
|
||||||
|
],
|
||||||
|
"internalConsoleOptions": "openOnSessionStart",
|
||||||
|
"name": "Mocha Tests",
|
||||||
|
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
|
||||||
|
"request": "launch",
|
||||||
|
"skipFiles": [
|
||||||
|
"<node_internals>/**"
|
||||||
|
],
|
||||||
|
"type": "pwa-node"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
181
README.md
181
README.md
@ -1,45 +1,194 @@
|
|||||||
# node-fetch-cache
|
# node-fetch-cache
|
||||||
|
|
||||||
node-fetch with caching to a directory on disk.
|
node-fetch with caching of responses.
|
||||||
|
|
||||||
The first usage with any given arguments will result in an HTTP request and any subsequent usage with the same arguments and body function (text, json, buffer, or textConverted) will read the response body from the cache on disk.
|
The first fetch with any given arguments will result in an HTTP request and any subsequent fetch with the same arguments will read the response from the cache.
|
||||||
|
|
||||||
|
By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache. See the **Cache Customization** section for more info.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
Require it with a directory path to cache in, and then use it the same way you would use fetch.
|
Require it and use it the same way you would use node-fetch:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const fetch = require('node-fetch-cache')('./path/to/cache/dir');
|
const fetch = require('node-fetch-cache');
|
||||||
|
|
||||||
fetch('http://google.com')
|
fetch('http://google.com')
|
||||||
.then(response => response.text())
|
.then(response => response.text())
|
||||||
.then(text => console.log(text));
|
.then(text => console.log(text));
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The next time you `fetch('http://google.com')`, the response will be returned from the cache. No HTTP request will be made.
|
||||||
|
|
||||||
## API
|
## API
|
||||||
|
|
||||||
Note that this does not support the full fetch API. Headers and some other things are not accessible.
|
This module aims to expose the same API as `node-fetch` does for the most common use cases, but may not support some of the less common functions, properties, and use cases.
|
||||||
|
|
||||||
### async fetch(resource [, init])
|
### const fetch = require('node-fetch-cache');
|
||||||
|
|
||||||
Same arguments as [browser fetch](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch).
|
Load the module.
|
||||||
|
|
||||||
|
### await fetch(resource [, init])
|
||||||
|
|
||||||
|
Same arguments as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
Returns a **CachedResponse**.
|
Returns a **CachedResponse**.
|
||||||
|
|
||||||
### async CachedResponse.text()
|
### await CachedResponse.ejectFromCache()
|
||||||
|
|
||||||
Returns the body as a string.
|
Eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.
|
||||||
|
|
||||||
### async CachedResponse.json()
|
Keep in mind that this module caches **all** responses, even if they return errors. You might want to use this function in certain cases like receiving a 5xx response status, so that you can retry requests.
|
||||||
|
|
||||||
Returns the body as a JavaScript object, parsed from JSON.
|
### await CachedResponse.text()
|
||||||
|
|
||||||
### async CachedResponse.buffer()
|
Returns the body as a string, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
Returns the body as a Buffer.
|
### await CachedResponse.json()
|
||||||
|
|
||||||
### async CachedResponse.textConverted()
|
Returns the body as a JavaScript object, parsed from JSON, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
Identical to CachedResponse.text(), except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible.
|
### await CachedResponse.buffer()
|
||||||
|
|
||||||
(textConverted requires an optional dependency on [npm package encoding](https://www.npmjs.com/package/encoding), which you need to install manually.
|
Returns the body as a Buffer, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### CachedResponse.status
|
||||||
|
|
||||||
|
Returns the HTTP status code of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### CachedResponse.statusText
|
||||||
|
|
||||||
|
Returns a text represention of the response status, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### CachedResponse.ok
|
||||||
|
|
||||||
|
Returns true if the request returned a successful response status, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### CachedResponse.redirected
|
||||||
|
|
||||||
|
Returns true if the request was redirected, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### CachedResponse.headers
|
||||||
|
|
||||||
|
Returns a **ResponseHeaders** object representing the headers of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.entries()
|
||||||
|
|
||||||
|
Returns the raw headers as an array of `[key, value]` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.keys()
|
||||||
|
|
||||||
|
Returns an array of all header keys, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.values()
|
||||||
|
|
||||||
|
Returns an array of all header values, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.get(key)
|
||||||
|
|
||||||
|
Returns the value of the header with the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.has(key)
|
||||||
|
|
||||||
|
Returns true if the headers has a value for the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
### ResponseHeaders.raw
|
||||||
|
|
||||||
|
Returns the headers as an object of `{ "key": "value" }` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
|
||||||
|
|
||||||
|
## Streaming
|
||||||
|
|
||||||
|
This module supports streams like [node-fetch](https://www.npmjs.com/package/node-fetch) does, but with a couple of caveats you should be aware of if you want to use streams.
|
||||||
|
|
||||||
|
1. Response bodies are always read into memory even if you stream them to disk. That means if you need to stream large responses that don't fit into RAM, this module may be unsuitable.
|
||||||
|
2. When streaming a request body with fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**. This module may be unsuitable if you need to stream files in requests and the content of those files can change.
|
||||||
|
|
||||||
|
## Cache Customization
|
||||||
|
|
||||||
|
By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache.
|
||||||
|
|
||||||
|
### MemoryCache
|
||||||
|
|
||||||
|
This is the default cache delegate. It caches responses in-process in a POJO.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { fetchBuilder, MemoryCache } = require('node-fetch-cache');
|
||||||
|
const fetch = fetchBuilder.withCache(new MemoryCache(options));
|
||||||
|
```
|
||||||
|
|
||||||
|
Options:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that by default (if you don't use `withCache()`) a **shared** MemoryCache will be used (you can import this module in multiple files and they will all share the same cache). If you instantiate and provide a `new MemoryCache()` as shown above however, the cache is *NOT* shared unless you explicitly pass it around and pass it into `withCache()` in each of your source files.
|
||||||
|
|
||||||
|
### FileSystemCache
|
||||||
|
|
||||||
|
Cache to a directory on disk. This allows the cache to survive the process exiting.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { fetchBuilder, FileSystemCache } = require('node-fetch-cache');
|
||||||
|
const fetch = fetchBuilder.withCache(new FileSystemCache(options));
|
||||||
|
```
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
cacheDirectory: '/my/cache/directory/path', // Specify where to keep the cache. If undefined, '.cache' is used by default. If this directory does not exist, it will be created.
|
||||||
|
ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Provide Your Own
|
||||||
|
|
||||||
|
You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, value)`, `get(key)`, and `remove(key)` functions.
|
||||||
|
|
||||||
|
The set function must accept a key (which will be a string) and a value (which will be a JSON-serializable JS object) and store them.
|
||||||
|
|
||||||
|
The get function should accept a key and return whatever value was set for that key (or `undefined`/`null` if there is no value for that key).
|
||||||
|
|
||||||
|
The remove function should accept a key and remove the cached value associated with that key, if any.
|
||||||
|
|
||||||
|
Both functions can be async.
|
||||||
|
|
||||||
|
It is safe to remove values from the cache arbitrarily (for example if you implement a TTL in the caching delegate).
|
||||||
|
|
||||||
|
Example: you could make and use your own simple memory cache like this:
|
||||||
|
|
||||||
|
```js
|
||||||
|
class MyMemoryCache {
|
||||||
|
set(key, value) {
|
||||||
|
this[key] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key) {
|
||||||
|
return this[key];
|
||||||
|
}
|
||||||
|
|
||||||
|
remove(key) {
|
||||||
|
delete this[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetchBuilder = require('node-fetch-cache');
|
||||||
|
fetch = fetchBuilder.withCache(new MyMemoryCache());
|
||||||
|
|
||||||
|
fetch('http://google.com')
|
||||||
|
.then(response => response.text())
|
||||||
|
.then(text => console.log(text));
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bugs / Help / Feature Requests / Contributing
|
||||||
|
|
||||||
|
For feature requests or help, please visit [the discussions page on GitHub](https://github.com/mistval/node-fetch-cache/discussions).
|
||||||
|
|
||||||
|
For bug reports, please file an issue on [the issues page on GitHub](https://github.com/mistval/node-fetch-cache/issues).
|
||||||
|
|
||||||
|
Contributions welcome! Please open a [pull request on GitHub](https://github.com/mistval/node-fetch-cache/pulls) with your changes. You can run them by me first on [the discussions page](https://github.com/mistval/node-fetch-cache/discussions) if you'd like.
|
||||||
|
29
classes/caching/file_system_cache.js
Normal file
29
classes/caching/file_system_cache.js
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
const FPersist = require('fpersist');
|
||||||
|
const KeyTimeout = require('./key_timeout.js');
|
||||||
|
|
||||||
|
module.exports = class FileSystemCache {
|
||||||
|
constructor(options = {}) {
|
||||||
|
this.ttl = options.ttl;
|
||||||
|
this.keyTimeout = new KeyTimeout();
|
||||||
|
|
||||||
|
const cacheDirectory = options.cacheDirectory || '.cache';
|
||||||
|
this.cache = new FPersist(cacheDirectory);
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key) {
|
||||||
|
return this.cache.getItem(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
remove(key) {
|
||||||
|
this.keyTimeout.clearTimeout(key);
|
||||||
|
return this.cache.deleteItem(key);
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key, value) {
|
||||||
|
await this.cache.setItem(key, value);
|
||||||
|
|
||||||
|
if (typeof this.ttl === 'number') {
|
||||||
|
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
16
classes/caching/key_timeout.js
Normal file
16
classes/caching/key_timeout.js
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
module.exports = class KeyTimeout {
|
||||||
|
constructor() {
|
||||||
|
this.timeoutHandleForKey = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
clearTimeout(key) {
|
||||||
|
clearTimeout(this.timeoutHandleForKey[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
updateTimeout(key, durationMs, callback) {
|
||||||
|
this.clearTimeout(key);
|
||||||
|
this.timeoutHandleForKey[key] = setTimeout(() => {
|
||||||
|
callback();
|
||||||
|
}, durationMs);
|
||||||
|
}
|
||||||
|
};
|
26
classes/caching/memory_cache.js
Normal file
26
classes/caching/memory_cache.js
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
const KeyTimeout = require('./key_timeout.js');
|
||||||
|
|
||||||
|
module.exports = class MemoryCache {
|
||||||
|
constructor(options = {}) {
|
||||||
|
this.ttl = options.ttl;
|
||||||
|
this.keyTimeout = new KeyTimeout();
|
||||||
|
this.cache = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key) {
|
||||||
|
return this.cache[key];
|
||||||
|
}
|
||||||
|
|
||||||
|
remove(key) {
|
||||||
|
this.keyTimeout.clearTimeout(key);
|
||||||
|
delete this.cache[key];
|
||||||
|
}
|
||||||
|
|
||||||
|
set(key, value) {
|
||||||
|
this.cache[key] = value;
|
||||||
|
|
||||||
|
if (typeof this.ttl === 'number') {
|
||||||
|
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
33
classes/headers.js
Normal file
33
classes/headers.js
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
class Headers {
|
||||||
|
constructor(rawHeaders) {
|
||||||
|
this.rawHeaders = rawHeaders;
|
||||||
|
}
|
||||||
|
|
||||||
|
entries() {
|
||||||
|
return Object.entries(this.rawHeaders)
|
||||||
|
.sort((e1, e2) => e1[0].localeCompare(e2[0]))
|
||||||
|
.map(([key, val]) => [key, val[0]]);
|
||||||
|
}
|
||||||
|
|
||||||
|
keys() {
|
||||||
|
return this.entries().map((e) => e[0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
values() {
|
||||||
|
return this.entries().map((e) => e[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
get(name) {
|
||||||
|
return (this.rawHeaders[name.toLowerCase()] || [])[0] || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
has(name) {
|
||||||
|
return !!this.get(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
raw() {
|
||||||
|
return this.rawHeaders;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Headers;
|
47
classes/response.js
Normal file
47
classes/response.js
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
const stream = require('stream');
|
||||||
|
const Headers = require('./headers.js');
|
||||||
|
|
||||||
|
class Response {
|
||||||
|
constructor(raw, ejectSelfFromCache, fromCache) {
|
||||||
|
Object.assign(this, raw);
|
||||||
|
this.ejectSelfFromCache = ejectSelfFromCache;
|
||||||
|
this.headers = new Headers(raw.headers);
|
||||||
|
this.fromCache = fromCache;
|
||||||
|
this.bodyUsed = false;
|
||||||
|
|
||||||
|
if (this.bodyBuffer.type === 'Buffer') {
|
||||||
|
this.bodyBuffer = Buffer.from(this.bodyBuffer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
get body() {
|
||||||
|
return stream.Readable.from(this.bodyBuffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
consumeBody() {
|
||||||
|
if (this.bodyUsed) {
|
||||||
|
throw new Error('Error: body used already');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.bodyUsed = true;
|
||||||
|
return this.bodyBuffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
async text() {
|
||||||
|
return this.consumeBody().toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
async json() {
|
||||||
|
return JSON.parse(this.consumeBody().toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
async buffer() {
|
||||||
|
return this.consumeBody();
|
||||||
|
}
|
||||||
|
|
||||||
|
ejectFromCache() {
|
||||||
|
return this.ejectSelfFromCache();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Response;
|
138
index.js
138
index.js
@ -1,75 +1,115 @@
|
|||||||
const fetch = require('node-fetch');
|
const fetch = require('node-fetch');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
|
const { URLSearchParams } = require('url');
|
||||||
const crypto = require('crypto');
|
const crypto = require('crypto');
|
||||||
const path = require('path');
|
const Response = require('./classes/response.js');
|
||||||
|
const MemoryCache = require('./classes/caching/memory_cache.js');
|
||||||
|
const FileSystemCache = require('./classes/caching/file_system_cache.js');
|
||||||
|
|
||||||
|
const CACHE_VERSION = 2;
|
||||||
|
|
||||||
function md5(str) {
|
function md5(str) {
|
||||||
return crypto.createHash('md5').update(str).digest('hex');
|
return crypto.createHash('md5').update(str).digest('hex');
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getResponse(cacheDirPath, requestArguments, bodyFunctionName) {
|
// Since the bounday in FormData is random,
|
||||||
const [url, requestInit, ...rest] = requestArguments;
|
// we ignore it for purposes of calculating
|
||||||
const requestParams = requestInit && requestInit.body
|
// the cache key.
|
||||||
? ({ ...requestInit, body: typeof requestInit.body === 'object' ? requestInit.body.toString() : requestInit.body })
|
function getFormDataCacheKey(formData) {
|
||||||
: requestInit;
|
const cacheKey = { ...formData };
|
||||||
|
const boundary = formData.getBoundary();
|
||||||
|
|
||||||
const cacheHash = md5(JSON.stringify([url, requestParams, ...rest]) + bodyFunctionName);
|
// eslint-disable-next-line no-underscore-dangle
|
||||||
const cachedFilePath = path.join(cacheDirPath, `${cacheHash}.json`);
|
delete cacheKey._boundary;
|
||||||
|
|
||||||
try {
|
const boundaryReplaceRegex = new RegExp(boundary, 'g');
|
||||||
const body = JSON.parse(await fs.promises.readFile(cachedFilePath));
|
|
||||||
if (bodyFunctionName === 'buffer') {
|
// eslint-disable-next-line no-underscore-dangle
|
||||||
return Buffer.from(body);
|
cacheKey._streams = cacheKey._streams.map((s) => {
|
||||||
|
if (typeof s === 'string') {
|
||||||
|
return s.replace(boundaryReplaceRegex, '');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return s;
|
||||||
|
});
|
||||||
|
|
||||||
|
return cacheKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getBodyCacheKeyJson(body) {
|
||||||
|
if (!body) {
|
||||||
return body;
|
return body;
|
||||||
} catch (err) {
|
} if (typeof body === 'string') {
|
||||||
const fetchResponse = await fetch(...requestArguments);
|
return body;
|
||||||
const bodyResponse = await fetchResponse[bodyFunctionName]();
|
} if (body instanceof URLSearchParams) {
|
||||||
await fs.promises.writeFile(cachedFilePath, JSON.stringify(bodyResponse));
|
return body.toString();
|
||||||
return bodyResponse;
|
} if (body instanceof fs.ReadStream) {
|
||||||
|
return body.path;
|
||||||
|
} if (body.toString && body.toString() === '[object FormData]') {
|
||||||
|
return getFormDataCacheKey(body);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
|
||||||
}
|
}
|
||||||
|
|
||||||
class ResponseWrapper {
|
function getCacheKey(requestArguments) {
|
||||||
constructor(cacheDirPath, requestArguments) {
|
const resource = requestArguments[0];
|
||||||
this.cacheDirPath = cacheDirPath;
|
const init = requestArguments[1] || {};
|
||||||
this.requestArguments = requestArguments;
|
|
||||||
|
if (typeof resource !== 'string') {
|
||||||
|
throw new Error('The first argument must be a string (fetch.Request is not supported).');
|
||||||
}
|
}
|
||||||
|
|
||||||
text() {
|
const resourceCacheKeyJson = { url: resource };
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.text.name);
|
const initCacheKeyJson = { ...init };
|
||||||
}
|
|
||||||
|
|
||||||
json() {
|
resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.json.name);
|
initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);
|
||||||
}
|
|
||||||
|
|
||||||
buffer() {
|
return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.buffer.name);
|
|
||||||
}
|
|
||||||
|
|
||||||
textConverted() {
|
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.textConverted.name);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function createFetch(cacheDirPath) {
|
async function createRawResponse(fetchRes) {
|
||||||
let madeDir = false;
|
const buffer = await fetchRes.buffer();
|
||||||
|
|
||||||
return async (...args) => {
|
return {
|
||||||
if (!madeDir) {
|
status: fetchRes.status,
|
||||||
try {
|
statusText: fetchRes.statusText,
|
||||||
await fs.promises.mkdir(cacheDirPath, { recursive: true });
|
type: fetchRes.type,
|
||||||
} catch (err) {
|
url: fetchRes.url,
|
||||||
// Ignore.
|
ok: fetchRes.ok,
|
||||||
}
|
headers: fetchRes.headers.raw(),
|
||||||
|
redirected: fetchRes.redirected,
|
||||||
madeDir = true;
|
bodyBuffer: buffer,
|
||||||
}
|
|
||||||
|
|
||||||
return new ResponseWrapper(cacheDirPath, args);
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = createFetch;
|
async function getResponse(cache, requestArguments) {
|
||||||
|
const cacheKey = getCacheKey(requestArguments);
|
||||||
|
const cachedValue = await cache.get(cacheKey);
|
||||||
|
|
||||||
|
const ejectSelfFromCache = () => cache.remove(cacheKey);
|
||||||
|
|
||||||
|
if (cachedValue) {
|
||||||
|
return new Response(cachedValue, ejectSelfFromCache, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetchResponse = await fetch(...requestArguments);
|
||||||
|
const rawResponse = await createRawResponse(fetchResponse);
|
||||||
|
await cache.set(cacheKey, rawResponse);
|
||||||
|
return new Response(rawResponse, ejectSelfFromCache, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
function createFetchWithCache(cache) {
|
||||||
|
const fetchCache = (...args) => getResponse(cache, args);
|
||||||
|
fetchCache.withCache = createFetchWithCache;
|
||||||
|
|
||||||
|
return fetchCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultFetch = createFetchWithCache(new MemoryCache());
|
||||||
|
|
||||||
|
module.exports = defaultFetch;
|
||||||
|
module.exports.fetchBuilder = defaultFetch;
|
||||||
|
module.exports.MemoryCache = MemoryCache;
|
||||||
|
module.exports.FileSystemCache = FileSystemCache;
|
||||||
|
2172
package-lock.json
generated
2172
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
18
package.json
18
package.json
@ -1,10 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "node-fetch-cache",
|
"name": "node-fetch-cache",
|
||||||
"version": "1.0.6",
|
"version": "2.0.0",
|
||||||
"description": "node-fetch with a persistent cache.",
|
"description": "node-fetch with a persistent cache.",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
"test": "mocha --timeout 10000 --exit",
|
||||||
|
"coverage": "nyc --reporter=lcov --reporter=text npm test",
|
||||||
"lint": "./node_modules/.bin/eslint .",
|
"lint": "./node_modules/.bin/eslint .",
|
||||||
"lintfix": "./node_modules/.bin/eslint . --fix"
|
"lintfix": "./node_modules/.bin/eslint . --fix"
|
||||||
},
|
},
|
||||||
@ -28,9 +29,20 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"eslint": "^6.8.0",
|
"eslint": "^6.8.0",
|
||||||
"eslint-config-airbnb-base": "^14.1.0",
|
"eslint-config-airbnb-base": "^14.1.0",
|
||||||
"eslint-plugin-import": "^2.20.2"
|
"eslint-plugin-import": "^2.20.2",
|
||||||
|
"form-data": "^3.0.0",
|
||||||
|
"husky": "^4.3.0",
|
||||||
|
"mocha": "^8.2.1",
|
||||||
|
"nyc": "^15.1.0",
|
||||||
|
"rimraf": "^3.0.2"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"fpersist": "^1.0.5",
|
||||||
"node-fetch": "*"
|
"node-fetch": "*"
|
||||||
|
},
|
||||||
|
"husky": {
|
||||||
|
"hooks": {
|
||||||
|
"pre-commit": "npm run lint && npm test"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
BIN
test/expected_png.png
Normal file
BIN
test/expected_png.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 7.9 KiB |
436
test/tests.js
Normal file
436
test/tests.js
Normal file
@ -0,0 +1,436 @@
|
|||||||
|
const fs = require('fs');
|
||||||
|
const FormData = require('form-data');
|
||||||
|
const assert = require('assert');
|
||||||
|
const rimraf = require('rimraf');
|
||||||
|
const path = require('path');
|
||||||
|
const { URLSearchParams } = require('url');
|
||||||
|
const standardFetch = require('node-fetch');
|
||||||
|
const FetchCache = require('../index.js');
|
||||||
|
|
||||||
|
const CACHE_PATH = path.join(__dirname, '..', '.cache');
|
||||||
|
const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));
|
||||||
|
|
||||||
|
const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';
|
||||||
|
const FOUR_HUNDRED_URL = 'https://httpbin.org/status/400';
|
||||||
|
const THREE_HUNDRED_TWO_URL = 'https://httpbin.org/status/302';
|
||||||
|
const TEXT_BODY_URL = 'https://httpbin.org/robots.txt';
|
||||||
|
const JSON_BODY_URL = 'https://httpbin.org/json';
|
||||||
|
const PNG_BODY_URL = 'https://httpbin.org/image/png';
|
||||||
|
|
||||||
|
const TEXT_BODY_EXPECTED = 'User-agent: *\nDisallow: /deny\n';
|
||||||
|
|
||||||
|
let cachedFetch;
|
||||||
|
let body;
|
||||||
|
|
||||||
|
function post(body) {
|
||||||
|
return { method: 'POST', body };
|
||||||
|
}
|
||||||
|
|
||||||
|
function removeDates(arrOrObj) {
|
||||||
|
if (arrOrObj.date) {
|
||||||
|
const copy = { ...arrOrObj };
|
||||||
|
delete copy.date;
|
||||||
|
return copy;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(arrOrObj)) {
|
||||||
|
if (Array.isArray(arrOrObj[0])) {
|
||||||
|
return arrOrObj.filter(e => e[0] !== 'date');
|
||||||
|
}
|
||||||
|
|
||||||
|
return arrOrObj.filter(e => !Date.parse(e));
|
||||||
|
}
|
||||||
|
|
||||||
|
return arrOrObj;
|
||||||
|
}
|
||||||
|
|
||||||
|
function wait(ms) {
|
||||||
|
return new Promise((fulfill) => setTimeout(fulfill, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function dualFetch(...args) {
|
||||||
|
const [cachedFetchResponse, standardFetchResponse] = await Promise.all([
|
||||||
|
cachedFetch(...args),
|
||||||
|
standardFetch(...args),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return { cachedFetchResponse, standardFetchResponse };
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(async function() {
|
||||||
|
rimraf.sync(CACHE_PATH);
|
||||||
|
cachedFetch = FetchCache.withCache(new FetchCache.MemoryCache());
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Basic property tests', function() {
|
||||||
|
it('Has a status property', async function() {
|
||||||
|
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||||
|
|
||||||
|
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Has a statusText property', async function() {
|
||||||
|
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
|
||||||
|
|
||||||
|
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Has a url property', async function() {
|
||||||
|
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
|
||||||
|
|
||||||
|
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Has an ok property', async function() {
|
||||||
|
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(FOUR_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
|
||||||
|
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||||
|
|
||||||
|
cachedFetchResponse = await cachedFetch(FOUR_HUNDRED_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
|
||||||
|
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Has a redirected property', async function() {
|
||||||
|
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(THREE_HUNDRED_TWO_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
|
||||||
|
|
||||||
|
cachedFetchResponse = await cachedFetch(THREE_HUNDRED_TWO_URL);
|
||||||
|
assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
|
||||||
|
});
|
||||||
|
}).timeout(10000);
|
||||||
|
|
||||||
|
// Compares the cached response's Headers API surface (raw/keys/values/
// entries/get/has) against a live node-fetch response for the same URL,
// both on the cold fetch and on the subsequent cache hit.
describe('Header tests', function() {
  it('Gets correct raw headers', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    // removeDates() strips date-bearing header values, which legitimately
    // differ between the two independent requests.
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );

    // Fetch again so the response comes from the cache, then re-check.
    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );
  });

  it('Gets correct header keys', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    // NOTE(review): only the standard response's keys() is spread into an
    // array — this assumes the cached implementation already returns an
    // array while node-fetch returns an iterator. Confirm against the
    // library's Headers shim before "simplifying" this.
    assert.deepStrictEqual(cachedFetchResponse.headers.keys(), [...standardFetchResponse.headers.keys()]);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.keys(), [...standardFetchResponse.headers.keys()]);
  });

  it('Gets correct header values', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    // Same asymmetry as above: spread only on the standard side.
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.values()),
      removeDates([...standardFetchResponse.headers.values()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.values()),
      removeDates([...standardFetchResponse.headers.values()]),
    );
  });

  it('Gets correct header entries', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.entries()),
      removeDates([...standardFetchResponse.headers.entries()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.entries()),
      removeDates([...standardFetchResponse.headers.entries()]),
    );
  });

  it('Can get a header by value', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    // Sanity-check the header actually exists on the reference response,
    // otherwise the equality below would pass vacuously on two nulls.
    assert(standardFetchResponse.headers.get('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get(content_length_placeholder_do_not_use), standardFetchResponse.headers.get('content-length'));
  });

  it('Returns undefined for non-existent header', async function() {
    const headerName = 'zzzz';
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    // Both implementations must agree on the "missing header" value
    // (node-fetch returns null; the cached side must match it).
    assert(!standardFetchResponse.headers.get(headerName));
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));
  });

  it('Can get whether a header is present', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

    assert(standardFetchResponse.headers.has('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Core caching behaviour: cache hits/misses, entry ejection, and how the
// various request body types map to cache keys.
describe('Cache tests', function() {
  it('Uses cache', async function() {
    // NOTE(review): `res` is not declared in this chunk — assumed to be a
    // file-scope variable declared above; confirm it is not an implicit
    // global.
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can eject from cache', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    // Removing the entry should make the next fetch go to the network
    // again, and the one after that be a fresh cache hit.
    await res.ejectFromCache();

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Does not error if ejecting from cache twice', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    // Double ejection must be a no-op, not an error.
    await res.ejectFromCache();
    await res.ejectFromCache();
  });

  it('Gives different string bodies different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    // Different POST body -> different key -> cache miss.
    res = await cachedFetch(TWO_HUNDRED_URL, post('b'));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same string bodies same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    // Identical POST body -> same key -> cache hit.
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different URLSearchParams different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=b')));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same URLSearchParams same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    // Equal params in two distinct instances must still collide on the key.
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different read streams different cache keys', async function() {
    // Two streams over two different files on disk.
    const s1 = fs.createReadStream(__filename);
    const s2 = fs.createReadStream(path.join(__dirname, '..', 'index.js'));

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s2));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives the same read streams the same cache key', async function() {
    const s1 = fs.createReadStream(__filename);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    // NOTE(review): s1 was already consumed by the first request, so the
    // second call presumably keys on the stream's identity/path rather
    // than its contents — confirm against the key-derivation code.
    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different form data different cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('b', 'b');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same form data same cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('a', 'a');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    // NOTE(review): two FormData instances normally serialize with
    // different random boundaries; a hit here implies the key ignores the
    // boundary — confirm.
    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, true);
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Body handling: rejected input types, one-shot body consumption, and
// text/JSON/buffer/stream body retrieval both from the network and from
// the cache.
describe('Data tests', function() {
  it('Does not support Request objects', async function() {
    try {
      const request = new standardFetch.Request('https://google.com');
      await cachedFetch(request);
      // If cachedFetch did not throw, fail via the catch below (this
      // message does not contain the expected substring).
      throw new Error('The above line should have thrown.');
    } catch (err) {
      assert(err.message.includes('The first argument must be a string (fetch.Request is not supported).'));
    }
  });

  it('Refuses to consume body twice', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    await res.text();

    // A second read of the same body must reject like node-fetch does.
    try {
      await res.text();
      throw new Error('The above line should have thrown.');
    } catch (err) {
      assert(err.message.includes('Error: body used already'));
    }
  });

  it('Can get text body', async function() {
    // Cold fetch: network response.
    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, false);

    // Warm fetch: same text must round-trip through the cache.
    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get JSON body', async function() {
    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get PNG buffer body', async function() {
    // Binary bodies must match the reference PNG byte-for-byte.
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can stream a body', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    // res.body must be async-iterable even for the network response.
    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    // ...and for the cached response as well.
    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, true);
  });

  it('Errors if the body type is not supported', async function() {
    try {
      // A plain object is not a supported body type.
      await cachedFetch(TEXT_BODY_URL, { body: {} });
      throw new Error('It was supposed to throw');
    } catch (err) {
      assert(err.message.includes('Unsupported body type'));
    }
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Exercises the in-memory cache backend's time-to-live support.
describe('Memory cache tests', function() {
  it('Supports TTL', async function() {
    // Use a very short TTL so the entry expires within the test.
    cachedFetch = FetchCache.withCache(new FetchCache.MemoryCache({ ttl: 100 }));

    const fetchAndCheck = async (expectedFromCache) => {
      const response = await cachedFetch(TWO_HUNDRED_URL);
      assert.strictEqual(response.fromCache, expectedFromCache);
    };

    await fetchAndCheck(false); // cold: goes to the network
    await fetchAndCheck(true);  // warm: served from the cache

    // Let the TTL elapse; the cached entry should be gone.
    await wait(200);
    await fetchAndCheck(false);
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Exercises the file-system cache backend: TTL expiry and binary bodies.
describe('File system cache tests', function() {
  it('Supports TTL', async function() {
    // Short TTL so the cached entry expires mid-test.
    cachedFetch = FetchCache.withCache(new FetchCache.FileSystemCache({ ttl: 100 }));

    let res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false); // cold: network
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true); // warm: cache hit

    // Outlive the TTL; the entry should be evicted.
    await wait(200);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
  });

  it('Can get PNG buffer body', async function() {
    cachedFetch = FetchCache.withCache(new FetchCache.FileSystemCache());

    // Cold fetch: the body must match the reference PNG byte-for-byte.
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    // Warm fetch: the buffer must round-trip through the on-disk cache.
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });
  // Fix: this was the only describe without an extended timeout. Disk-backed
  // tests are the slowest in this suite and could flake under mocha's
  // default 2000 ms limit; match the 10 s used by every sibling describe.
}).timeout(10000);
|
Reference in New Issue
Block a user