Compare commits
62 Commits
2.0
...
renovate/c
Author | SHA1 | Date | |
---|---|---|---|
0e0925d4a3 | |||
360dc102bb | |||
0c643e9b87 | |||
a9f7d70cd0 | |||
07777a74f2 | |||
d5382860f3 | |||
73601127b2 | |||
eb86e3e4c5 | |||
![]() |
51e15ad046 | ||
![]() |
571b65cdd2 | ||
![]() |
b0348460a7 | ||
![]() |
44a0ffb8f3 | ||
![]() |
41d52f39c8 | ||
![]() |
3b92b39430 | ||
![]() |
b234de4b69 | ||
![]() |
44786dfb68 | ||
![]() |
5f48f0fc8a | ||
![]() |
4f93c9ba1c | ||
![]() |
faf73e49d8 | ||
![]() |
0173de3f6d | ||
![]() |
decb904331 | ||
![]() |
7f502a98a2 | ||
![]() |
18b276c5dd | ||
![]() |
cd6f4f01e3 | ||
![]() |
fee0cdd085 | ||
![]() |
2bc4cd6de0 | ||
![]() |
51e5754c3e | ||
![]() |
29de8ee068 | ||
![]() |
f1128f2f64 | ||
![]() |
0397d759dd | ||
![]() |
a68dd5b0d0 | ||
![]() |
ba8ff5d257 | ||
![]() |
9c40ddf786 | ||
![]() |
9cea80be47 | ||
![]() |
a386a54fbb | ||
![]() |
b7190b933a | ||
![]() |
55b92ee40f | ||
![]() |
6607a71af6 | ||
![]() |
0f5f1edb22 | ||
![]() |
d6f73c4778 | ||
![]() |
0c8c0d4520 | ||
![]() |
2f14847a01 | ||
![]() |
e3ecf3b981 | ||
![]() |
05a146fef1 | ||
![]() |
dba1e2fbd3 | ||
![]() |
ad572f3f29 | ||
![]() |
6023b56164 | ||
![]() |
675ed9bbf2 | ||
![]() |
998d18ae84 | ||
![]() |
d5ab4d4b18 | ||
![]() |
847b330489 | ||
![]() |
dae57d2604 | ||
![]() |
6cd42272c4 | ||
![]() |
e8ad8da0bb | ||
![]() |
45ca35f057 | ||
![]() |
af1e977620 | ||
![]() |
ef5939f571 | ||
![]() |
e356e485b6 | ||
![]() |
a11a135234 | ||
![]() |
524308aa61 | ||
![]() |
bffaa2aadc | ||
![]() |
b8da556091 |
1
.eslintignore
Normal file
1
.eslintignore
Normal file
@ -0,0 +1 @@
|
|||||||
|
test
|
@ -1,4 +1,4 @@
|
|||||||
module.exports = {
|
{
|
||||||
"env": {
|
"env": {
|
||||||
"commonjs": true,
|
"commonjs": true,
|
||||||
"es6": true,
|
"es6": true,
|
||||||
@ -15,5 +15,7 @@ module.exports = {
|
|||||||
"ecmaVersion": 2018
|
"ecmaVersion": 2018
|
||||||
},
|
},
|
||||||
"rules": {
|
"rules": {
|
||||||
|
"import/extensions": "off",
|
||||||
|
"import/prefer-default-export": "off"
|
||||||
}
|
}
|
||||||
};
|
}
|
23
.gitea/workflows/ci.yml
Normal file
23
.gitea/workflows/ci.yml
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
name: CI Pipeline
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
node-version: [20.x]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Use Node.js
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: ${{ matrix.node-version }}
|
||||||
|
registry-url: "https://registry.npmjs.org"
|
||||||
|
- run: npm ci
|
||||||
|
- run: npm publish
|
||||||
|
env:
|
||||||
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
5
.gitignore
vendored
5
.gitignore
vendored
@ -102,3 +102,8 @@ dist
|
|||||||
|
|
||||||
# TernJS port file
|
# TernJS port file
|
||||||
.tern-port
|
.tern-port
|
||||||
|
|
||||||
|
# Other
|
||||||
|
.cache
|
||||||
|
|
||||||
|
commonjs
|
||||||
|
10
.npmignore
10
.npmignore
@ -1,2 +1,8 @@
|
|||||||
.eslintrc.js
|
.eslintrc.json
|
||||||
.vscode
|
test
|
||||||
|
.cache
|
||||||
|
.nyc_output
|
||||||
|
.github
|
||||||
|
.eslintignore
|
||||||
|
.vscode
|
||||||
|
coverage
|
||||||
|
22
.vscode/launch.json
vendored
Normal file
22
.vscode/launch.json
vendored
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
// Use IntelliSense to learn about possible attributes.
|
||||||
|
// Hover to view descriptions of existing attributes.
|
||||||
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"args": [
|
||||||
|
"--colors",
|
||||||
|
"${workspaceFolder}/test"
|
||||||
|
],
|
||||||
|
"internalConsoleOptions": "openOnSessionStart",
|
||||||
|
"name": "Mocha Tests",
|
||||||
|
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
|
||||||
|
"request": "launch",
|
||||||
|
"skipFiles": [
|
||||||
|
"<node_internals>/**"
|
||||||
|
],
|
||||||
|
"type": "pwa-node"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
167
README.md
167
README.md
@ -1,45 +1,172 @@
|
|||||||
# node-fetch-cache
|
# node-fetch-cache
|
||||||
|
|
||||||
node-fetch with caching to a directory on disk.
|
Forked from: <https://github.com/mistval/node-fetch-cache>
|
||||||
|
|
||||||
The first usage with any given arguments will result in an HTTP request and any subsequent usage with the same arguments and body function (text, json, buffer, or textConverted) will read the response body from the cache on disk.
|
node-fetch with caching of responses.
|
||||||
|
|
||||||
|
The first fetch with any given arguments will result in an HTTP request and any subsequent fetch with the same arguments will read the response from the cache.
|
||||||
|
|
||||||
|
By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache. See the **Cache Customization** section for more info.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
Require it with a directory path to cache in, and then use it the same way you would use fetch.
|
Require it and use it the same way you would use node-fetch:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const fetch = require('node-fetch-cache')('./path/to/cache/dir');
|
import fetch from "node-fetch-cache";
|
||||||
|
|
||||||
fetch('http://google.com')
|
fetch("http://google.com")
|
||||||
.then(response => response.text())
|
.then((response) => response.text())
|
||||||
.then(text => console.log(text));
|
.then((text) => console.log(text));
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The next time you `fetch('http://google.com')`, the response will be returned from the cache. No HTTP request will be made.
|
||||||
|
|
||||||
## API
|
## API
|
||||||
|
|
||||||
Note that this does not support the full fetch API. Headers and some other things are not accessible.
|
This module's fetch function has almost the exact same API as node-fetch, and you should consult [the node-fetch documentation](https://www.npmjs.com/package/node-fetch) for how to use it.
|
||||||
|
|
||||||
### async fetch(resource [, init])
|
This module just adds one extra function to the response object:
|
||||||
|
|
||||||
Same arguments as [browser fetch](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch).
|
### res.ejectFromCache(): Promise\<void\>
|
||||||
|
|
||||||
Returns a **CachedResponse**.
|
This function can be used to eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.
|
||||||
|
|
||||||
### async CachedResponse.text()
|
This module caches ALL responses, even those with 4xx and 5xx response statuses. You can use this function to uncache such responses if desired. For example:
|
||||||
|
|
||||||
Returns the body as a string.
|
```js
|
||||||
|
import fetch from "node-fetch-cache";
|
||||||
|
|
||||||
### async CachedResponse.json()
|
fetch("http://google.com")
|
||||||
|
.then(async (response) => {
|
||||||
|
if (!response.ok) {
|
||||||
|
await response.ejectFromCache();
|
||||||
|
throw new Error("Non-okay response from google.com");
|
||||||
|
} else {
|
||||||
|
return response.text();
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.then((text) => console.log(text));
|
||||||
|
```
|
||||||
|
|
||||||
Returns the body as a JavaScript object, parsed from JSON.
|
## Cache Customization
|
||||||
|
|
||||||
### async CachedResponse.buffer()
|
By default responses are cached in memory, but you can also cache to files on disk, or implement your own cache.
|
||||||
|
|
||||||
Returns the body as a Buffer.
|
### MemoryCache
|
||||||
|
|
||||||
### async CachedResponse.textConverted()
|
This is the default cache delegate. It caches responses in-process in a POJO.
|
||||||
|
|
||||||
Identical to CachedResponse.text(), except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible.
|
Usage:
|
||||||
|
|
||||||
(textConverted requires an optional dependency on [npm package encoding](https://www.npmjs.com/package/encoding), which you need to install manually.
|
```js
|
||||||
|
import { fetchBuilder, MemoryCache } from "node-fetch-cache";
|
||||||
|
const fetch = fetchBuilder.withCache(new MemoryCache(options));
|
||||||
|
```
|
||||||
|
|
||||||
|
Options:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that by default (if you don't use `withCache()`) a **shared** MemoryCache will be used (you can import this module in multiple files and they will all share the same cache). If you instantiate and provide a `new MemoryCache()` as shown above however, the cache is _NOT_ shared unless you explicitly pass it around and pass it into `withCache()` in each of your source files.
|
||||||
|
|
||||||
|
### FileSystemCache
|
||||||
|
|
||||||
|
Cache to a directory on disk. This allows the cache to survive the process exiting.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetchBuilder, FileSystemCache } from "node-fetch-cache";
|
||||||
|
const fetch = fetchBuilder.withCache(new FileSystemCache(options));
|
||||||
|
```
|
||||||
|
|
||||||
|
Options:
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
cacheDirectory: '/my/cache/directory/path', // Specify where to keep the cache. If undefined, '.cache' is used by default. If this directory does not exist, it will be created.
|
||||||
|
ttl: 1000, // Time to live. How long (in ms) responses remain cached before being automatically ejected. If undefined, responses are never automatically ejected from the cache.
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Provide Your Own
|
||||||
|
|
||||||
|
You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, bodyStream, bodyMeta)`, `get(key)`, and `remove(key)` functions.
|
||||||
|
|
||||||
|
Check the built-in [MemoryCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/memory_cache.js) and [FileSystemCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/file_system_cache.js) for examples.
|
||||||
|
|
||||||
|
The set function must accept a key (which will be a string), a body stream, and a metadata object (which will be a JSON-serializable JS object). It must store these, and then return an object with a `bodyStream` property, containing a fresh, unread stream of the body content, as well as a `metaData` property, containing the same metaData that was passed in.
|
||||||
|
|
||||||
|
The get function should accept a key and return undefined if no cached value is found, or else an object with a `bodyStream` property, containing a stream of the body content, as well as a `metaData` property, containing the metadata that was stored via the `set(key, bodyStream, bodyMeta)` function.
|
||||||
|
|
||||||
|
The remove function should accept a key and remove the cached value associated with that key, if any. It is also safe for your caching delegate to remove values from the cache arbitrarily if desired (for example if you want to implement a TTL in the caching delegate).
|
||||||
|
|
||||||
|
All three functions may be async.
|
||||||
|
|
||||||
|
## Misc Tips
|
||||||
|
|
||||||
|
### Streaming
|
||||||
|
|
||||||
|
This module does not support Stream request bodies, except for fs.ReadStream. And when using fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**.
|
||||||
|
|
||||||
|
Streams don't quite play nice with the concept of caching based on request characteristics, because we would have to read the stream to the end to find out what's in it and hash it into a proper cache key.
|
||||||
|
|
||||||
|
### Request Concurrency
|
||||||
|
|
||||||
|
Requests with the same cache key are queued. For example, you might wonder if making the same request 100 times simultaneously would result in 100 HTTP requests:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import fetch from "node-fetch-cache";
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
Array(100)
|
||||||
|
.fill()
|
||||||
|
.map(() => fetch("https://google.com"))
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
The answer is no. Only one request would be made, and 99 of the `fetch()`s will read the response from the cache.
|
||||||
|
|
||||||
|
### Cache-Control: only-if-cached Requests
|
||||||
|
|
||||||
|
The HTTP standard describes a [Cache-Control request header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#request_directives) to control certain aspects of cache behavior. Node-fetch ignores these, but node-fetch-cache respects the `Cache-Control: only-if-cached` directive. When `only-if-cached` is specified, node-fetch-cache will return `undefined` if there is no cached response. No HTTP request will be made. For example:
|
||||||
|
|
||||||
|
```js
|
||||||
|
import fetch from "node-fetch-cache";
|
||||||
|
|
||||||
|
const response = await fetch("https://google.com", {
|
||||||
|
headers: { "Cache-Control": "only-if-cached" },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response === undefined) {
|
||||||
|
// No response was found in the cache
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that this is slightly different from browser fetch, which returns a `504 Gateway Timeout` response if no cached response is available.
|
||||||
|
|
||||||
|
### Calculating the Cache Key
|
||||||
|
|
||||||
|
This module exports a `getCacheKey()` function to calculate a cache key string from request parameters, which may be useful for enabling some advanced use cases (especially if you want to call cache functions directly). Call `getCacheKey()` exactly like you would call `fetch()`.
|
||||||
|
|
||||||
|
```js
|
||||||
|
import { fetchBuilder, MemoryCache, getCacheKey } from "node-fetch-cache";
|
||||||
|
|
||||||
|
const cache = new MemoryCache();
|
||||||
|
const fetch = fetchBuilder.withCache(cache);
|
||||||
|
|
||||||
|
const rawCacheData = await cache.get(getCacheKey("https://google.com"));
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bugs / Help / Feature Requests / Contributing
|
||||||
|
|
||||||
|
For feature requests or help, please visit [the discussions page on GitHub](https://github.com/mistval/node-fetch-cache/discussions).
|
||||||
|
|
||||||
|
For bug reports, please file an issue on [the issues page on GitHub](https://github.com/mistval/node-fetch-cache/issues).
|
||||||
|
|
||||||
|
Contributions welcome! Please open a [pull request on GitHub](https://github.com/mistval/node-fetch-cache/pulls) with your changes. You can run them by me first on [the discussions page](https://github.com/mistval/node-fetch-cache/discussions) if you'd like.
|
||||||
|
4
commonjs/wrapper.cjs
Normal file
4
commonjs/wrapper.cjs
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
const mod = require('./index.cjs');
|
||||||
|
|
||||||
|
module.exports = mod.default;
|
||||||
|
Object.assign(module.exports, mod);
|
75
index.js
75
index.js
@ -1,75 +0,0 @@
|
|||||||
const fetch = require('node-fetch');
|
|
||||||
const fs = require('fs');
|
|
||||||
const crypto = require('crypto');
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
function md5(str) {
|
|
||||||
return crypto.createHash('md5').update(str).digest('hex');
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getResponse(cacheDirPath, requestArguments, bodyFunctionName) {
|
|
||||||
const [url, requestInit, ...rest] = requestArguments;
|
|
||||||
const requestParams = requestInit && requestInit.body
|
|
||||||
? ({ ...requestInit, body: typeof requestInit.body === 'object' ? requestInit.body.toString() : requestInit.body })
|
|
||||||
: requestInit;
|
|
||||||
|
|
||||||
const cacheHash = md5(JSON.stringify([url, requestParams, ...rest]) + bodyFunctionName);
|
|
||||||
const cachedFilePath = path.join(cacheDirPath, `${cacheHash}.json`);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = JSON.parse(await fs.promises.readFile(cachedFilePath));
|
|
||||||
if (bodyFunctionName === 'buffer') {
|
|
||||||
return Buffer.from(body);
|
|
||||||
}
|
|
||||||
|
|
||||||
return body;
|
|
||||||
} catch (err) {
|
|
||||||
const fetchResponse = await fetch(...requestArguments);
|
|
||||||
const bodyResponse = await fetchResponse[bodyFunctionName]();
|
|
||||||
await fs.promises.writeFile(cachedFilePath, JSON.stringify(bodyResponse));
|
|
||||||
return bodyResponse;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class ResponseWrapper {
|
|
||||||
constructor(cacheDirPath, requestArguments) {
|
|
||||||
this.cacheDirPath = cacheDirPath;
|
|
||||||
this.requestArguments = requestArguments;
|
|
||||||
}
|
|
||||||
|
|
||||||
text() {
|
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.text.name);
|
|
||||||
}
|
|
||||||
|
|
||||||
json() {
|
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.json.name);
|
|
||||||
}
|
|
||||||
|
|
||||||
buffer() {
|
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.buffer.name);
|
|
||||||
}
|
|
||||||
|
|
||||||
textConverted() {
|
|
||||||
return getResponse(this.cacheDirPath, this.requestArguments, this.textConverted.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function createFetch(cacheDirPath) {
|
|
||||||
let madeDir = false;
|
|
||||||
|
|
||||||
return async (...args) => {
|
|
||||||
if (!madeDir) {
|
|
||||||
try {
|
|
||||||
await fs.promises.mkdir(cacheDirPath, { recursive: true });
|
|
||||||
} catch (err) {
|
|
||||||
// Ignore.
|
|
||||||
}
|
|
||||||
|
|
||||||
madeDir = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
return new ResponseWrapper(cacheDirPath, args);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = createFetch;
|
|
7732
package-lock.json
generated
7732
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
53
package.json
53
package.json
@ -1,16 +1,28 @@
|
|||||||
{
|
{
|
||||||
"name": "node-fetch-cache",
|
"name": "@fascinated/node-fetch-cache",
|
||||||
"version": "1.0.6",
|
"version": "3.1.3",
|
||||||
"description": "node-fetch with a persistent cache.",
|
"description": "node-fetch with caching.",
|
||||||
"main": "index.js",
|
"main": "src/index.js",
|
||||||
|
"type": "module",
|
||||||
|
"publishConfig": {
|
||||||
|
"access": "public",
|
||||||
|
"registry": "https://registry.npmjs.org/"
|
||||||
|
},
|
||||||
|
"exports": {
|
||||||
|
"import": "./src/index.js",
|
||||||
|
"require": "./commonjs/wrapper.cjs"
|
||||||
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
"buildcjs": "rollup src/index.js --file commonjs/index.cjs --format cjs",
|
||||||
"lint": "./node_modules/.bin/eslint .",
|
"test": "npm run lintfix && npm run buildcjs && mocha --timeout 10000 --exit",
|
||||||
"lintfix": "./node_modules/.bin/eslint . --fix"
|
"coverage": "nyc --reporter=lcov --reporter=text npm test",
|
||||||
|
"lint": "eslint .",
|
||||||
|
"lintfix": "eslint . --fix",
|
||||||
|
"prepublishOnly": "npm test"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "git+https://github.com/mistval/node-fetch-cache.git"
|
"url": "git+https://git.fascinated.cc/Fascinated/node-fetch-cache.git"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"node",
|
"node",
|
||||||
@ -22,15 +34,28 @@
|
|||||||
"author": "mistval",
|
"author": "mistval",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://github.com/mistval/node-fetch-cache/issues"
|
"url": "https://git.fascinated.cc/Fascinated/node-fetch-cache/issues"
|
||||||
},
|
},
|
||||||
"homepage": "https://github.com/mistval/node-fetch-cache#readme",
|
"homepage": "https://git.fascinated.cc/Fascinated/node-fetch-cache#readme",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"eslint": "^6.8.0",
|
"eslint": "^8.9.0",
|
||||||
"eslint-config-airbnb-base": "^14.1.0",
|
"eslint-config-airbnb-base": "^15.0.0",
|
||||||
"eslint-plugin-import": "^2.20.2"
|
"eslint-plugin-import": "^2.25.4",
|
||||||
|
"form-data": "^3.0.0",
|
||||||
|
"husky": "^8.0.3",
|
||||||
|
"mocha": "^9.2.1",
|
||||||
|
"nyc": "^15.1.0",
|
||||||
|
"rimraf": "^3.0.2",
|
||||||
|
"rollup": "^2.53.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"node-fetch": "*"
|
"cacache": "^15.2.0",
|
||||||
|
"locko": "^1.0.0",
|
||||||
|
"node-fetch": "2.6.11"
|
||||||
|
},
|
||||||
|
"husky": {
|
||||||
|
"hooks": {
|
||||||
|
"pre-commit": "npm run lint && npm test"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
6
renovate.json
Normal file
6
renovate.json
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||||
|
"extends": [
|
||||||
|
"local>Fascinated/renovate-config"
|
||||||
|
]
|
||||||
|
}
|
84
src/classes/caching/file_system_cache.js
Normal file
84
src/classes/caching/file_system_cache.js
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
import cacache from 'cacache';
|
||||||
|
import { Readable } from 'stream';
|
||||||
|
|
||||||
|
function getBodyAndMetaKeys(key) {
|
||||||
|
return [`${key}body`, `${key}meta`];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class FileSystemCache {
|
||||||
|
constructor(options = {}) {
|
||||||
|
this.ttl = options.ttl;
|
||||||
|
this.cacheDirectory = options.cacheDirectory || '.cache';
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key) {
|
||||||
|
const [, metaKey] = getBodyAndMetaKeys(key);
|
||||||
|
|
||||||
|
const metaInfo = await cacache.get.info(this.cacheDirectory, metaKey);
|
||||||
|
|
||||||
|
if (!metaInfo) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const metaBuffer = await cacache.get.byDigest(this.cacheDirectory, metaInfo.integrity);
|
||||||
|
const metaData = JSON.parse(metaBuffer);
|
||||||
|
const { bodyStreamIntegrity, empty, expiration } = metaData;
|
||||||
|
|
||||||
|
delete metaData.bodyStreamIntegrity;
|
||||||
|
delete metaData.empty;
|
||||||
|
delete metaData.expiration;
|
||||||
|
|
||||||
|
if (expiration && expiration < Date.now()) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const bodyStream = empty
|
||||||
|
? Readable.from(Buffer.alloc(0))
|
||||||
|
: cacache.get.stream.byDigest(this.cacheDirectory, bodyStreamIntegrity);
|
||||||
|
|
||||||
|
return {
|
||||||
|
bodyStream,
|
||||||
|
metaData,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
remove(key) {
|
||||||
|
const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
|
||||||
|
|
||||||
|
return Promise.all([
|
||||||
|
cacache.rm.entry(this.cacheDirectory, bodyKey),
|
||||||
|
cacache.rm.entry(this.cacheDirectory, metaKey),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key, bodyStream, metaData) {
|
||||||
|
const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
|
||||||
|
const metaCopy = { ...metaData };
|
||||||
|
|
||||||
|
if (typeof this.ttl === 'number') {
|
||||||
|
metaCopy.expiration = Date.now() + this.ttl;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
metaCopy.bodyStreamIntegrity = await new Promise((fulfill, reject) => {
|
||||||
|
bodyStream.pipe(cacache.put.stream(this.cacheDirectory, bodyKey))
|
||||||
|
.on('integrity', (i) => fulfill(i))
|
||||||
|
.on('error', (e) => {
|
||||||
|
reject(e);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
} catch (err) {
|
||||||
|
if (err.code !== 'ENODATA') {
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
metaCopy.empty = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const metaBuffer = Buffer.from(JSON.stringify(metaCopy));
|
||||||
|
await cacache.put(this.cacheDirectory, metaKey, metaBuffer);
|
||||||
|
const cachedData = await this.get(key);
|
||||||
|
|
||||||
|
return cachedData;
|
||||||
|
}
|
||||||
|
}
|
16
src/classes/caching/key_timeout.js
Normal file
16
src/classes/caching/key_timeout.js
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
export class KeyTimeout {
|
||||||
|
constructor() {
|
||||||
|
this.timeoutHandleForKey = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
clearTimeout(key) {
|
||||||
|
clearTimeout(this.timeoutHandleForKey[key]);
|
||||||
|
}
|
||||||
|
|
||||||
|
updateTimeout(key, durationMs, callback) {
|
||||||
|
this.clearTimeout(key);
|
||||||
|
this.timeoutHandleForKey[key] = setTimeout(() => {
|
||||||
|
callback();
|
||||||
|
}, durationMs);
|
||||||
|
}
|
||||||
|
}
|
47
src/classes/caching/memory_cache.js
Normal file
47
src/classes/caching/memory_cache.js
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
import { Readable } from 'stream';
|
||||||
|
import { KeyTimeout } from './key_timeout.js';
|
||||||
|
|
||||||
|
function streamToBuffer(stream) {
|
||||||
|
const chunks = [];
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
|
||||||
|
stream.on('error', (err) => reject(err));
|
||||||
|
stream.on('end', () => resolve(Buffer.concat(chunks)));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export class MemoryCache {
|
||||||
|
constructor(options = {}) {
|
||||||
|
this.ttl = options.ttl;
|
||||||
|
this.keyTimeout = new KeyTimeout();
|
||||||
|
this.cache = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
get(key) {
|
||||||
|
const cachedValue = this.cache[key];
|
||||||
|
if (cachedValue) {
|
||||||
|
return {
|
||||||
|
bodyStream: Readable.from(cachedValue.bodyBuffer),
|
||||||
|
metaData: cachedValue.metaData,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
remove(key) {
|
||||||
|
this.keyTimeout.clearTimeout(key);
|
||||||
|
delete this.cache[key];
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key, bodyStream, metaData) {
|
||||||
|
const bodyBuffer = await streamToBuffer(bodyStream);
|
||||||
|
this.cache[key] = { bodyBuffer, metaData };
|
||||||
|
|
||||||
|
if (typeof this.ttl === 'number') {
|
||||||
|
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.get(key);
|
||||||
|
}
|
||||||
|
}
|
29
src/classes/response.js
Normal file
29
src/classes/response.js
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
import { Response } from 'node-fetch';
|
||||||
|
|
||||||
|
const responseInternalSymbol = Object.getOwnPropertySymbols(new Response())[1];
|
||||||
|
|
||||||
|
export class NFCResponse extends Response {
|
||||||
|
constructor(bodyStream, metaData, ejectFromCache, fromCache) {
|
||||||
|
super(bodyStream, metaData);
|
||||||
|
this.ejectFromCache = ejectFromCache;
|
||||||
|
this.fromCache = fromCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
static serializeMetaFromNodeFetchResponse(res) {
|
||||||
|
const metaData = {
|
||||||
|
url: res.url,
|
||||||
|
status: res.status,
|
||||||
|
statusText: res.statusText,
|
||||||
|
headers: res.headers.raw(),
|
||||||
|
size: res.size,
|
||||||
|
timeout: res.timeout,
|
||||||
|
counter: res[responseInternalSymbol].counter,
|
||||||
|
};
|
||||||
|
|
||||||
|
return metaData;
|
||||||
|
}
|
||||||
|
|
||||||
|
ejectFromCache() {
|
||||||
|
return this.ejectSelfFromCache();
|
||||||
|
}
|
||||||
|
}
|
180
src/index.js
Normal file
180
src/index.js
Normal file
@ -0,0 +1,180 @@
|
|||||||
|
import fetch, { Request } from 'node-fetch';
|
||||||
|
import fs from 'fs';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
import locko from 'locko';
|
||||||
|
import { NFCResponse } from './classes/response.js';
|
||||||
|
import { MemoryCache } from './classes/caching/memory_cache.js';
|
||||||
|
|
||||||
|
const CACHE_VERSION = 4;
|
||||||
|
|
||||||
|
function md5(str) {
|
||||||
|
return crypto.createHash('md5').update(str).digest('hex');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Since the bounday in FormData is random,
|
||||||
|
// we ignore it for purposes of calculating
|
||||||
|
// the cache key.
|
||||||
|
function getFormDataCacheKey(formData) {
|
||||||
|
const cacheKey = { ...formData };
|
||||||
|
const boundary = formData.getBoundary();
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-underscore-dangle
|
||||||
|
delete cacheKey._boundary;
|
||||||
|
|
||||||
|
const boundaryReplaceRegex = new RegExp(boundary, 'g');
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-underscore-dangle
|
||||||
|
cacheKey._streams = cacheKey._streams.map((s) => {
|
||||||
|
if (typeof s === 'string') {
|
||||||
|
return s.replace(boundaryReplaceRegex, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
return s;
|
||||||
|
});
|
||||||
|
|
||||||
|
return cacheKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getHeadersCacheKeyJson(headersObj) {
|
||||||
|
return Object.fromEntries(
|
||||||
|
Object.entries(headersObj)
|
||||||
|
.map(([key, value]) => [key.toLowerCase(), value])
|
||||||
|
.filter(([key, value]) => key !== 'cache-control' || value !== 'only-if-cached'),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getBodyCacheKeyJson(body) {
|
||||||
|
if (!body) {
|
||||||
|
return body;
|
||||||
|
} if (typeof body === 'string') {
|
||||||
|
return body;
|
||||||
|
} if (body instanceof URLSearchParams) {
|
||||||
|
return body.toString();
|
||||||
|
} if (body instanceof fs.ReadStream) {
|
||||||
|
return body.path;
|
||||||
|
} if (body.toString && body.toString() === '[object FormData]') {
|
||||||
|
return getFormDataCacheKey(body);
|
||||||
|
} if (body instanceof Buffer) {
|
||||||
|
return body.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
|
||||||
|
}
|
||||||
|
|
||||||
|
function getRequestCacheKey(req) {
|
||||||
|
const headersPojo = Object.fromEntries([...req.headers.entries()]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
cache: req.cache,
|
||||||
|
credentials: req.credentials,
|
||||||
|
destination: req.destination,
|
||||||
|
headers: getHeadersCacheKeyJson(headersPojo),
|
||||||
|
integrity: req.integrity,
|
||||||
|
method: req.method,
|
||||||
|
redirect: req.redirect,
|
||||||
|
referrer: req.referrer,
|
||||||
|
referrerPolicy: req.referrerPolicy,
|
||||||
|
url: req.url,
|
||||||
|
body: getBodyCacheKeyJson(req.body),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getCacheKey(resource, init = {}) {
|
||||||
|
const resourceCacheKeyJson = resource instanceof Request
|
||||||
|
? getRequestCacheKey(resource)
|
||||||
|
: { url: resource };
|
||||||
|
|
||||||
|
const initCacheKeyJson = {
|
||||||
|
...init,
|
||||||
|
headers: getHeadersCacheKeyJson(init.headers || {}),
|
||||||
|
};
|
||||||
|
|
||||||
|
resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
|
||||||
|
initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);
|
||||||
|
|
||||||
|
delete initCacheKeyJson.agent;
|
||||||
|
|
||||||
|
return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasOnlyWithCacheOption(resource, init) {
|
||||||
|
if (
|
||||||
|
init
|
||||||
|
&& init.headers
|
||||||
|
&& Object.entries(init.headers)
|
||||||
|
.some(([key, value]) => key.toLowerCase() === 'cache-control' && value === 'only-if-cached')
|
||||||
|
) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (resource instanceof Request && resource.headers.get('Cache-Control') === 'only-if-cached') {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolves a fetch through the cache: serves an existing entry if present,
// otherwise performs the network request under a per-key lock and stores
// the result. Returns undefined on a miss when 'only-if-cached' was asked.
async function getResponse(cache, requestArguments) {
  const cacheKey = getCacheKey(...requestArguments);
  const ejectSelfFromCache = () => cache.remove(cacheKey);

  // Wraps a raw cache entry in an NFCResponse flagged as served-from-cache.
  const respondFromCache = (entry) => new NFCResponse(
    entry.bodyStream,
    entry.metaData,
    ejectSelfFromCache,
    true,
  );

  // Fast path: serve an existing entry without taking the lock.
  const existingEntry = await cache.get(cacheKey);
  if (existingEntry) {
    return respondFromCache(existingEntry);
  }

  // 'only-if-cached' semantics: a miss must not trigger a network request.
  if (hasOnlyWithCacheOption(...requestArguments)) {
    return undefined;
  }

  // Per-key lock so concurrent misses result in exactly one network fetch.
  await locko.lock(cacheKey);
  try {
    // Double-check: another caller may have populated this key while we
    // were waiting on the lock.
    const racedEntry = await cache.get(cacheKey);
    if (racedEntry) {
      return respondFromCache(racedEntry);
    }

    const fetchResponse = await fetch(...requestArguments);
    const serializedMeta = NFCResponse.serializeMetaFromNodeFetchResponse(fetchResponse);

    const newlyCachedData = await cache.set(
      cacheKey,
      fetchResponse.body,
      serializedMeta,
    );

    return new NFCResponse(
      newlyCachedData.bodyStream,
      newlyCachedData.metaData,
      ejectSelfFromCache,
      false,
    );
  } finally {
    locko.unlock(cacheKey);
  }
}
|
||||||
|
|
||||||
|
// Produces a fetch-compatible function bound to the given cache backend.
// The returned function carries a `.withCache()` factory so callers can
// derive further instances with different backends.
function createFetchWithCache(cache) {
  const cachedFetch = (...requestArguments) => getResponse(cache, requestArguments);
  cachedFetch.withCache = createFetchWithCache;
  return cachedFetch;
}
|
||||||
|
|
||||||
|
// Zero-config default instance backed by an in-memory cache; use
// `.withCache()` on it to swap in a different backend.
const defaultFetch = createFetchWithCache(new MemoryCache());

export default defaultFetch;
// NOTE(review): fetchBuilder is an alias of the default export —
// presumably kept so existing named-import callers keep working; confirm.
export const fetchBuilder = defaultFetch;
// Re-export the cache backends so consumers need only this entry point.
export { MemoryCache } from './classes/caching/memory_cache.js';
export { FileSystemCache } from './classes/caching/file_system_cache.js';
|
BIN
test/expected_png.png
Normal file
BIN
test/expected_png.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 7.9 KiB |
18
test/tests.cjs
Normal file
18
test/tests.cjs
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
// Smoke tests for the CommonJS wrapper build; executed under mocha
// (describe/it are mocha globals). Requires network access to httpbin.
const assert = require('assert');
const fetch = require('../commonjs/wrapper.cjs');

const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';

describe('Commonjs module tests', function() {
  // Verifies the wrapper actually performs a request end to end.
  it('Can make a request', async function() {
    const res = await fetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.status, 200);
  });

  // The wrapper must expose the ESM named exports as properties on the
  // default fetch function.
  it('Has expected properties', function() {
    assert(typeof fetch === 'function');
    assert(fetch.MemoryCache);
    assert(fetch.FileSystemCache);
    assert(fetch.fetchBuilder);
  });
});
|
531
test/tests.js
Normal file
531
test/tests.js
Normal file
@ -0,0 +1,531 @@
|
|||||||
|
import { dirname } from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
import fs from 'fs';
|
||||||
|
import FormData from 'form-data';
|
||||||
|
import assert from 'assert';
|
||||||
|
import rimraf from 'rimraf';
|
||||||
|
import path from 'path';
|
||||||
|
import { URLSearchParams } from 'url';
|
||||||
|
import standardFetch from 'node-fetch';
|
||||||
|
import FetchCache, { MemoryCache, FileSystemCache, getCacheKey } from '../src/index.js';
|
||||||
|
import { Agent } from 'http';
|
||||||
|
|
||||||
|
// ESM has no __dirname built-in; reconstruct it from import.meta.url.
const __dirname = dirname(fileURLToPath(import.meta.url));

// Directory FileSystemCache writes into; wiped before every test.
const CACHE_PATH = path.join(__dirname, '..', '.cache');
// Reference bytes used to verify binary bodies round-trip intact.
const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));

// httpbin endpoints exercised throughout the suite.
const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';
const FOUR_HUNDRED_URL = 'https://httpbin.org/status/400';
const THREE_HUNDRED_TWO_URL = 'https://httpbin.org/status/302';
const TEXT_BODY_URL = 'https://httpbin.org/robots.txt';
const JSON_BODY_URL = 'https://httpbin.org/json';
const PNG_BODY_URL = 'https://httpbin.org/image/png';

// Exact body served by httpbin's robots.txt endpoint.
const TEXT_BODY_EXPECTED = 'User-agent: *\nDisallow: /deny\n';

// Shared mutable test state; cachedFetch is reset in beforeEach.
let cachedFetch;
let body;
|
||||||
|
|
||||||
|
// Shorthand for a POST fetch-init carrying the given body.
function post(requestBody) {
  return {
    method: 'POST',
    body: requestBody,
  };
}
|
||||||
|
|
||||||
|
// Strips date information from header data so cached and live responses
// can be compared even though their Date headers differ. Accepts a header
// object (drops the `date` key), an entries array (drops the 'date' pair),
// or a flat value list (drops anything that parses as a date). Anything
// else is returned untouched (same reference).
function removeDates(arrOrObj) {
  if (arrOrObj.date) {
    const { date, ...withoutDate } = arrOrObj;
    return withoutDate;
  }

  if (!Array.isArray(arrOrObj)) {
    return arrOrObj;
  }

  // Entries form: [['name', 'value'], ...]
  if (Array.isArray(arrOrObj[0])) {
    return arrOrObj.filter((entry) => entry[0] !== 'date');
  }

  // Flat list of header values.
  return arrOrObj.filter((value) => !Date.parse(value));
}
|
||||||
|
|
||||||
|
// Promise-based sleep for the given number of milliseconds.
function wait(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
||||||
|
|
||||||
|
// Issues the same request through both the cached fetch under test and
// plain node-fetch, in parallel, so their responses can be compared.
async function dualFetch(...args) {
  const responses = await Promise.all([
    cachedFetch(...args),
    standardFetch(...args),
  ]);

  return {
    cachedFetchResponse: responses[0],
    standardFetchResponse: responses[1],
  };
}
|
||||||
|
|
||||||
|
// Fresh in-memory cache and a clean cache directory before every test.
beforeEach(async function() {
  rimraf.sync(CACHE_PATH);
  cachedFetch = FetchCache.withCache(new MemoryCache());
});

// Shared response holder used by the suites below.
let res;
|
||||||
|
|
||||||
|
// Each test compares a response property against plain node-fetch twice:
// first on the live (uncached) response, then on the cached replay.
describe('Basic property tests', function() {
  it('Has a status property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
  });

  it('Has a statusText property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
  });

  it('Has a url property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
  });

  // Uses a 400 endpoint so `ok` is exercised in its false state.
  it('Has an ok property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(FOUR_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);

    cachedFetchResponse = await cachedFetch(FOUR_HUNDRED_URL);
    assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
    assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
  });

  it('Has a redirected property', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(THREE_HUNDRED_TWO_URL);
    assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);

    cachedFetchResponse = await cachedFetch(THREE_HUNDRED_TWO_URL);
    assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Verifies the cached response reproduces the Headers API faithfully.
// removeDates() strips Date headers, which legitimately differ between
// the live request and the cached replay.
describe('Header tests', function() {
  it('Gets correct raw headers', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates(cachedFetchResponse.headers.raw()),
      removeDates(standardFetchResponse.headers.raw()),
    );
  });

  it('Gets correct header keys', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
  });

  it('Gets correct header values', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.values()]),
      removeDates([...standardFetchResponse.headers.values()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.values()]),
      removeDates([...standardFetchResponse.headers.values()]),
    );
  });

  it('Gets correct header entries', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.entries()]),
      removeDates([...standardFetchResponse.headers.entries()]),
    );

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(
      removeDates([...cachedFetchResponse.headers.entries()]),
      removeDates([...standardFetchResponse.headers.entries()]),
    );
  });

  it('Can get a header by value', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    // Guard: the reference response must actually carry the header.
    assert(standardFetchResponse.headers.get('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));
  });

  it('Returns undefined for non-existent header', async function() {
    const headerName = 'zzzz';
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert(!standardFetchResponse.headers.get(headerName));
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));
  });

  it('Can get whether a header is present', async function() {
    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
    assert(standardFetchResponse.headers.has('content-length'));
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));

    cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
    assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Exercises cache hit/miss behavior and cache-key derivation for each
// supported body type (string, URLSearchParams, streams, FormData).
// `fromCache` is false on the first (network) fetch and true on replay.
describe('Cache tests', function() {
  it('Uses cache', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can eject from cache', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    // Ejecting should force the next fetch back onto the network.
    await res.ejectFromCache();

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });

  it('Does not error if ejecting from cache twice', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    await res.ejectFromCache();
    await res.ejectFromCache();
  });

  it('Gives different string bodies different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post('b'));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same string bodies same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different URLSearchParams different cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=b')));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same URLSearchParams same cache keys', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different read streams different cache keys', async function() {
    const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
    const s2 = fs.createReadStream(path.join(__dirname, '..', 'src', 'index.js'));

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s2));
    assert.strictEqual(res.fromCache, false);
  });

  // NOTE(review): the same stream instance is reused for both fetches —
  // presumably stream identity (not content) drives the key here; confirm.
  it('Gives the same read streams the same cache key', async function() {
    const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
    assert.strictEqual(res.fromCache, true);
  });

  it('Gives different form data different cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('b', 'b');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, false);
  });

  it('Gives same form data same cache keys', async function() {
    const data1 = new FormData();
    data1.append('a', 'a');

    const data2 = new FormData();
    data2.append('a', 'a');

    res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
    assert.strictEqual(res.fromCache, true);
  });

  // Agents are stripped from the cache key, so a self-referencing agent
  // must not break JSON serialization.
  it('Does not error with custom agent with circular properties', async function() {
    const agent = new Agent();
    agent.agent = agent;

    await cachedFetch('http://httpbin.org/status/200', { agent });
  })
}).timeout(10000);
|
||||||
|
|
||||||
|
// Covers Request-object inputs and body consumption (text/JSON/buffer/
// stream) for both the live response and the cached replay.
describe('Data tests', function() {
  it('Supports request objects', async function() {
    let request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
    res = await cachedFetch(request);
    assert.strictEqual(res.fromCache, false);

    // A fresh but identical Request must map to the same cache key.
    request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
    res = await cachedFetch(request);
    assert.strictEqual(res.fromCache, true);
  });

  it('Supports request objects with custom headers', async function() {
    const request1 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'YYY' } });
    const request2 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'ZZZ' } });

    res = await cachedFetch(request1);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(request2);
    assert.strictEqual(res.fromCache, false);
  });

  it('Refuses to consume body twice', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    await res.text();

    try {
      await res.text();
      throw new Error('The above line should have thrown.');
    } catch (err) {
      assert(err.message.includes('body used already for:'));
    }
  });

  it('Can get text body', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TEXT_BODY_URL);
    body = await res.text();
    assert.strictEqual(body, TEXT_BODY_EXPECTED);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get JSON body', async function() {
    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(JSON_BODY_URL);
    body = await res.json();
    assert(body.slideshow);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can get PNG buffer body', async function() {
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can stream a body', async function() {
    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TEXT_BODY_URL);
    body = '';

    for await (const chunk of res.body) {
      body += chunk.toString();
    }

    assert.strictEqual(TEXT_BODY_EXPECTED, body);
    assert.strictEqual(res.fromCache, true);
  });

  it('Errors if the body type is not supported', async function() {
    try {
      await cachedFetch(TEXT_BODY_URL, { body: {} });
      throw new Error('It was supposed to throw');
    } catch (err) {
      assert(err.message.includes('Unsupported body type'));
    }
  });

  // Exercises the per-key lock: two simultaneous misses must result in
  // exactly one network fetch.
  it('Uses cache even if you make multiple requests at the same time', async function() {
    const [res1, res2] = await Promise.all([
      cachedFetch('http://httpbin.org/status/200'),
      cachedFetch('http://httpbin.org/status/200'),
    ]);

    // One should be false, the other should be true
    assert(res1.fromCache !== res2.fromCache);
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
describe('Memory cache tests', function() {
  it('Supports TTL', async function() {
    cachedFetch = FetchCache.withCache(new MemoryCache({ ttl: 100 }));
    // NOTE: this `let res` intentionally shadows the module-level `res`.
    let res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    // Wait past the 100ms TTL so the entry expires.
    await wait(200);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
  });
}).timeout(10000);
|
||||||
|
|
||||||
|
// Same behavioral checks as the memory cache, backed by on-disk storage
// (the .cache directory wiped in beforeEach).
describe('File system cache tests', function() {
  it('Supports TTL', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache({ ttl: 100 }));
    // NOTE: this `let res` intentionally shadows the module-level `res`.
    let res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    // Wait past the 100ms TTL so the entry expires.
    await wait(200);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);
  });

  // Binary bodies must round-trip through disk byte-for-byte.
  it('Can get PNG buffer body', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache());
    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(PNG_BODY_URL);
    body = await res.buffer();
    assert.strictEqual(expectedPngBuffer.equals(body), true);
    assert.strictEqual(res.fromCache, true);
  });

  it('Can eject from cache', async function() {
    cachedFetch = FetchCache.withCache(new FileSystemCache());

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);

    await res.ejectFromCache();

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, false);

    res = await cachedFetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.fromCache, true);
  });
});
|
||||||
|
|
||||||
|
// 'only-if-cached' must resolve to undefined on a cache miss and to the
// cached response on a hit — never trigger a network request.
describe('Cache mode tests', function() {
  it('Can use the only-if-cached cache control setting via init', async function() {
    res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
    assert(!res);
    res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
    assert(!res);
    // Populate the cache with a normal fetch...
    res = await cachedFetch(TWO_HUNDRED_URL);
    assert(res && !res.fromCache);
    // ...after which only-if-cached returns the cached entry.
    res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
    assert(res && res.fromCache);
    await res.ejectFromCache();
    res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
    assert(!res);
  });

  it('Can use the only-if-cached cache control setting via resource', async function() {
    res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
    assert(!res);
    res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL));
    assert(res && !res.fromCache);
    res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
    assert(res && res.fromCache);
  });
});
|
||||||
|
|
||||||
|
// getCacheKey() is exported so callers can probe the cache directly;
// a key computed for a fetched URL must resolve in the backing cache.
describe('Cache key tests', function() {
  it('Can calculate a cache key and check that it exists', async function() {
    const cache = new MemoryCache();
    cachedFetch = FetchCache.withCache(cache);
    await cachedFetch(TWO_HUNDRED_URL);

    const cacheKey = getCacheKey(TWO_HUNDRED_URL);
    const nonExistentCacheKey = getCacheKey(TEXT_BODY_URL);

    const cacheKeyResult = await cache.get(cacheKey);
    const nonExistentCacheKeyResult = await cache.get(nonExistentCacheKey);

    assert(cacheKeyResult);
    assert(!nonExistentCacheKeyResult);
  });
});
|
Reference in New Issue
Block a user