Compare commits

65 Commits

| SHA1 |
| --- |
| 360dc102bb |
| 0c643e9b87 |
| a9f7d70cd0 |
| 07777a74f2 |
| d5382860f3 |
| 73601127b2 |
| eb86e3e4c5 |
| 51e15ad046 |
| 571b65cdd2 |
| b0348460a7 |
| 44a0ffb8f3 |
| 41d52f39c8 |
| 3b92b39430 |
| b234de4b69 |
| 44786dfb68 |
| 5f48f0fc8a |
| 4f93c9ba1c |
| faf73e49d8 |
| 0173de3f6d |
| decb904331 |
| 7f502a98a2 |
| 18b276c5dd |
| cd6f4f01e3 |
| fee0cdd085 |
| 2bc4cd6de0 |
| 51e5754c3e |
| 29de8ee068 |
| f1128f2f64 |
| 0397d759dd |
| a68dd5b0d0 |
| ba8ff5d257 |
| 9c40ddf786 |
| 9cea80be47 |
| a386a54fbb |
| b7190b933a |
| 55b92ee40f |
| 6607a71af6 |
| 0f5f1edb22 |
| d6f73c4778 |
| 0c8c0d4520 |
| 2f14847a01 |
| e3ecf3b981 |
| 05a146fef1 |
| dba1e2fbd3 |
| ad572f3f29 |
| 6023b56164 |
| 675ed9bbf2 |
| 998d18ae84 |
| d5ab4d4b18 |
| 847b330489 |
| dae57d2604 |
| 6cd42272c4 |
| e8ad8da0bb |
| 45ca35f057 |
| af1e977620 |
| ef5939f571 |
| e356e485b6 |
| a11a135234 |
| 524308aa61 |
| bffaa2aadc |
| b8da556091 |
| dea019c42c |
| eda9f98e3b |
| 6d1369b50f |
| d97cb85f97 |
@@ -1 +1 @@
test
test
@@ -1,4 +1,4 @@
module.exports = {
{
  "env": {
    "commonjs": true,
    "es6": true,
@@ -15,5 +15,7 @@ module.exports = {
    "ecmaVersion": 2018
  },
  "rules": {
    "import/extensions": "off",
    "import/prefer-default-export": "off"
  }
};
}
23 .gitea/workflows/ci.yml Normal file
@@ -0,0 +1,23 @@
name: CI Pipeline

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-22.04

    strategy:
      matrix:
        node-version: [20.x]

    steps:
      - uses: actions/checkout@v4
      - name: Use Node.js
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
          registry-url: "https://registry.npmjs.org"
      - run: npm ci
      - run: npm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
17 .github/workspaces/ci.yml vendored
@@ -1,17 +0,0 @@
name: CI Pipeline

on: [push, pull_request]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
      - run: npm ci
      - run: npm run lint
      - run: npm run coverage
2 .gitignore vendored
@@ -105,3 +105,5 @@ dist

# Other
.cache

commonjs
@@ -1,4 +1,8 @@
.eslintrc.js
.eslintrc.json
test
.cache
.nyc_output
.nyc_output
.github
.eslintignore
.vscode
coverage
184 README.md
@@ -1,5 +1,7 @@
# node-fetch-cache

Forked from: <https://github.com/mistval/node-fetch-cache>

node-fetch with caching of responses.

The first fetch with any given arguments will result in an HTTP request and any subsequent fetch with the same arguments will read the response from the cache.
@@ -11,97 +13,41 @@ By default responses are cached in memory, but you can also cache to files on di
Require it and use it the same way you would use node-fetch:

```js
const fetch = require('node-fetch-cache');
import fetch from "node-fetch-cache";

fetch('http://google.com')
  .then(response => response.text())
  .then(text => console.log(text));
fetch("http://google.com")
  .then((response) => response.text())
  .then((text) => console.log(text));
```

The next time you `fetch('http://google.com')`, the response will be returned from the cache. No HTTP request will be made.

## API

This module aims to expose the same API as `node-fetch` does for the most common use cases, but may not support some of the less common functions, properties, and use cases.
This module's fetch function has almost the exact same API as node-fetch, and you should consult [the node-fetch documentation](https://www.npmjs.com/package/node-fetch) for how to use it.

### const fetch = require('node-fetch-cache');
This module just adds one extra function to the response object:

Load the module.
### res.ejectFromCache(): Promise\<void\>

### async fetch(resource [, init])
This function can be used to eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.

Same arguments as [node-fetch](https://www.npmjs.com/package/node-fetch).
This module caches ALL responses, even those with 4xx and 5xx response statuses. You can use this function to uncache such responses if desired. For example:

Returns a **CachedResponse**.
```js
import fetch from "node-fetch-cache";

### async CachedResponse.text()

Returns the body as a string, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### async CachedResponse.json()

Returns the body as a JavaScript object, parsed from JSON, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### async CachedResponse.buffer()

Returns the body as a Buffer, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### CachedResponse.status

Returns the HTTP status code of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### CachedResponse.statusText

Returns a text represention of the response status, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### CachedResponse.ok

Returns true if the request returned a successful response status, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### CachedResponse.redirected

Returns true if the request was redirected, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### CachedResponse.headers

Returns a **ResponseHeaders** object representing the headers of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### async CachedResponse.ejectFromCache()

Eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.

Keep in mind that this module caches **all** responses, even if they return error status codes. You might want to use this function when `!response.ok`, so that you can retry requests.

### ResponseHeaders.entries()

Returns the raw headers as an array of `[key, value]` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### ResponseHeaders.keys()

Returns an array of all header keys, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### ResponseHeaders.values()

Returns an array of all header values, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### ResponseHeaders.get(key)

Returns the value of the header with the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### ResponseHeaders.has(key)

Returns true if the headers has a value for the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

### ResponseHeaders.raw

Returns the headers as an object of `{ "key": "value" }` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).

## Streaming

This module supports streams like [node-fetch](https://www.npmjs.com/package/node-fetch) does, but with a couple of caveats you should be aware of if you want to use streams.

1. Response bodies are always read into memory even if you stream them to disk. That means if you need to stream large responses that don't fit into RAM, this module may be unsuitable.
2. When streaming a request body with fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**. This module may be unsuitable if you need to stream files in requests and the content of those files can change.
fetch("http://google.com")
  .then(async (response) => {
    if (!response.ok) {
      await response.ejectFromCache();
      throw new Error("Non-okay response from google.com");
    } else {
      return response.text();
    }
  })
  .then((text) => console.log(text));
```

## Cache Customization

@@ -114,7 +60,7 @@ This is the default cache delegate. It caches responses in-process in a POJO.
Usage:

```js
const fetchBuilder, { MemoryCache } = require('node-fetch-cache');
import { fetchBuilder, MemoryCache } from "node-fetch-cache";
const fetch = fetchBuilder.withCache(new MemoryCache(options));
```

@@ -126,7 +72,7 @@ Options:
}
```

Note that by default (if you don't use `withCache()`) a **shared** MemoryCache will be used (you can import this module in multiple files and they will all share the same cache). If you instantiate and provide a `new MemoryCache()` as shown above however, the cache is *NOT* shared unless you explicitly pass it around and pass it into `withCache()` in each of your source files.
Note that by default (if you don't use `withCache()`) a **shared** MemoryCache will be used (you can import this module in multiple files and they will all share the same cache). If you instantiate and provide a `new MemoryCache()` as shown above however, the cache is _NOT_ shared unless you explicitly pass it around and pass it into `withCache()` in each of your source files.

### FileSystemCache

@@ -135,10 +81,12 @@ Cache to a directory on disk. This allows the cache to survive the process exiti
Usage:

```js
const fetchBuilder, { FileSystemCache } = require('node-fetch-cache');
import { fetchBuilder, FileSystemCache } from "node-fetch-cache";
const fetch = fetchBuilder.withCache(new FileSystemCache(options));
```

Options:

```js
{
  cacheDirectory: '/my/cache/directory/path', // Specify where to keep the cache. If undefined, '.cache' is used by default. If this directory does not exist, it will be created.
@@ -148,41 +96,71 @@ const fetch = fetchBuilder.withCache(new FileSystemCache(options));

### Provide Your Own

You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, value)`, `get(key)`, and `remove(key)` functions.
You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, bodyStream, bodyMeta)`, `get(key)`, and `remove(key)` functions.

The set function must accept a key (which will be a string) and a value (which will be a JSON-serializable JS object) and store them.
Check the built-in [MemoryCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/memory_cache.js) and [FileSystemCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/file_system_cache.js) for examples.

The get function should accept a key and return whatever value was set for that key (or `undefined`/`null` if there is no value for that key).
The set function must accept a key (which will be a string), a body stream, and a metadata object (which will be a JSON-serializable JS object). It must store these, and then return an object with a `bodyStream` property, containing a fresh, unread stream of the body content, as well as a `metaData` property, containing the same metaData that was passed in.

The remove function should accept a key and remove the cached value associated with that key, if any.
The get function should accept a key and return undefined if no cached value is found, or else an object with a `bodyStream` property, containing a stream of the body content, as well as a `metaData` property, containing the metadata that was stored via the `set(key, bodyStream, bodyMeta)` function.

Both functions can be async.
The remove function should accept a key and remove the cached value associated with that key, if any. It is also safe for your caching delegate to remove values from the cache arbitrarily if desired (for example if you want to implement a TTL in the caching delegate).

It is safe to remove values from the cache arbitrarily (for example if you implement a TTL in the caching delegate).
All three functions may be async.

Example: you could make and use your own simple memory cache like this:
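A minimal sketch, assuming the stream-based contract described above; the class name and variable names are illustrative, and the import path follows the README's own usage examples:

```js
import { Readable } from "stream";
import { fetchBuilder } from "node-fetch-cache";

// Hypothetical minimal delegate: buffers each body so get() can hand back a fresh stream.
class SimpleMemoryCache {
  constructor() {
    this.entries = new Map();
  }

  async set(key, bodyStream, metaData) {
    const chunks = [];
    for await (const chunk of bodyStream) {
      chunks.push(Buffer.from(chunk));
    }
    this.entries.set(key, { bodyBuffer: Buffer.concat(chunks), metaData });
    // Return a fresh, unread stream plus the same metadata, as the contract requires.
    return this.get(key);
  }

  get(key) {
    const entry = this.entries.get(key);
    if (!entry) {
      return undefined;
    }
    return {
      bodyStream: Readable.from(entry.bodyBuffer),
      metaData: entry.metaData,
    };
  }

  remove(key) {
    this.entries.delete(key);
  }
}

const fetch = fetchBuilder.withCache(new SimpleMemoryCache());
```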
## Misc Tips

### Streaming

This module does not support Stream request bodies, except for fs.ReadStream. And when using fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**.

Streams don't quite play nice with the concept of caching based on request characteristics, because we would have to read the stream to the end to find out what's in it and hash it into a proper cache key.

### Request Concurrency

Requests with the same cache key are queued. For example, you might wonder if making the same request 100 times simultaneously would result in 100 HTTP requests:

```js
class MyMemoryCache {
  set(key, value) {
    this[key] = value;
  }
import fetch from "node-fetch-cache";

  get(key) {
    return this[key];
  }
await Promise.all(
  Array(100)
    .fill()
    .map(() => fetch("https://google.com"))
);
```

  remove(key) {
    delete this[key];
  }
The answer is no. Only one request would be made, and 99 of the `fetch()`s will read the response from the cache.

### Cache-Control: only-if-cached Requests

The HTTP standard describes a [Cache-Control request header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#request_directives) to control certain aspects of cache behavior. Node-fetch ignores these, but node-fetch-cache respects the `Cache-Control: only-if-cached` directive. When `only-if-cached` is specified, node-fetch-cache will return `undefined` if there is no cached response. No HTTP request will be made. For example:

```js
import fetch from "node-fetch-cache";

const response = await fetch("https://google.com", {
  headers: { "Cache-Control": "only-if-cached" },
});

if (response === undefined) {
  // No response was found in the cache
}
```

const fetchBuilder = require('node-fetch-cache');
fetch = fetchBuilder.withCache(new MyMemoryCache());
Note that this is slightly different from browser fetch, which returns a `504 Gateway Timeout` response if no cached response is available.

fetch('http://google.com')
  .then(response => response.text())
  .then(text => console.log(text));
### Calculating the Cache Key

This module exports a `getCacheKey()` function to calculate a cache key string from request parameters, which may be useful for enabling some advanced use cases (especially if you want to call cache functions directly). Call `getCacheKey()` exactly like you would call `fetch()`.

```js
import { fetchBuilder, MemoryCache, getCacheKey } from "node-fetch-cache";

const cache = new MemoryCache();
const fetch = fetchBuilder.withCache(cache);

const rawCacheData = await cache.get(getCacheKey("https://google.com"));
```

## Bugs / Help / Feature Requests / Contributing
@@ -1,29 +0,0 @@
const FPersist = require('fpersist');
const KeyTimeout = require('./key_timeout.js');

module.exports = class FileSystemCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.keyTimeout = new KeyTimeout();

    const cacheDirectory = options.cacheDirectory || '.cache';
    this.cache = new FPersist(cacheDirectory);
  }

  get(key) {
    return this.cache.getItem(key);
  }

  remove(key) {
    this.keyTimeout.clearTimeout(key);
    return this.cache.deleteItem(key);
  }

  async set(key, value) {
    await this.cache.setItem(key, value);

    if (typeof this.ttl === 'number') {
      this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
    }
  }
};
@@ -1,26 +0,0 @@
const KeyTimeout = require('./key_timeout.js');

module.exports = class MemoryCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.keyTimeout = new KeyTimeout();
    this.cache = {};
  }

  get(key) {
    return this.cache[key];
  }

  remove(key) {
    this.keyTimeout.clearTimeout(key);
    delete this.cache[key];
  }

  set(key, value) {
    this.cache[key] = value;

    if (typeof this.ttl === 'number') {
      this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
    }
  }
};
@@ -1,31 +0,0 @@
class Headers {
  constructor(rawHeaders) {
    this.rawHeaders = rawHeaders;
  }

  entries() {
    return Object.entries(this.rawHeaders);
  }

  keys() {
    return Object.keys(this.rawHeaders);
  }

  values() {
    return Object.values(this.rawHeaders);
  }

  get(name) {
    return this.rawHeaders[name.toLowerCase()] || null;
  }

  has(name) {
    return !!this.get(name);
  }

  raw() {
    return this.rawHeaders;
  }
}

module.exports = Headers;
@@ -1,47 +0,0 @@
const stream = require('stream');
const Headers = require('./headers.js');

class Response {
  constructor(raw, ejectSelfFromCache, fromCache) {
    Object.assign(this, raw);
    this.ejectSelfFromCache = ejectSelfFromCache;
    this.headers = new Headers(raw.headers);
    this.fromCache = fromCache;
    this.bodyUsed = false;

    if (this.bodyBuffer.type === 'Buffer') {
      this.bodyBuffer = Buffer.from(this.bodyBuffer);
    }
  }

  get body() {
    return stream.Readable.from(this.bodyBuffer);
  }

  consumeBody() {
    if (this.bodyUsed) {
      throw new Error('Error: body used already');
    }

    this.bodyUsed = true;
    return this.bodyBuffer;
  }

  text() {
    return this.consumeBody().toString();
  }

  json() {
    return JSON.parse(this.consumeBody().toString());
  }

  buffer() {
    return this.consumeBody();
  }

  ejectFromCache() {
    return this.ejectSelfFromCache();
  }
}

module.exports = Response;
4 commonjs/wrapper.cjs Normal file
@@ -0,0 +1,4 @@
const mod = require('./index.cjs');

module.exports = mod.default;
Object.assign(module.exports, mod);
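For reference, a minimal sketch of consuming this wrapper from CommonJS, assuming the package name from package.json and the `fetchBuilder`/`FileSystemCache` properties that the wrapper attaches (the URL is a placeholder taken from the test suite):

```js
// CommonJS consumers receive the default export as the module itself,
// with the named exports copied onto it by the wrapper above.
const fetch = require("@fascinated/node-fetch-cache");
const { fetchBuilder, FileSystemCache } = fetch;

const cachedFetch = fetchBuilder.withCache(new FileSystemCache({ ttl: 60000 }));

cachedFetch("https://httpbin.org/status/200")
  .then((res) => console.log(res.status, res.fromCache));
```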
114 index.js
@@ -1,114 +0,0 @@
const fetch = require('node-fetch');
const fs = require('fs');
const { URLSearchParams } = require('url');
const crypto = require('crypto');
const Response = require('./classes/response.js');
const MemoryCache = require('./classes/caching/memory_cache.js');

const CACHE_VERSION = 2;

function md5(str) {
  return crypto.createHash('md5').update(str).digest('hex');
}

// Since the bounday in FormData is random,
// we ignore it for purposes of calculating
// the cache key.
function getFormDataCacheKey(formData) {
  const cacheKey = { ...formData };

  if (typeof formData.getBoundary === 'function') {
    const boundary = formData.getBoundary();

    // eslint-disable-next-line no-underscore-dangle
    delete cacheKey._boundary;

    // eslint-disable-next-line no-underscore-dangle
    if (Array.isArray(cacheKey._streams)) {
      const boundaryReplaceRegex = new RegExp(boundary, 'g');

      // eslint-disable-next-line no-underscore-dangle
      cacheKey._streams = cacheKey._streams.map((s) => {
        if (typeof s === 'string') {
          return s.replace(boundaryReplaceRegex, '');
        }

        return s;
      });
    }
  }

  return cacheKey;
}

function getBodyCacheKeyJson(body) {
  if (!body) {
    return body;
  } if (typeof body === 'string') {
    return body;
  } if (body instanceof URLSearchParams) {
    return body.toString();
  } if (body instanceof fs.ReadStream) {
    return body.path;
  } if (body.toString && body.toString() === '[object FormData]') {
    return getFormDataCacheKey(body);
  }

  throw new Error('Unsupported body type');
}

function getCacheKey(requestArguments) {
  const resource = requestArguments[0];
  const init = requestArguments[1] || {};

  const resourceCacheKeyJson = typeof resource === 'string' ? { url: resource } : { ...resource };
  const initCacheKeyJson = { ...init };

  resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
  initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);

  return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
}

async function createRawResponse(fetchRes) {
  const buffer = await fetchRes.buffer();

  const rawHeaders = Array.from(fetchRes.headers.entries())
    .reduce((aggregate, entry) => ({ ...aggregate, [entry[0]]: entry[1] }), {});

  return {
    status: fetchRes.status,
    statusText: fetchRes.statusText,
    type: fetchRes.type,
    url: fetchRes.url,
    ok: fetchRes.ok,
    headers: rawHeaders,
    redirected: fetchRes.redirected,
    bodyBuffer: buffer,
  };
}

async function getResponse(cache, requestArguments) {
  const cacheKey = getCacheKey(requestArguments);
  const cachedValue = await cache.get(cacheKey);

  const ejectSelfFromCache = () => cache.remove(cacheKey);

  if (cachedValue) {
    return new Response(cachedValue, ejectSelfFromCache, true);
  }

  const fetchResponse = await fetch(...requestArguments);
  const rawResponse = await createRawResponse(fetchResponse);
  await cache.set(cacheKey, rawResponse);
  return new Response(rawResponse, ejectSelfFromCache, false);
}

function createFetchWithCache(cache) {
  const fetchCache = (...args) => getResponse(cache, args);
  fetchCache.withCache = createFetchWithCache;

  return fetchCache;
}

module.exports = createFetchWithCache(new MemoryCache());
7178 package-lock.json generated
File diff suppressed because it is too large
51 package.json
@@ -1,17 +1,28 @@
{
  "name": "node-fetch-cache",
  "version": "1.0.6",
  "description": "node-fetch with a persistent cache.",
  "main": "index.js",
  "name": "@fascinated/node-fetch-cache",
  "version": "3.1.3",
  "description": "node-fetch with caching.",
  "main": "src/index.js",
  "type": "module",
  "publishConfig": {
    "access": "public",
    "registry": "https://registry.npmjs.org/"
  },
  "exports": {
    "import": "./src/index.js",
    "require": "./commonjs/wrapper.cjs"
  },
  "scripts": {
    "test": "mocha --timeout 10000 --exit",
    "coverage": "nyc --reporter=lcov npm test",
    "lint": "./node_modules/.bin/eslint .",
    "lintfix": "./node_modules/.bin/eslint . --fix"
    "buildcjs": "rollup src/index.js --file commonjs/index.cjs --format cjs",
    "test": "npm run lintfix && npm run buildcjs && mocha --timeout 10000 --exit",
    "coverage": "nyc --reporter=lcov --reporter=text npm test",
    "lint": "eslint .",
    "lintfix": "eslint . --fix",
    "prepublishOnly": "npm test"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/mistval/node-fetch-cache.git"
    "url": "git+https://git.fascinated.cc/Fascinated/node-fetch-cache.git"
  },
  "keywords": [
    "node",
@@ -23,22 +34,24 @@
  "author": "mistval",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/mistval/node-fetch-cache/issues"
    "url": "https://git.fascinated.cc/Fascinated/node-fetch-cache/issues"
  },
  "homepage": "https://github.com/mistval/node-fetch-cache#readme",
  "homepage": "https://git.fascinated.cc/Fascinated/node-fetch-cache#readme",
  "devDependencies": {
    "eslint": "^6.8.0",
    "eslint-config-airbnb-base": "^14.1.0",
    "eslint-plugin-import": "^2.20.2",
    "eslint": "^8.9.0",
    "eslint-config-airbnb-base": "^15.0.0",
    "eslint-plugin-import": "^2.25.4",
    "form-data": "^3.0.0",
    "husky": "^4.3.0",
    "mocha": "^8.2.1",
    "husky": "^8.0.3",
    "mocha": "^9.2.1",
    "nyc": "^15.1.0",
    "rimraf": "^3.0.2"
    "rimraf": "^3.0.2",
    "rollup": "^2.53.0"
  },
  "dependencies": {
    "fpersist": "^1.0.5",
    "node-fetch": "*"
    "cacache": "^15.2.0",
    "locko": "^1.0.0",
    "node-fetch": "2.6.11"
  },
  "husky": {
    "hooks": {
84 src/classes/caching/file_system_cache.js Normal file
@@ -0,0 +1,84 @@
import cacache from 'cacache';
import { Readable } from 'stream';

function getBodyAndMetaKeys(key) {
  return [`${key}body`, `${key}meta`];
}

export class FileSystemCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.cacheDirectory = options.cacheDirectory || '.cache';
  }

  async get(key) {
    const [, metaKey] = getBodyAndMetaKeys(key);

    const metaInfo = await cacache.get.info(this.cacheDirectory, metaKey);

    if (!metaInfo) {
      return undefined;
    }

    const metaBuffer = await cacache.get.byDigest(this.cacheDirectory, metaInfo.integrity);
    const metaData = JSON.parse(metaBuffer);
    const { bodyStreamIntegrity, empty, expiration } = metaData;

    delete metaData.bodyStreamIntegrity;
    delete metaData.empty;
    delete metaData.expiration;

    if (expiration && expiration < Date.now()) {
      return undefined;
    }

    const bodyStream = empty
      ? Readable.from(Buffer.alloc(0))
      : cacache.get.stream.byDigest(this.cacheDirectory, bodyStreamIntegrity);

    return {
      bodyStream,
      metaData,
    };
  }

  remove(key) {
    const [bodyKey, metaKey] = getBodyAndMetaKeys(key);

    return Promise.all([
      cacache.rm.entry(this.cacheDirectory, bodyKey),
      cacache.rm.entry(this.cacheDirectory, metaKey),
    ]);
  }

  async set(key, bodyStream, metaData) {
    const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
    const metaCopy = { ...metaData };

    if (typeof this.ttl === 'number') {
      metaCopy.expiration = Date.now() + this.ttl;
    }

    try {
      metaCopy.bodyStreamIntegrity = await new Promise((fulfill, reject) => {
        bodyStream.pipe(cacache.put.stream(this.cacheDirectory, bodyKey))
          .on('integrity', (i) => fulfill(i))
          .on('error', (e) => {
            reject(e);
          });
      });
    } catch (err) {
      if (err.code !== 'ENODATA') {
        throw err;
      }

      metaCopy.empty = true;
    }

    const metaBuffer = Buffer.from(JSON.stringify(metaCopy));
    await cacache.put(this.cacheDirectory, metaKey, metaBuffer);
    const cachedData = await this.get(key);

    return cachedData;
  }
}
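A minimal sketch of reading a cached entry back through this delegate directly, assuming the `fetchBuilder`, `FileSystemCache`, and `getCacheKey` exports from src/index.js and the import path used in the README examples (the URL is a placeholder taken from the test suite):

```js
import { fetchBuilder, FileSystemCache, getCacheKey } from "node-fetch-cache";

const cache = new FileSystemCache({ cacheDirectory: ".cache", ttl: 60000 });
const cachedFetch = fetchBuilder.withCache(cache);

await cachedFetch("https://httpbin.org/status/200");

// get() resolves to undefined on a miss, or { bodyStream, metaData } on a hit.
const entry = await cache.get(getCacheKey("https://httpbin.org/status/200"));
if (entry) {
  // metaData is what NFCResponse.serializeMetaFromNodeFetchResponse stored (url, status, headers, ...).
  console.log(entry.metaData.status);
}
```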
@@ -1,4 +1,4 @@
module.exports = class KeyTimeout {
export class KeyTimeout {
  constructor() {
    this.timeoutHandleForKey = {};
  }
@@ -13,4 +13,4 @@ module.exports = class KeyTimeout {
      callback();
    }, durationMs);
  }
};
}
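A minimal sketch of the `KeyTimeout` contract as used by the caches below, assuming the module lives under src/classes/caching/ and exposes the `updateTimeout(key, durationMs, callback)` and `clearTimeout(key)` calls seen in MemoryCache; `put`/`drop` are illustrative names:

```js
import { KeyTimeout } from './src/classes/caching/key_timeout.js';

const keyTimeout = new KeyTimeout();
const store = new Map();
const ttl = 5000;

function put(key, value) {
  store.set(key, value);
  // Schedule removal after ttl milliseconds; updateTimeout presumably replaces any pending timer for the key.
  keyTimeout.updateTimeout(key, ttl, () => store.delete(key));
}

function drop(key) {
  keyTimeout.clearTimeout(key);
  store.delete(key);
}
```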
47 src/classes/caching/memory_cache.js Normal file
@@ -0,0 +1,47 @@
import { Readable } from 'stream';
import { KeyTimeout } from './key_timeout.js';

function streamToBuffer(stream) {
  const chunks = [];
  return new Promise((resolve, reject) => {
    stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
    stream.on('error', (err) => reject(err));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
  });
}

export class MemoryCache {
  constructor(options = {}) {
    this.ttl = options.ttl;
    this.keyTimeout = new KeyTimeout();
    this.cache = {};
  }

  get(key) {
    const cachedValue = this.cache[key];
    if (cachedValue) {
      return {
        bodyStream: Readable.from(cachedValue.bodyBuffer),
        metaData: cachedValue.metaData,
      };
    }

    return undefined;
  }

  remove(key) {
    this.keyTimeout.clearTimeout(key);
    delete this.cache[key];
  }

  async set(key, bodyStream, metaData) {
    const bodyBuffer = await streamToBuffer(bodyStream);
    this.cache[key] = { bodyBuffer, metaData };

    if (typeof this.ttl === 'number') {
      this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
    }

    return this.get(key);
  }
}
29 src/classes/response.js Normal file
@@ -0,0 +1,29 @@
import { Response } from 'node-fetch';

const responseInternalSymbol = Object.getOwnPropertySymbols(new Response())[1];

export class NFCResponse extends Response {
  constructor(bodyStream, metaData, ejectFromCache, fromCache) {
    super(bodyStream, metaData);
    this.ejectFromCache = ejectFromCache;
    this.fromCache = fromCache;
  }

  static serializeMetaFromNodeFetchResponse(res) {
    const metaData = {
      url: res.url,
      status: res.status,
      statusText: res.statusText,
      headers: res.headers.raw(),
      size: res.size,
      timeout: res.timeout,
      counter: res[responseInternalSymbol].counter,
    };

    return metaData;
  }

  ejectFromCache() {
    return this.ejectSelfFromCache();
  }
}
180 src/index.js Normal file
@@ -0,0 +1,180 @@
import fetch, { Request } from 'node-fetch';
import fs from 'fs';
import crypto from 'crypto';
import locko from 'locko';
import { NFCResponse } from './classes/response.js';
import { MemoryCache } from './classes/caching/memory_cache.js';

const CACHE_VERSION = 4;

function md5(str) {
  return crypto.createHash('md5').update(str).digest('hex');
}

// Since the bounday in FormData is random,
// we ignore it for purposes of calculating
// the cache key.
function getFormDataCacheKey(formData) {
  const cacheKey = { ...formData };
  const boundary = formData.getBoundary();

  // eslint-disable-next-line no-underscore-dangle
  delete cacheKey._boundary;

  const boundaryReplaceRegex = new RegExp(boundary, 'g');

  // eslint-disable-next-line no-underscore-dangle
  cacheKey._streams = cacheKey._streams.map((s) => {
    if (typeof s === 'string') {
      return s.replace(boundaryReplaceRegex, '');
    }

    return s;
  });

  return cacheKey;
}

function getHeadersCacheKeyJson(headersObj) {
  return Object.fromEntries(
    Object.entries(headersObj)
      .map(([key, value]) => [key.toLowerCase(), value])
      .filter(([key, value]) => key !== 'cache-control' || value !== 'only-if-cached'),
  );
}

function getBodyCacheKeyJson(body) {
  if (!body) {
    return body;
  } if (typeof body === 'string') {
    return body;
  } if (body instanceof URLSearchParams) {
    return body.toString();
  } if (body instanceof fs.ReadStream) {
    return body.path;
  } if (body.toString && body.toString() === '[object FormData]') {
    return getFormDataCacheKey(body);
  } if (body instanceof Buffer) {
    return body.toString();
  }

  throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
}

function getRequestCacheKey(req) {
  const headersPojo = Object.fromEntries([...req.headers.entries()]);

  return {
    cache: req.cache,
    credentials: req.credentials,
    destination: req.destination,
    headers: getHeadersCacheKeyJson(headersPojo),
    integrity: req.integrity,
    method: req.method,
    redirect: req.redirect,
    referrer: req.referrer,
    referrerPolicy: req.referrerPolicy,
    url: req.url,
    body: getBodyCacheKeyJson(req.body),
  };
}

export function getCacheKey(resource, init = {}) {
  const resourceCacheKeyJson = resource instanceof Request
    ? getRequestCacheKey(resource)
    : { url: resource };

  const initCacheKeyJson = {
    ...init,
    headers: getHeadersCacheKeyJson(init.headers || {}),
  };

  resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
  initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);

  delete initCacheKeyJson.agent;

  return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
}

function hasOnlyWithCacheOption(resource, init) {
  if (
    init
    && init.headers
    && Object.entries(init.headers)
      .some(([key, value]) => key.toLowerCase() === 'cache-control' && value === 'only-if-cached')
  ) {
    return true;
  }

  if (resource instanceof Request && resource.headers.get('Cache-Control') === 'only-if-cached') {
    return true;
  }

  return false;
}

async function getResponse(cache, requestArguments) {
  const cacheKey = getCacheKey(...requestArguments);
  let cachedValue = await cache.get(cacheKey);

  const ejectSelfFromCache = () => cache.remove(cacheKey);

  if (cachedValue) {
    return new NFCResponse(
      cachedValue.bodyStream,
      cachedValue.metaData,
      ejectSelfFromCache,
      true,
    );
  }

  if (hasOnlyWithCacheOption(...requestArguments)) {
    return undefined;
  }

  await locko.lock(cacheKey);
  try {
    cachedValue = await cache.get(cacheKey);
    if (cachedValue) {
      return new NFCResponse(
        cachedValue.bodyStream,
        cachedValue.metaData,
        ejectSelfFromCache,
        true,
      );
    }

    const fetchResponse = await fetch(...requestArguments);
    const serializedMeta = NFCResponse.serializeMetaFromNodeFetchResponse(fetchResponse);

    const newlyCachedData = await cache.set(
      cacheKey,
      fetchResponse.body,
      serializedMeta,
    );

    return new NFCResponse(
      newlyCachedData.bodyStream,
      newlyCachedData.metaData,
      ejectSelfFromCache,
      false,
    );
  } finally {
    locko.unlock(cacheKey);
  }
}

function createFetchWithCache(cache) {
  const fetchCache = (...args) => getResponse(cache, args);
  fetchCache.withCache = createFetchWithCache;

  return fetchCache;
}

const defaultFetch = createFetchWithCache(new MemoryCache());

export default defaultFetch;
export const fetchBuilder = defaultFetch;
export { MemoryCache } from './classes/caching/memory_cache.js';
export { FileSystemCache } from './classes/caching/file_system_cache.js';
18 test/tests.cjs Normal file
@@ -0,0 +1,18 @@
const assert = require('assert');
const fetch = require('../commonjs/wrapper.cjs');

const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';

describe('Commonjs module tests', function() {
  it('Can make a request', async function() {
    const res = await fetch(TWO_HUNDRED_URL);
    assert.strictEqual(res.status, 200);
  });

  it('Has expected properties', function() {
    assert(typeof fetch === 'function');
    assert(fetch.MemoryCache);
    assert(fetch.FileSystemCache);
    assert(fetch.fetchBuilder);
  });
});
415 test/tests.js
@@ -1,11 +1,16 @@
|
||||
const fs = require('fs');
|
||||
const FormData = require('form-data');
|
||||
const assert = require('assert');
|
||||
const rimraf = require('rimraf');
|
||||
const path = require('path');
|
||||
const FetchCache = require('../index.js');
|
||||
const { URLSearchParams } = require('url');
|
||||
const MemoryCache = require('../classes/caching/memory_cache.js');
|
||||
import { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import fs from 'fs';
|
||||
import FormData from 'form-data';
|
||||
import assert from 'assert';
|
||||
import rimraf from 'rimraf';
|
||||
import path from 'path';
|
||||
import { URLSearchParams } from 'url';
|
||||
import standardFetch from 'node-fetch';
|
||||
import FetchCache, { MemoryCache, FileSystemCache, getCacheKey } from '../src/index.js';
|
||||
import { Agent } from 'http';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const CACHE_PATH = path.join(__dirname, '..', '.cache');
|
||||
const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));
|
||||
@@ -19,98 +24,202 @@ const PNG_BODY_URL = 'https://httpbin.org/image/png';
|
||||
|
||||
const TEXT_BODY_EXPECTED = 'User-agent: *\nDisallow: /deny\n';
|
||||
|
||||
let fetch;
|
||||
let res;
|
||||
let cachedFetch;
|
||||
let body;
|
||||
|
||||
function post(body) {
|
||||
return { method: 'POST', body };
|
||||
}
|
||||
|
||||
function removeDates(arrOrObj) {
|
||||
if (arrOrObj.date) {
|
||||
const copy = { ...arrOrObj };
|
||||
delete copy.date;
|
||||
return copy;
|
||||
}
|
||||
|
||||
if (Array.isArray(arrOrObj)) {
|
||||
if (Array.isArray(arrOrObj[0])) {
|
||||
return arrOrObj.filter(e => e[0] !== 'date');
|
||||
}
|
||||
|
||||
return arrOrObj.filter(e => !Date.parse(e));
|
||||
}
|
||||
|
||||
return arrOrObj;
|
||||
}
|
||||
|
||||
function wait(ms) {
|
||||
return new Promise((fulfill) => setTimeout(fulfill, ms));
|
||||
}
|
||||
|
||||
async function dualFetch(...args) {
|
||||
const [cachedFetchResponse, standardFetchResponse] = await Promise.all([
|
||||
cachedFetch(...args),
|
||||
standardFetch(...args),
|
||||
]);
|
||||
|
||||
return { cachedFetchResponse, standardFetchResponse };
|
||||
}
|
||||
|
||||
beforeEach(async function() {
|
||||
rimraf.sync(CACHE_PATH);
|
||||
fetch = FetchCache.withCache(new MemoryCache());
|
||||
cachedFetch = FetchCache.withCache(new MemoryCache());
|
||||
});
|
||||
|
||||
let res;
|
||||
|
||||
describe('Basic property tests', function() {
|
||||
it('Has a status property', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.status, 200);
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.status, 200);
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||
});
|
||||
|
||||
it('Has a statusText property', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.statusText, 'OK');
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.statusText, 'OK');
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.statusText, standardFetchResponse.statusText);
|
||||
});
|
||||
|
||||
it('Has a url property', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.url, TWO_HUNDRED_URL);
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.url, TWO_HUNDRED_URL);
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.url, standardFetchResponse.url);
|
||||
});
|
||||
|
||||
it('Has an ok property', async function() {
|
||||
res = await fetch(FOUR_HUNDRED_URL);
|
||||
assert.strictEqual(res.ok, false);
|
||||
assert.strictEqual(res.status, 400);
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(FOUR_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
|
||||
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||
|
||||
res = await fetch(FOUR_HUNDRED_URL);
|
||||
assert.strictEqual(res.ok, false);
|
||||
assert.strictEqual(res.status, 400);
|
||||
});
|
||||
|
||||
it('Has a headers property', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.notStrictEqual(res.headers, undefined);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
assert.notStrictEqual(res.headers, undefined);
|
||||
cachedFetchResponse = await cachedFetch(FOUR_HUNDRED_URL);
|
||||
assert.strictEqual(cachedFetchResponse.ok, standardFetchResponse.ok);
|
||||
assert.strictEqual(cachedFetchResponse.status, standardFetchResponse.status);
|
||||
});
|
||||
|
||||
it('Has a redirected property', async function() {
|
||||
res = await fetch(THREE_HUNDRED_TWO_URL);
|
||||
assert.strictEqual(res.redirected, true);
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(THREE_HUNDRED_TWO_URL);
|
||||
assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
|
||||
|
||||
res = await fetch(THREE_HUNDRED_TWO_URL);
|
||||
assert.strictEqual(res.redirected, true);
|
||||
cachedFetchResponse = await cachedFetch(THREE_HUNDRED_TWO_URL);
|
||||
assert.strictEqual(cachedFetchResponse.redirected, standardFetchResponse.redirected);
|
||||
});
|
||||
}).timeout(10000);
|
||||
|
||||
describe('Header tests', function() {
|
||||
it('Gets correct raw headers', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates(cachedFetchResponse.headers.raw()),
|
||||
removeDates(standardFetchResponse.headers.raw()),
|
||||
);
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates(cachedFetchResponse.headers.raw()),
|
||||
removeDates(standardFetchResponse.headers.raw()),
|
||||
);
|
||||
});
|
||||
|
||||
it('Gets correct header keys', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
|
||||
});
|
||||
|
||||
it('Gets correct header values', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates([...cachedFetchResponse.headers.values()]),
|
||||
removeDates([...standardFetchResponse.headers.values()]),
|
||||
);
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates([...cachedFetchResponse.headers.values()]),
|
||||
removeDates([...standardFetchResponse.headers.values()]),
|
||||
);
|
||||
});
|
||||
|
||||
it('Gets correct header entries', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates([...cachedFetchResponse.headers.entries()]),
|
||||
removeDates([...standardFetchResponse.headers.entries()]),
|
||||
);
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(
|
||||
removeDates([...cachedFetchResponse.headers.entries()]),
|
||||
removeDates([...standardFetchResponse.headers.entries()]),
|
||||
);
|
||||
});
|
||||
|
||||
it('Can get a header by value', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert(standardFetchResponse.headers.get('content-length'));
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.get('content-length'), standardFetchResponse.headers.get('content-length'));
|
||||
});
|
||||
|
||||
it('Returns undefined for non-existent header', async function() {
|
||||
const headerName = 'zzzz';
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert(!standardFetchResponse.headers.get(headerName));
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.get(headerName), standardFetchResponse.headers.get(headerName));
|
||||
});
|
||||
|
||||
it('Can get whether a header is present', async function() {
|
||||
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
|
||||
assert(standardFetchResponse.headers.has('content-length'));
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));
|
||||
|
||||
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.deepStrictEqual(cachedFetchResponse.headers.has('content-length'), standardFetchResponse.headers.has('content-length'));
|
||||
});
|
||||
}).timeout(10000);
|
||||
|
||||
describe('Cache tests', function() {
|
||||
it('Uses cache', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Can eject from cache', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
|
||||
await res.ejectFromCache();
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Does not error if rejecting from cache twice', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL);
|
||||
it('Does not error if ejecting from cache twice', async function() {
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
await res.ejectFromCache();
|
||||
@@ -118,55 +227,55 @@ describe('Cache tests', function() {
|
||||
});
|
||||
|
||||
it('Gives different string bodies different cache keys', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL, post('a'));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post('b'));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post('b'));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
it('Gives same string bodies same cache keys', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL, post('a'));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post('a'));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post('a'));
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Gives different URLSearchParams different cache keys', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=b')));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=b')));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
it('Gives same URLSearchParams same cache keys', async function() {
|
||||
res = await fetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(new URLSearchParams('a=a')));
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Gives different read streams different cache keys', async function() {
|
||||
const s1 = fs.createReadStream(__filename);
|
||||
const s2 = fs.createReadStream(path.join(__dirname, '..', 'index.js'));
|
||||
const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
|
||||
const s2 = fs.createReadStream(path.join(__dirname, '..', 'src', 'index.js'));
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(s1));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(s2));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(s2));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
it('Gives the same read streams the same cache key', async function() {
|
||||
const s1 = fs.createReadStream(__filename);
|
||||
const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(s1));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(s1));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
@@ -177,10 +286,10 @@ describe('Cache tests', function() {
|
||||
const data2 = new FormData();
|
||||
data2.append('b', 'b');
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(data1));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(data2));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
@@ -191,65 +300,93 @@ describe('Cache tests', function() {
|
||||
const data2 = new FormData();
|
||||
data2.append('a', 'a');
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(data1));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(data1));
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TWO_HUNDRED_URL, post(data2));
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Does not error with custom agent with circular properties', async function() {
|
||||
const agent = new Agent();
|
||||
agent.agent = agent;
|
||||
|
||||
await cachedFetch('http://httpbin.org/status/200', { agent });
|
||||
})
|
||||
}).timeout(10000);
|
||||
|
||||
describe('Data tests', function() {
|
||||
it('Supports request objects', async function() {
|
||||
let request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
|
||||
res = await cachedFetch(request);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
|
||||
res = await cachedFetch(request);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Supports request objects with custom headers', async function() {
|
||||
const request1 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'YYY' } });
|
||||
const request2 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'ZZZ' } });
|
||||
|
||||
res = await cachedFetch(request1);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await cachedFetch(request2);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
it('Refuses to consume body twice', async function() {
|
||||
res = await fetch(TEXT_BODY_URL);
|
||||
res = await cachedFetch(TEXT_BODY_URL);
|
||||
await res.text();
|
||||
|
||||
try {
|
||||
await res.text();
|
||||
throw new Error('The above line should have thrown.');
|
||||
} catch (err) {
|
||||
// It threw
|
||||
assert(err.message.includes('body used already for:'));
|
||||
}
|
||||
});
|
||||
|
||||
it('Can get text body', async function() {
|
||||
res = await fetch(TEXT_BODY_URL);
|
||||
res = await cachedFetch(TEXT_BODY_URL);
|
||||
body = await res.text();
|
||||
assert.strictEqual(body, TEXT_BODY_EXPECTED);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TEXT_BODY_URL);
|
||||
res = await cachedFetch(TEXT_BODY_URL);
|
||||
body = await res.text();
|
||||
assert.strictEqual(body, TEXT_BODY_EXPECTED);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Can get JSON body', async function() {
|
||||
res = await fetch(JSON_BODY_URL);
|
||||
res = await cachedFetch(JSON_BODY_URL);
|
||||
body = await res.json();
|
||||
assert(body.slideshow);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(JSON_BODY_URL);
|
||||
res = await cachedFetch(JSON_BODY_URL);
|
||||
body = await res.json();
|
||||
assert(body.slideshow);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Can get PNG buffer body', async function() {
|
||||
res = await fetch(PNG_BODY_URL);
|
||||
res = await cachedFetch(PNG_BODY_URL);
|
||||
body = await res.buffer();
|
||||
assert.strictEqual(expectedPngBuffer.equals(body), true);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(PNG_BODY_URL);
|
||||
res = await cachedFetch(PNG_BODY_URL);
|
||||
body = await res.buffer();
|
||||
assert.strictEqual(expectedPngBuffer.equals(body), true);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Can stream a body', async function() {
|
||||
res = await fetch(TEXT_BODY_URL);
|
||||
res = await cachedFetch(TEXT_BODY_URL);
|
||||
body = '';
|
||||
|
||||
for await (const chunk of res.body) {
|
||||
@@ -259,7 +396,7 @@ describe('Data tests', function() {
|
||||
assert.strictEqual(TEXT_BODY_EXPECTED, body);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await fetch(TEXT_BODY_URL);
|
||||
res = await cachedFetch(TEXT_BODY_URL);
|
||||
body = '';
|
||||
|
||||
for await (const chunk of res.body) {
|
||||
@@ -269,4 +406,126 @@ describe('Data tests', function() {
|
||||
assert.strictEqual(TEXT_BODY_EXPECTED, body);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Errors if the body type is not supported', async function() {
|
||||
try {
|
||||
await cachedFetch(TEXT_BODY_URL, { body: {} });
|
||||
throw new Error('It was supposed to throw');
|
||||
} catch (err) {
|
||||
assert(err.message.includes('Unsupported body type'));
|
||||
}
|
||||
});
|
||||
|
||||
it('Uses cache even if you make multiple requests at the same time', async function() {
|
||||
const [res1, res2] = await Promise.all([
|
||||
cachedFetch('http://httpbin.org/status/200'),
|
||||
cachedFetch('http://httpbin.org/status/200'),
|
||||
]);
|
||||
|
||||
// One should be false, the other should be true
|
||||
assert(res1.fromCache !== res2.fromCache);
|
||||
});
|
||||
}).timeout(10000);
|
||||
|
||||
describe('Memory cache tests', function() {
|
||||
it('Supports TTL', async function() {
|
||||
cachedFetch = FetchCache.withCache(new MemoryCache({ ttl: 100 }));
|
||||
let res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
|
||||
await wait(200);
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
}).timeout(10000);
|
||||
|
||||
describe('File system cache tests', function() {
|
||||
it('Supports TTL', async function() {
|
||||
cachedFetch = FetchCache.withCache(new FileSystemCache({ ttl: 100 }));
|
||||
let res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
|
||||
await wait(200);
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
});
|
||||
|
||||
it('Can get PNG buffer body', async function() {
|
||||
cachedFetch = FetchCache.withCache(new FileSystemCache());
|
||||
res = await cachedFetch(PNG_BODY_URL);
|
||||
body = await res.buffer();
|
||||
assert.strictEqual(expectedPngBuffer.equals(body), true);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await cachedFetch(PNG_BODY_URL);
|
||||
body = await res.buffer();
|
||||
assert.strictEqual(expectedPngBuffer.equals(body), true);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
|
||||
it('Can eject from cache', async function() {
|
||||
cachedFetch = FetchCache.withCache(new FileSystemCache());
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
|
||||
await res.ejectFromCache();
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, false);
|
||||
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert.strictEqual(res.fromCache, true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cache mode tests', function() {
|
||||
it('Can use the only-if-cached cache control setting via init', async function() {
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
|
||||
assert(!res);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
|
||||
assert(!res);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL);
|
||||
assert(res && !res.fromCache);
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
|
||||
assert(res && res.fromCache);
|
||||
await res.ejectFromCache();
|
||||
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
|
||||
assert(!res);
|
||||
});
|
||||
|
||||
it('Can use the only-if-cached cache control setting via resource', async function() {
|
||||
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
|
||||
assert(!res);
|
||||
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL));
|
||||
assert(res && !res.fromCache);
|
||||
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
|
||||
assert(res && res.fromCache);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cache key tests', function() {
|
||||
it('Can calculate a cache key and check that it exists', async function() {
|
||||
const cache = new MemoryCache();
|
||||
cachedFetch = FetchCache.withCache(cache);
|
||||
await cachedFetch(TWO_HUNDRED_URL);
|
||||
|
||||
const cacheKey = getCacheKey(TWO_HUNDRED_URL);
|
||||
const nonExistentCacheKey = getCacheKey(TEXT_BODY_URL);
|
||||
|
||||
const cacheKeyResult = await cache.get(cacheKey);
|
||||
const nonExistentCacheKeyResult = await cache.get(nonExistentCacheKey);
|
||||
|
||||
assert(cacheKeyResult);
|
||||
assert(!nonExistentCacheKeyResult);
|
||||
});
|
||||
});