54 Commits

Author SHA1 Message Date
Randall
ea30b6b38e Stop testing on Node v16, start testing on Node v21 2023-10-17 18:58:41 -04:00
dependabot[bot]
571b65cdd2 Bump @babel/traverse from 7.14.5 to 7.23.2
Bumps [@babel/traverse](https://github.com/babel/babel/tree/HEAD/packages/babel-traverse) from 7.14.5 to 7.23.2.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.23.2/packages/babel-traverse)

---
updated-dependencies:
- dependency-name: "@babel/traverse"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-10-17 18:57:45 -04:00
dependabot[bot]
b0348460a7 Bump word-wrap from 1.2.3 to 1.2.4
Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.4.
- [Release notes](https://github.com/jonschlinkert/word-wrap/releases)
- [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.4)

---
updated-dependencies:
- dependency-name: word-wrap
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-18 19:01:50 -04:00
mistval
44a0ffb8f3 Update node-fetch to 2.6.11 2023-05-20 12:57:02 -04:00
Randall
41d52f39c8 Stop testing on Node 12 and 19, start testing on 16 and 20. 2023-05-12 23:22:59 -04:00
dependabot[bot]
3b92b39430 Bump yaml and husky
Removes [yaml](https://github.com/eemeli/yaml). It's no longer used after updating ancestor dependency [husky](https://github.com/typicode/husky). These dependencies need to be updated together.


Removes `yaml`

Updates `husky` from 4.3.8 to 8.0.3
- [Release notes](https://github.com/typicode/husky/releases)
- [Commits](https://github.com/typicode/husky/compare/v4.3.8...v8.0.3)

---
updated-dependencies:
- dependency-name: yaml
  dependency-type: indirect
- dependency-name: husky
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-04-24 19:42:26 -04:00
mistval
b234de4b69 Update dependency version to get fix for memory leak 2023-04-07 19:19:39 -04:00
mistval
44786dfb68 readme formatting 2023-03-13 21:00:02 -04:00
mistval
5f48f0fc8a add support for only-if-cached and expose cache key calculation function 2023-03-13 20:59:09 -04:00
mistval
4f93c9ba1c test on different node versions 2023-03-13 20:11:09 -04:00
mistval
faf73e49d8 test on different node versions 2023-03-13 20:09:41 -04:00
mistval
0173de3f6d update CI pipeline to Node 18 2023-01-07 10:27:03 -05:00
dependabot[bot]
decb904331 Bump minimatch from 3.0.4 to 3.1.2
Bumps [minimatch](https://github.com/isaacs/minimatch) from 3.0.4 to 3.1.2.
- [Release notes](https://github.com/isaacs/minimatch/releases)
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v3.0.4...v3.1.2)

---
updated-dependencies:
- dependency-name: minimatch
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-07 10:23:57 -05:00
dependabot[bot]
7f502a98a2 Bump json5 from 1.0.1 to 1.0.2
Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2.
- [Release notes](https://github.com/json5/json5/releases)
- [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md)
- [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2)

---
updated-dependencies:
- dependency-name: json5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-07 10:22:41 -05:00
dependabot[bot]
18b276c5dd Bump semver-regex from 3.1.3 to 3.1.4
Bumps [semver-regex](https://github.com/sindresorhus/semver-regex) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/sindresorhus/semver-regex/releases)
- [Commits](https://github.com/sindresorhus/semver-regex/commits/v3.1.4)

---
updated-dependencies:
- dependency-name: semver-regex
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-07-03 23:40:31 -04:00
dependabot[bot]
cd6f4f01e3 Bump minimist from 1.2.5 to 1.2.6
Bumps [minimist](https://github.com/substack/minimist) from 1.2.5 to 1.2.6.
- [Release notes](https://github.com/substack/minimist/releases)
- [Commits](https://github.com/substack/minimist/compare/1.2.5...1.2.6)

---
updated-dependencies:
- dependency-name: minimist
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-03-26 09:12:28 -04:00
mistval
fee0cdd085 remove url import 2022-02-21 10:37:23 -05:00
mistval
2bc4cd6de0 update version 2022-02-19 08:55:21 -05:00
mistval
51e5754c3e update eslint 2022-02-19 08:54:14 -05:00
mistval
29de8ee068 update dependencies 2022-02-19 08:51:56 -05:00
dependabot[bot]
f1128f2f64 Bump node-fetch from 2.6.1 to 2.6.7 (#19)
Bumps [node-fetch](https://github.com/node-fetch/node-fetch) from 2.6.1 to 2.6.7.
- [Release notes](https://github.com/node-fetch/node-fetch/releases)
- [Commits](https://github.com/node-fetch/node-fetch/compare/v2.6.1...v2.6.7)

---
updated-dependencies:
- dependency-name: node-fetch
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-02-19 08:49:08 -05:00
mistval
0397d759dd update mocha 2022-02-19 08:48:33 -05:00
mistval
a68dd5b0d0 npm audit fix 2022-02-19 08:46:34 -05:00
dependabot[bot]
ba8ff5d257 Bump ajv from 6.12.0 to 6.12.6 (#18)
Bumps [ajv](https://github.com/ajv-validator/ajv) from 6.12.0 to 6.12.6.
- [Release notes](https://github.com/ajv-validator/ajv/releases)
- [Commits](https://github.com/ajv-validator/ajv/compare/v6.12.0...v6.12.6)

---
updated-dependencies:
- dependency-name: ajv
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-02-19 08:44:53 -05:00
dependabot[bot]
9c40ddf786 Bump tar from 6.1.5 to 6.1.11 (#14)
Bumps [tar](https://github.com/npm/node-tar) from 6.1.5 to 6.1.11.
- [Release notes](https://github.com/npm/node-tar/releases)
- [Changelog](https://github.com/npm/node-tar/blob/main/CHANGELOG.md)
- [Commits](https://github.com/npm/node-tar/compare/v6.1.5...v6.1.11)

---
updated-dependencies:
- dependency-name: tar
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-08-31 20:34:16 -04:00
Randall Schmidt
9cea80be47 fix typo in readme 2021-08-19 15:09:44 -04:00
dependabot[bot]
a386a54fbb Bump path-parse from 1.0.6 to 1.0.7 (#13)
Bumps [path-parse](https://github.com/jbgutierrez/path-parse) from 1.0.6 to 1.0.7.
- [Release notes](https://github.com/jbgutierrez/path-parse/releases)
- [Commits](https://github.com/jbgutierrez/path-parse/commits/v1.0.7)

---
updated-dependencies:
- dependency-name: path-parse
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-08-12 08:55:50 -04:00
Randall Schmidt
b7190b933a bump version 2021-08-07 17:32:46 -04:00
Randall Schmidt
55b92ee40f have FileSystemCache TTLs survive the process exiting 2021-08-07 17:32:13 -04:00
dependabot[bot]
6607a71af6 Bump tar from 6.1.0 to 6.1.5 (#12)
Bumps [tar](https://github.com/npm/node-tar) from 6.1.0 to 6.1.5.
- [Release notes](https://github.com/npm/node-tar/releases)
- [Changelog](https://github.com/npm/node-tar/blob/main/CHANGELOG.md)
- [Commits](https://github.com/npm/node-tar/compare/v6.1.0...v6.1.5)

---
updated-dependencies:
- dependency-name: tar
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-08-04 09:03:10 -04:00
Randall Schmidt
0f5f1edb22 switch to ESM examples 2021-07-11 09:11:26 -04:00
Randall Schmidt
d6f73c4778 update description 2021-07-11 09:08:46 -04:00
Randall Schmidt
0c8c0d4520 update readme 2021-07-11 08:58:18 -04:00
Randall Schmidt
2f14847a01 update readme 2021-07-11 08:57:39 -04:00
Randall Schmidt
e3ecf3b981 update readme 2021-07-11 08:52:32 -04:00
Randall Schmidt
05a146fef1 update readme 2021-07-11 08:49:52 -04:00
Randall Schmidt
dba1e2fbd3 bump version 2021-07-11 08:48:48 -04:00
Randall Schmidt
ad572f3f29 update readme 2021-07-11 08:47:20 -04:00
Randall Schmidt
6023b56164 fix weird stream behavior in some cases 2021-07-09 15:21:11 -04:00
Randall Schmidt
675ed9bbf2 compile to commonjs 2021-07-09 14:36:43 -04:00
Randall Schmidt
998d18ae84 move source into src dir 2021-07-09 14:12:55 -04:00
Randall Schmidt
d5ab4d4b18 update README 2021-07-09 13:52:50 -04:00
Randall Schmidt
847b330489 support Request object 2021-07-09 13:41:18 -04:00
Randall Schmidt
dae57d2604 make cache get used even if you make a bunch of identical requests immediately 2021-07-05 22:00:12 -04:00
Randall Schmidt
6cd42272c4 add filesystem caching with cacache 2021-07-05 21:40:53 -04:00
Randall Schmidt
e8ad8da0bb overhaul to use node-fetch internals 2021-07-05 18:14:42 -04:00
Randall Schmidt
45ca35f057 convert to ESM 2021-07-05 14:17:21 -04:00
Randall Schmidt
af1e977620 update README 2021-07-05 11:07:08 -04:00
Randall Schmidt
ef5939f571 update README 2021-07-05 11:04:15 -04:00
Randall Schmidt
e356e485b6 bump version 2021-07-03 10:25:00 -04:00
Randall Schmidt
a11a135234 update readme 2021-07-03 10:24:26 -04:00
Randall Schmidt
524308aa61 fix error with custom agent with circular reference 2021-07-03 10:20:32 -04:00
Randall Schmidt
bffaa2aadc fix GitHub actions dir name 2021-06-13 09:52:51 -04:00
Randall
b8da556091 Version 2.0 2021-06-12 19:26:05 -04:00
20 changed files with 6107 additions and 2066 deletions

.eslintrc.js → .eslintrc.json

@ -1,4 +1,4 @@
module.exports = {
{
  "env": {
    "commonjs": true,
    "es6": true,
@ -15,5 +15,7 @@ module.exports = {
    "ecmaVersion": 2018
  },
  "rules": {
    "import/extensions": "off",
    "import/prefer-default-export": "off"
  }
}
};


@ -6,12 +6,16 @@ jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        node-version: [18.x, 20.x, 21.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 14.x
      - name: Use Node.js
        uses: actions/setup-node@v1
        with:
          node-version: 14.x
          node-version: ${{ matrix.node-version }}
      - run: npm ci
      - run: npm run lint
      - run: npm run coverage

.gitignore

@ -105,3 +105,5 @@ dist
# Other
.cache
commonjs

.npmignore

@ -1,4 +1,4 @@
.eslintrc.js
.eslintrc.json
test
.cache
.nyc_output

README.md

@ -11,7 +11,7 @@ By default responses are cached in memory, but you can also cache to files on di
Require it and use it the same way you would use node-fetch:
```js
const fetch = require('node-fetch-cache');
import fetch from 'node-fetch-cache';
fetch('http://google.com')
.then(response => response.text())
@ -22,86 +22,29 @@ The next time you `fetch('http://google.com')`, the response will be returned fr
## API
This module aims to expose the same API as `node-fetch` does for the most common use cases, but may not support some of the less common functions, properties, and use cases.
This module's fetch function has almost the exact same API as node-fetch, and you should consult [the node-fetch documentation](https://www.npmjs.com/package/node-fetch) for how to use it.
### const fetch = require('node-fetch-cache');
This module just adds one extra function to the response object:
Load the module.
### res.ejectFromCache(): Promise\<void\>
### await fetch(resource [, init])
This function can be used to eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.
Same arguments as [node-fetch](https://www.npmjs.com/package/node-fetch).
This module caches ALL responses, even those with 4xx and 5xx response statuses. You can use this function to uncache such responses if desired. For example:
Returns a **CachedResponse**.
```js
import fetch from 'node-fetch-cache';
### await CachedResponse.ejectFromCache()
Eject the response from the cache, so that the next request will perform a true HTTP request rather than returning a cached response.
Keep in mind that this module caches **all** responses, even if they return errors. You might want to use this function in certain cases like receiving a 5xx response status, so that you can retry requests.
### await CachedResponse.text()
Returns the body as a string, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### await CachedResponse.json()
Returns the body as a JavaScript object, parsed from JSON, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### await CachedResponse.buffer()
Returns the body as a Buffer, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### CachedResponse.status
Returns the HTTP status code of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### CachedResponse.statusText
Returns a text representation of the response status, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### CachedResponse.ok
Returns true if the request returned a successful response status, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### CachedResponse.redirected
Returns true if the request was redirected, false otherwise, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### CachedResponse.headers
Returns a **ResponseHeaders** object representing the headers of the response, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.entries()
Returns the raw headers as an array of `[key, value]` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.keys()
Returns an array of all header keys, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.values()
Returns an array of all header values, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.get(key)
Returns the value of the header with the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.has(key)
Returns true if the headers has a value for the given key, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
### ResponseHeaders.raw
Returns the headers as an object of `{ "key": "value" }` pairs, same as [node-fetch](https://www.npmjs.com/package/node-fetch).
## Streaming
This module supports streams like [node-fetch](https://www.npmjs.com/package/node-fetch) does, but with a couple of caveats you should be aware of if you want to use streams.
1. Response bodies are always read into memory even if you stream them to disk. That means if you need to stream large responses that don't fit into RAM, this module may be unsuitable.
2. When streaming a request body with fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**. This module may be unsuitable if you need to stream files in requests and the content of those files can change.
fetch('http://google.com')
.then(async response => {
if (!response.ok) {
await response.ejectFromCache();
throw new Error('Non-okay response from google.com');
} else {
return response.text();
}
}).then(text => console.log(text));
```
## Cache Customization
@ -114,7 +57,7 @@ This is the default cache delegate. It caches responses in-process in a POJO.
Usage:
```js
const { fetchBuilder, MemoryCache } = require('node-fetch-cache');
import { fetchBuilder, MemoryCache } from 'node-fetch-cache';
const fetch = fetchBuilder.withCache(new MemoryCache(options));
```
@ -135,10 +78,12 @@ Cache to a directory on disk. This allows the cache to survive the process exiti
Usage:
```js
const { fetchBuilder, FileSystemCache } = require('node-fetch-cache');
import { fetchBuilder, FileSystemCache } from 'node-fetch-cache';
const fetch = fetchBuilder.withCache(new FileSystemCache(options));
```
Options:
```js
{
cacheDirectory: '/my/cache/directory/path', // Specify where to keep the cache. If undefined, '.cache' is used by default. If this directory does not exist, it will be created.
@ -148,41 +93,69 @@ const fetch = fetchBuilder.withCache(new FileSystemCache(options));
### Provide Your Own
You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, value)`, `get(key)`, and `remove(key)` functions.
You can implement a caching delegate yourself. The cache simply needs to be an object that has `set(key, bodyStream, bodyMeta)`, `get(key)`, and `remove(key)` functions.
The set function must accept a key (which will be a string) and a value (which will be a JSON-serializable JS object) and store them.
Check the built-in [MemoryCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/memory_cache.js) and [FileSystemCache](https://github.com/mistval/node-fetch-cache/blob/master/src/classes/caching/file_system_cache.js) for examples.
The get function should accept a key and return whatever value was set for that key (or `undefined`/`null` if there is no value for that key).
The set function must accept a key (which will be a string), a body stream, and a metadata object (which will be a JSON-serializable JS object). It must store these, and then return an object with a `bodyStream` property, containing a fresh, unread stream of the body content, as well as a `metaData` property, containing the same metaData that was passed in.
The remove function should accept a key and remove the cached value associated with that key, if any.
The get function should accept a key and return undefined if no cached value is found, or else an object with a `bodyStream` property, containing a stream of the body content, as well as a `metaData` property, containing the metadata that was stored via the `set(key, bodyStream, bodyMeta)` function.
Both functions can be async.
The remove function should accept a key and remove the cached value associated with that key, if any. It is also safe for your caching delegate to remove values from the cache arbitrarily if desired (for example if you want to implement a TTL in the caching delegate).
It is safe to remove values from the cache arbitrarily (for example if you implement a TTL in the caching delegate).
All three functions may be async.
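To illustrate that contract, here is a rough sketch of an in-memory delegate (example code, not part of this repository; the class name is invented):

```js
import { Readable } from 'stream';

// Sketch of a delegate implementing set(key, bodyStream, metaData),
// get(key), and remove(key).
class SimpleStreamingMemoryCache {
  constructor() {
    this.store = new Map();
  }

  async set(key, bodyStream, metaData) {
    // Buffer the body so it can be replayed on later reads.
    const chunks = [];
    for await (const chunk of bodyStream) {
      chunks.push(Buffer.from(chunk));
    }
    this.store.set(key, { bodyBuffer: Buffer.concat(chunks), metaData });
    return this.get(key);
  }

  get(key) {
    const entry = this.store.get(key);
    if (!entry) {
      return undefined;
    }
    // Hand back a fresh, unread stream plus the stored metadata.
    return { bodyStream: Readable.from(entry.bodyBuffer), metaData: entry.metaData };
  }

  remove(key) {
    this.store.delete(key);
  }
}
```

It could then be plugged in like the built-in caches: `const fetch = fetchBuilder.withCache(new SimpleStreamingMemoryCache());`.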
Example: you could make and use your own simple memory cache like this:
## Misc Tips
### Streaming
This module does not support Stream request bodies, except for fs.ReadStream. And when using fs.ReadStream, the cache key is generated based only on the path of the stream, not its content. That means if you stream `/my/desktop/image.png` twice, you will get a cached response the second time, **even if the content of image.png has changed**.
Streams don't quite play nice with the concept of caching based on request characteristics, because we would have to read the stream to the end to find out what's in it and hash it into a proper cache key.
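A short sketch of that caveat (the URL and file path are placeholders; `fromCache` is the flag this module sets on responses):

```js
import fs from 'fs';
import fetch from 'node-fetch-cache';

// Both requests stream the same file path, so they share a cache key.
const first = await fetch('https://example.com/upload', {
  method: 'POST',
  body: fs.createReadStream('/my/desktop/image.png'),
});
console.log(first.fromCache); // false

// Even if image.png changes on disk between these two calls,
// the second request is served from the cache.
const second = await fetch('https://example.com/upload', {
  method: 'POST',
  body: fs.createReadStream('/my/desktop/image.png'),
});
console.log(second.fromCache); // true
```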
### Request Concurrency
Requests with the same cache key are queued. For example, you might wonder if making the same request 100 times simultaneously would result in 100 HTTP requests:
```js
class MyMemoryCache {
set(key, value) {
this[key] = value;
}
import fetch from 'node-fetch-cache';
get(key) {
return this[key];
}
await Promise.all(
Array(100).fill().map(() => fetch('https://google.com')),
);
```
remove(key) {
delete this[key];
}
}
The answer is no. Only one request would be made, and 99 of the `fetch()`s will read the response from the cache.
const fetchBuilder = require('node-fetch-cache');
fetch = fetchBuilder.withCache(new MyMemoryCache());
### Cache-Control: only-if-cached Requests
fetch('http://google.com')
.then(response => response.text())
.then(text => console.log(text));
The HTTP standard describes a [Cache-Control request header](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#request_directives) to control certain aspects of cache behavior. Node-fetch ignores these, but node-fetch-cache respects the `Cache-Control: only-if-cached` directive. When `only-if-cached` is specified, node-fetch-cache will return `undefined` if there is no cached response. No HTTP request will be made. For example:
```js
import fetch from 'node-fetch-cache';
const response = await fetch('https://google.com', {
headers: { 'Cache-Control': 'only-if-cached' }
});
if (response === undefined) {
// No response was found in the cache
}
```
Note that this is slightly different from browser fetch, which returns a `504 Gateway Timeout` response if no cached response is available.
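If you prefer the browser-style behavior, one option is a small wrapper that synthesizes a 504 when nothing is cached. A sketch (the wrapper function is not part of this module's API):

```js
import fetch from 'node-fetch-cache';
import { Response } from 'node-fetch';

async function fetchOnlyIfCached(resource) {
  const response = await fetch(resource, {
    headers: { 'Cache-Control': 'only-if-cached' },
  });

  // Mimic the browser: synthesize a 504 when nothing was cached.
  return response || new Response(null, { status: 504, statusText: 'Gateway Timeout' });
}
```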
### Calculating the Cache Key
This module exports a `getCacheKey()` function to calculate a cache key string from request parameters, which may be useful for enabling some advanced use cases (especially if you want to call cache functions directly). Call `getCacheKey()` exactly like you would call `fetch()`.
```js
import { fetchBuilder, MemoryCache, getCacheKey } from 'node-fetch-cache';
const cache = new MemoryCache();
const fetch = fetchBuilder.withCache(cache);
const rawCacheData = await cache.get(getCacheKey('https://google.com'));
```
## Bugs / Help / Feature Requests / Contributing

classes/caching/file_system_cache.js (deleted)

@ -1,29 +0,0 @@
const FPersist = require('fpersist');
const KeyTimeout = require('./key_timeout.js');
module.exports = class FileSystemCache {
constructor(options = {}) {
this.ttl = options.ttl;
this.keyTimeout = new KeyTimeout();
const cacheDirectory = options.cacheDirectory || '.cache';
this.cache = new FPersist(cacheDirectory);
}
get(key) {
return this.cache.getItem(key);
}
remove(key) {
this.keyTimeout.clearTimeout(key);
return this.cache.deleteItem(key);
}
async set(key, value) {
await this.cache.setItem(key, value);
if (typeof this.ttl === 'number') {
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
}
}
};

classes/caching/memory_cache.js (deleted)

@ -1,26 +0,0 @@
const KeyTimeout = require('./key_timeout.js');
module.exports = class MemoryCache {
constructor(options = {}) {
this.ttl = options.ttl;
this.keyTimeout = new KeyTimeout();
this.cache = {};
}
get(key) {
return this.cache[key];
}
remove(key) {
this.keyTimeout.clearTimeout(key);
delete this.cache[key];
}
set(key, value) {
this.cache[key] = value;
if (typeof this.ttl === 'number') {
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
}
}
};

classes/headers.js (deleted)

@ -1,33 +0,0 @@
class Headers {
constructor(rawHeaders) {
this.rawHeaders = rawHeaders;
}
entries() {
return Object.entries(this.rawHeaders)
.sort((e1, e2) => e1[0].localeCompare(e2[0]))
.map(([key, val]) => [key, val[0]]);
}
keys() {
return this.entries().map((e) => e[0]);
}
values() {
return this.entries().map((e) => e[1]);
}
get(name) {
return (this.rawHeaders[name.toLowerCase()] || [])[0] || null;
}
has(name) {
return !!this.get(name);
}
raw() {
return this.rawHeaders;
}
}
module.exports = Headers;

classes/response.js (deleted)

@ -1,47 +0,0 @@
const stream = require('stream');
const Headers = require('./headers.js');
class Response {
constructor(raw, ejectSelfFromCache, fromCache) {
Object.assign(this, raw);
this.ejectSelfFromCache = ejectSelfFromCache;
this.headers = new Headers(raw.headers);
this.fromCache = fromCache;
this.bodyUsed = false;
if (this.bodyBuffer.type === 'Buffer') {
this.bodyBuffer = Buffer.from(this.bodyBuffer);
}
}
get body() {
return stream.Readable.from(this.bodyBuffer);
}
consumeBody() {
if (this.bodyUsed) {
throw new Error('Error: body used already');
}
this.bodyUsed = true;
return this.bodyBuffer;
}
async text() {
return this.consumeBody().toString();
}
async json() {
return JSON.parse(this.consumeBody().toString());
}
async buffer() {
return this.consumeBody();
}
ejectFromCache() {
return this.ejectSelfFromCache();
}
}
module.exports = Response;

commonjs/wrapper.cjs (new file)

@ -0,0 +1,4 @@
const mod = require('./index.cjs');
module.exports = mod.default;
Object.assign(module.exports, mod);
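This wrapper is what lets CommonJS consumers keep calling `require('node-fetch-cache')` after the package switched to ESM: the rolled-up `index.cjs` bundle's default export becomes `module.exports`, and the named exports are copied onto it. A usage sketch from a CommonJS file (the URL is a placeholder):

```js
// CommonJS consumer; resolved via the "require" entry of the package "exports" map.
const fetch = require('node-fetch-cache');
const { fetchBuilder, FileSystemCache } = fetch;

const cachedFetch = fetchBuilder.withCache(new FileSystemCache({ cacheDirectory: '.cache' }));

cachedFetch('https://example.com')
  .then((response) => response.text())
  .then((text) => console.log(text));
```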

index.js (deleted)

@ -1,115 +0,0 @@
const fetch = require('node-fetch');
const fs = require('fs');
const { URLSearchParams } = require('url');
const crypto = require('crypto');
const Response = require('./classes/response.js');
const MemoryCache = require('./classes/caching/memory_cache.js');
const FileSystemCache = require('./classes/caching/file_system_cache.js');
const CACHE_VERSION = 2;
function md5(str) {
return crypto.createHash('md5').update(str).digest('hex');
}
// Since the boundary in FormData is random,
// we ignore it for purposes of calculating
// the cache key.
function getFormDataCacheKey(formData) {
const cacheKey = { ...formData };
const boundary = formData.getBoundary();
// eslint-disable-next-line no-underscore-dangle
delete cacheKey._boundary;
const boundaryReplaceRegex = new RegExp(boundary, 'g');
// eslint-disable-next-line no-underscore-dangle
cacheKey._streams = cacheKey._streams.map((s) => {
if (typeof s === 'string') {
return s.replace(boundaryReplaceRegex, '');
}
return s;
});
return cacheKey;
}
function getBodyCacheKeyJson(body) {
if (!body) {
return body;
} if (typeof body === 'string') {
return body;
} if (body instanceof URLSearchParams) {
return body.toString();
} if (body instanceof fs.ReadStream) {
return body.path;
} if (body.toString && body.toString() === '[object FormData]') {
return getFormDataCacheKey(body);
}
throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
}
function getCacheKey(requestArguments) {
const resource = requestArguments[0];
const init = requestArguments[1] || {};
if (typeof resource !== 'string') {
throw new Error('The first argument must be a string (fetch.Request is not supported).');
}
const resourceCacheKeyJson = { url: resource };
const initCacheKeyJson = { ...init };
resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);
return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
}
async function createRawResponse(fetchRes) {
const buffer = await fetchRes.buffer();
return {
status: fetchRes.status,
statusText: fetchRes.statusText,
type: fetchRes.type,
url: fetchRes.url,
ok: fetchRes.ok,
headers: fetchRes.headers.raw(),
redirected: fetchRes.redirected,
bodyBuffer: buffer,
};
}
async function getResponse(cache, requestArguments) {
const cacheKey = getCacheKey(requestArguments);
const cachedValue = await cache.get(cacheKey);
const ejectSelfFromCache = () => cache.remove(cacheKey);
if (cachedValue) {
return new Response(cachedValue, ejectSelfFromCache, true);
}
const fetchResponse = await fetch(...requestArguments);
const rawResponse = await createRawResponse(fetchResponse);
await cache.set(cacheKey, rawResponse);
return new Response(rawResponse, ejectSelfFromCache, false);
}
function createFetchWithCache(cache) {
const fetchCache = (...args) => getResponse(cache, args);
fetchCache.withCache = createFetchWithCache;
return fetchCache;
}
const defaultFetch = createFetchWithCache(new MemoryCache());
module.exports = defaultFetch;
module.exports.fetchBuilder = defaultFetch;
module.exports.MemoryCache = MemoryCache;
module.exports.FileSystemCache = FileSystemCache;

package-lock.json (generated; diff suppressed because it is too large)

package.json

@ -1,13 +1,20 @@
{
"name": "node-fetch-cache",
"version": "2.0.0",
"description": "node-fetch with a persistent cache.",
"main": "index.js",
"version": "3.1.3",
"description": "node-fetch with caching.",
"main": "src/index.js",
"type": "module",
"exports": {
"import": "./src/index.js",
"require": "./commonjs/wrapper.cjs"
},
"scripts": {
"test": "mocha --timeout 10000 --exit",
"buildcjs": "rollup src/index.js --file commonjs/index.cjs --format cjs",
"test": "npm run lintfix && npm run buildcjs && mocha --timeout 10000 --exit",
"coverage": "nyc --reporter=lcov --reporter=text npm test",
"lint": "./node_modules/.bin/eslint .",
"lintfix": "./node_modules/.bin/eslint . --fix"
"lintfix": "./node_modules/.bin/eslint . --fix",
"prepublishOnly": "npm test"
},
"repository": {
"type": "git",
@ -27,18 +34,20 @@
},
"homepage": "https://github.com/mistval/node-fetch-cache#readme",
"devDependencies": {
"eslint": "^6.8.0",
"eslint-config-airbnb-base": "^14.1.0",
"eslint-plugin-import": "^2.20.2",
"eslint": "^8.9.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.25.4",
"form-data": "^3.0.0",
"husky": "^4.3.0",
"mocha": "^8.2.1",
"husky": "^8.0.3",
"mocha": "^9.2.1",
"nyc": "^15.1.0",
"rimraf": "^3.0.2"
"rimraf": "^3.0.2",
"rollup": "^2.53.0"
},
"dependencies": {
"fpersist": "^1.0.5",
"node-fetch": "*"
"cacache": "^15.2.0",
"locko": "^1.0.0",
"node-fetch": "2.6.11"
},
"husky": {
"hooks": {

src/classes/caching/file_system_cache.js (new file)

@ -0,0 +1,84 @@
import cacache from 'cacache';
import { Readable } from 'stream';
function getBodyAndMetaKeys(key) {
return [`${key}body`, `${key}meta`];
}
export class FileSystemCache {
constructor(options = {}) {
this.ttl = options.ttl;
this.cacheDirectory = options.cacheDirectory || '.cache';
}
async get(key) {
const [, metaKey] = getBodyAndMetaKeys(key);
const metaInfo = await cacache.get.info(this.cacheDirectory, metaKey);
if (!metaInfo) {
return undefined;
}
const metaBuffer = await cacache.get.byDigest(this.cacheDirectory, metaInfo.integrity);
const metaData = JSON.parse(metaBuffer);
const { bodyStreamIntegrity, empty, expiration } = metaData;
delete metaData.bodyStreamIntegrity;
delete metaData.empty;
delete metaData.expiration;
if (expiration && expiration < Date.now()) {
return undefined;
}
const bodyStream = empty
? Readable.from(Buffer.alloc(0))
: cacache.get.stream.byDigest(this.cacheDirectory, bodyStreamIntegrity);
return {
bodyStream,
metaData,
};
}
remove(key) {
const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
return Promise.all([
cacache.rm.entry(this.cacheDirectory, bodyKey),
cacache.rm.entry(this.cacheDirectory, metaKey),
]);
}
async set(key, bodyStream, metaData) {
const [bodyKey, metaKey] = getBodyAndMetaKeys(key);
const metaCopy = { ...metaData };
if (typeof this.ttl === 'number') {
metaCopy.expiration = Date.now() + this.ttl;
}
try {
metaCopy.bodyStreamIntegrity = await new Promise((fulfill, reject) => {
bodyStream.pipe(cacache.put.stream(this.cacheDirectory, bodyKey))
.on('integrity', (i) => fulfill(i))
.on('error', (e) => {
reject(e);
});
});
} catch (err) {
if (err.code !== 'ENODATA') {
throw err;
}
metaCopy.empty = true;
}
const metaBuffer = Buffer.from(JSON.stringify(metaCopy));
await cacache.put(this.cacheDirectory, metaKey, metaBuffer);
const cachedData = await this.get(key);
return cachedData;
}
}

src/classes/caching/key_timeout.js

@ -1,4 +1,4 @@
module.exports = class KeyTimeout {
export class KeyTimeout {
constructor() {
this.timeoutHandleForKey = {};
}
@ -13,4 +13,4 @@ module.exports = class KeyTimeout {
callback();
}, durationMs);
}
};
}

src/classes/caching/memory_cache.js (new file)

@ -0,0 +1,47 @@
import { Readable } from 'stream';
import { KeyTimeout } from './key_timeout.js';
function streamToBuffer(stream) {
const chunks = [];
return new Promise((resolve, reject) => {
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (err) => reject(err));
stream.on('end', () => resolve(Buffer.concat(chunks)));
});
}
export class MemoryCache {
constructor(options = {}) {
this.ttl = options.ttl;
this.keyTimeout = new KeyTimeout();
this.cache = {};
}
get(key) {
const cachedValue = this.cache[key];
if (cachedValue) {
return {
bodyStream: Readable.from(cachedValue.bodyBuffer),
metaData: cachedValue.metaData,
};
}
return undefined;
}
remove(key) {
this.keyTimeout.clearTimeout(key);
delete this.cache[key];
}
async set(key, bodyStream, metaData) {
const bodyBuffer = await streamToBuffer(bodyStream);
this.cache[key] = { bodyBuffer, metaData };
if (typeof this.ttl === 'number') {
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
}
return this.get(key);
}
}

src/classes/response.js (new file)

@ -0,0 +1,29 @@
import { Response } from 'node-fetch';
const responseInternalSymbol = Object.getOwnPropertySymbols(new Response())[1];
export class NFCResponse extends Response {
constructor(bodyStream, metaData, ejectFromCache, fromCache) {
super(bodyStream, metaData);
this.ejectFromCache = ejectFromCache;
this.fromCache = fromCache;
}
static serializeMetaFromNodeFetchResponse(res) {
const metaData = {
url: res.url,
status: res.status,
statusText: res.statusText,
headers: res.headers.raw(),
size: res.size,
timeout: res.timeout,
counter: res[responseInternalSymbol].counter,
};
return metaData;
}
ejectFromCache() {
return this.ejectSelfFromCache();
}
}

src/index.js (new file)

@ -0,0 +1,180 @@
import fetch, { Request } from 'node-fetch';
import fs from 'fs';
import crypto from 'crypto';
import locko from 'locko';
import { NFCResponse } from './classes/response.js';
import { MemoryCache } from './classes/caching/memory_cache.js';
const CACHE_VERSION = 4;
function md5(str) {
return crypto.createHash('md5').update(str).digest('hex');
}
// Since the boundary in FormData is random,
// we ignore it for purposes of calculating
// the cache key.
function getFormDataCacheKey(formData) {
const cacheKey = { ...formData };
const boundary = formData.getBoundary();
// eslint-disable-next-line no-underscore-dangle
delete cacheKey._boundary;
const boundaryReplaceRegex = new RegExp(boundary, 'g');
// eslint-disable-next-line no-underscore-dangle
cacheKey._streams = cacheKey._streams.map((s) => {
if (typeof s === 'string') {
return s.replace(boundaryReplaceRegex, '');
}
return s;
});
return cacheKey;
}
function getHeadersCacheKeyJson(headersObj) {
return Object.fromEntries(
Object.entries(headersObj)
.map(([key, value]) => [key.toLowerCase(), value])
.filter(([key, value]) => key !== 'cache-control' || value !== 'only-if-cached'),
);
}
function getBodyCacheKeyJson(body) {
if (!body) {
return body;
} if (typeof body === 'string') {
return body;
} if (body instanceof URLSearchParams) {
return body.toString();
} if (body instanceof fs.ReadStream) {
return body.path;
} if (body.toString && body.toString() === '[object FormData]') {
return getFormDataCacheKey(body);
} if (body instanceof Buffer) {
return body.toString();
}
throw new Error('Unsupported body type. Supported body types are: string, number, undefined, null, url.URLSearchParams, fs.ReadStream, FormData');
}
function getRequestCacheKey(req) {
const headersPojo = Object.fromEntries([...req.headers.entries()]);
return {
cache: req.cache,
credentials: req.credentials,
destination: req.destination,
headers: getHeadersCacheKeyJson(headersPojo),
integrity: req.integrity,
method: req.method,
redirect: req.redirect,
referrer: req.referrer,
referrerPolicy: req.referrerPolicy,
url: req.url,
body: getBodyCacheKeyJson(req.body),
};
}
export function getCacheKey(resource, init = {}) {
const resourceCacheKeyJson = resource instanceof Request
? getRequestCacheKey(resource)
: { url: resource };
const initCacheKeyJson = {
...init,
headers: getHeadersCacheKeyJson(init.headers || {}),
};
resourceCacheKeyJson.body = getBodyCacheKeyJson(resourceCacheKeyJson.body);
initCacheKeyJson.body = getBodyCacheKeyJson(initCacheKeyJson.body);
delete initCacheKeyJson.agent;
return md5(JSON.stringify([resourceCacheKeyJson, initCacheKeyJson, CACHE_VERSION]));
}
function hasOnlyWithCacheOption(resource, init) {
if (
init
&& init.headers
&& Object.entries(init.headers)
.some(([key, value]) => key.toLowerCase() === 'cache-control' && value === 'only-if-cached')
) {
return true;
}
if (resource instanceof Request && resource.headers.get('Cache-Control') === 'only-if-cached') {
return true;
}
return false;
}
async function getResponse(cache, requestArguments) {
const cacheKey = getCacheKey(...requestArguments);
let cachedValue = await cache.get(cacheKey);
const ejectSelfFromCache = () => cache.remove(cacheKey);
if (cachedValue) {
return new NFCResponse(
cachedValue.bodyStream,
cachedValue.metaData,
ejectSelfFromCache,
true,
);
}
if (hasOnlyWithCacheOption(...requestArguments)) {
return undefined;
}
await locko.lock(cacheKey);
try {
cachedValue = await cache.get(cacheKey);
if (cachedValue) {
return new NFCResponse(
cachedValue.bodyStream,
cachedValue.metaData,
ejectSelfFromCache,
true,
);
}
const fetchResponse = await fetch(...requestArguments);
const serializedMeta = NFCResponse.serializeMetaFromNodeFetchResponse(fetchResponse);
const newlyCachedData = await cache.set(
cacheKey,
fetchResponse.body,
serializedMeta,
);
return new NFCResponse(
newlyCachedData.bodyStream,
newlyCachedData.metaData,
ejectSelfFromCache,
false,
);
} finally {
locko.unlock(cacheKey);
}
}
function createFetchWithCache(cache) {
const fetchCache = (...args) => getResponse(cache, args);
fetchCache.withCache = createFetchWithCache;
return fetchCache;
}
const defaultFetch = createFetchWithCache(new MemoryCache());
export default defaultFetch;
export const fetchBuilder = defaultFetch;
export { MemoryCache } from './classes/caching/memory_cache.js';
export { FileSystemCache } from './classes/caching/file_system_cache.js';

test/tests.cjs (new file)

@ -0,0 +1,18 @@
const assert = require('assert');
const fetch = require('../commonjs/wrapper.cjs');
const TWO_HUNDRED_URL = 'https://httpbin.org/status/200';
describe('Commonjs module tests', function() {
it('Can make a request', async function() {
const res = await fetch(TWO_HUNDRED_URL);
assert.strictEqual(res.status, 200);
});
it('Has expected properties', function() {
assert(typeof fetch === 'function');
assert(fetch.MemoryCache);
assert(fetch.FileSystemCache);
assert(fetch.fetchBuilder);
});
});


@ -1,11 +1,16 @@
const fs = require('fs');
const FormData = require('form-data');
const assert = require('assert');
const rimraf = require('rimraf');
const path = require('path');
const { URLSearchParams } = require('url');
const standardFetch = require('node-fetch');
const FetchCache = require('../index.js');
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
import FormData from 'form-data';
import assert from 'assert';
import rimraf from 'rimraf';
import path from 'path';
import { URLSearchParams } from 'url';
import standardFetch from 'node-fetch';
import FetchCache, { MemoryCache, FileSystemCache, getCacheKey } from '../src/index.js';
import { Agent } from 'http';
const __dirname = dirname(fileURLToPath(import.meta.url));
const CACHE_PATH = path.join(__dirname, '..', '.cache');
const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));
@ -59,9 +64,11 @@ async function dualFetch(...args) {
beforeEach(async function() {
rimraf.sync(CACHE_PATH);
cachedFetch = FetchCache.withCache(new FetchCache.MemoryCache());
cachedFetch = FetchCache.withCache(new MemoryCache());
});
let res;
describe('Basic property tests', function() {
it('Has a status property', async function() {
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
@ -123,22 +130,22 @@ describe('Header tests', function() {
it('Gets correct header keys', async function() {
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(cachedFetchResponse.headers.keys(), [...standardFetchResponse.headers.keys()]);
assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(cachedFetchResponse.headers.keys(), [...standardFetchResponse.headers.keys()]);
assert.deepStrictEqual([...cachedFetchResponse.headers.keys()], [...standardFetchResponse.headers.keys()]);
});
it('Gets correct header values', async function() {
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(
removeDates(cachedFetchResponse.headers.values()),
removeDates([...cachedFetchResponse.headers.values()]),
removeDates([...standardFetchResponse.headers.values()]),
);
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(
removeDates(cachedFetchResponse.headers.values()),
removeDates([...cachedFetchResponse.headers.values()]),
removeDates([...standardFetchResponse.headers.values()]),
);
});
@ -146,13 +153,13 @@ describe('Header tests', function() {
it('Gets correct header entries', async function() {
let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(
removeDates(cachedFetchResponse.headers.entries()),
removeDates([...cachedFetchResponse.headers.entries()]),
removeDates([...standardFetchResponse.headers.entries()]),
);
cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
assert.deepStrictEqual(
removeDates(cachedFetchResponse.headers.entries()),
removeDates([...cachedFetchResponse.headers.entries()]),
removeDates([...standardFetchResponse.headers.entries()]),
);
});
@ -252,8 +259,8 @@ describe('Cache tests', function() {
});
it('Gives different read streams different cache keys', async function() {
const s1 = fs.createReadStream(__filename);
const s2 = fs.createReadStream(path.join(__dirname, '..', 'index.js'));
const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
const s2 = fs.createReadStream(path.join(__dirname, '..', 'src', 'index.js'));
res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
assert.strictEqual(res.fromCache, false);
@ -263,7 +270,7 @@ describe('Cache tests', function() {
});
it('Gives the same read streams the same cache key', async function() {
const s1 = fs.createReadStream(__filename);
const s1 = fs.createReadStream(path.join(__dirname, 'expected_png.png'));
res = await cachedFetch(TWO_HUNDRED_URL, post(s1));
assert.strictEqual(res.fromCache, false);
@ -299,17 +306,35 @@ describe('Cache tests', function() {
res = await cachedFetch(TWO_HUNDRED_URL, post(data2));
assert.strictEqual(res.fromCache, true);
});
it('Does not error with custom agent with circular properties', async function() {
const agent = new Agent();
agent.agent = agent;
await cachedFetch('http://httpbin.org/status/200', { agent });
})
}).timeout(10000);
describe('Data tests', function() {
it('Does not support Request objects', async function() {
try {
const request = new standardFetch.Request('https://google.com');
await cachedFetch(request);
throw new Error('The above line should have thrown.');
} catch (err) {
assert(err.message.includes('The first argument must be a string (fetch.Request is not supported).'));
}
it('Supports request objects', async function() {
let request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
res = await cachedFetch(request);
assert.strictEqual(res.fromCache, false);
request = new standardFetch.Request('https://google.com', { body: 'test', method: 'POST' });
res = await cachedFetch(request);
assert.strictEqual(res.fromCache, true);
});
it('Supports request objects with custom headers', async function() {
const request1 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'YYY' } });
const request2 = new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'XXX': 'ZZZ' } });
res = await cachedFetch(request1);
assert.strictEqual(res.fromCache, false);
res = await cachedFetch(request2);
assert.strictEqual(res.fromCache, false);
});
it('Refuses to consume body twice', async function() {
@ -320,7 +345,7 @@ describe('Data tests', function() {
await res.text();
throw new Error('The above line should have thrown.');
} catch (err) {
assert(err.message.includes('Error: body used already'));
assert(err.message.includes('body used already for:'));
}
});
@ -390,11 +415,21 @@ describe('Data tests', function() {
assert(err.message.includes('Unsupported body type'));
}
});
it('Uses cache even if you make multiple requests at the same time', async function() {
const [res1, res2] = await Promise.all([
cachedFetch('http://httpbin.org/status/200'),
cachedFetch('http://httpbin.org/status/200'),
]);
// One should be false, the other should be true
assert(res1.fromCache !== res2.fromCache);
});
}).timeout(10000);
describe('Memory cache tests', function() {
it('Supports TTL', async function() {
cachedFetch = FetchCache.withCache(new FetchCache.MemoryCache({ ttl: 100 }));
cachedFetch = FetchCache.withCache(new MemoryCache({ ttl: 100 }));
let res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, false);
res = await cachedFetch(TWO_HUNDRED_URL);
@ -409,7 +444,7 @@ describe('Memory cache tests', function() {
describe('File system cache tests', function() {
it('Supports TTL', async function() {
cachedFetch = FetchCache.withCache(new FetchCache.FileSystemCache({ ttl: 100 }));
cachedFetch = FetchCache.withCache(new FileSystemCache({ ttl: 100 }));
let res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, false);
res = await cachedFetch(TWO_HUNDRED_URL);
@ -422,7 +457,7 @@ describe('File system cache tests', function() {
});
it('Can get PNG buffer body', async function() {
cachedFetch = FetchCache.withCache(new FetchCache.FileSystemCache());
cachedFetch = FetchCache.withCache(new FileSystemCache());
res = await cachedFetch(PNG_BODY_URL);
body = await res.buffer();
assert.strictEqual(expectedPngBuffer.equals(body), true);
@ -433,4 +468,64 @@ describe('File system cache tests', function() {
assert.strictEqual(expectedPngBuffer.equals(body), true);
assert.strictEqual(res.fromCache, true);
});
it('Can eject from cache', async function() {
cachedFetch = FetchCache.withCache(new FileSystemCache());
res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, false);
res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, true);
await res.ejectFromCache();
res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, false);
res = await cachedFetch(TWO_HUNDRED_URL);
assert.strictEqual(res.fromCache, true);
});
});
describe('Cache mode tests', function() {
it('Can use the only-if-cached cache control setting via init', async function() {
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
assert(!res);
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
assert(!res);
res = await cachedFetch(TWO_HUNDRED_URL);
assert(res && !res.fromCache);
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
assert(res && res.fromCache);
await res.ejectFromCache();
res = await cachedFetch(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } });
assert(!res);
});
it('Can use the only-if-cached cache control setting via resource', async function() {
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
assert(!res);
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL));
assert(res && !res.fromCache);
res = await cachedFetch(new standardFetch.Request(TWO_HUNDRED_URL, { headers: { 'Cache-Control': 'only-if-cached' } }));
assert(res && res.fromCache);
});
});
describe('Cache key tests', function() {
it('Can calculate a cache key and check that it exists', async function() {
const cache = new MemoryCache();
cachedFetch = FetchCache.withCache(cache);
await cachedFetch(TWO_HUNDRED_URL);
const cacheKey = getCacheKey(TWO_HUNDRED_URL);
const nonExistentCacheKey = getCacheKey(TEXT_BODY_URL);
const cacheKeyResult = await cache.get(cacheKey);
const nonExistentCacheKeyResult = await cache.get(nonExistentCacheKey);
assert(cacheKeyResult);
assert(!nonExistentCacheKeyResult);
});
});