overhaul to use node-fetch internals
This commit is contained in:
@ -1,5 +1,15 @@
|
||||
import { Readable } from 'stream';
|
||||
import { KeyTimeout } from './key_timeout.js';
|
||||
|
||||
/**
 * Drain a readable stream and collect its entire contents into one Buffer.
 *
 * @param {import('stream').Readable} stream - source stream to consume.
 * @returns {Promise<Buffer>} resolves with the concatenated bytes once the
 *   stream ends; rejects if the stream emits an error.
 */
function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const collected = [];
    stream
      // Normalize every chunk to a Buffer (chunks may arrive as strings).
      .on('data', (piece) => { collected.push(Buffer.from(piece)); })
      .on('error', reject)
      .on('end', () => { resolve(Buffer.concat(collected)); });
  });
}
|
||||
|
||||
export class MemoryCache {
|
||||
constructor(options = {}) {
|
||||
this.ttl = options.ttl;
|
||||
@ -8,7 +18,15 @@ export class MemoryCache {
|
||||
}
|
||||
|
||||
get(key) {
|
||||
return this.cache[key];
|
||||
const cachedValue = this.cache[key];
|
||||
if (cachedValue) {
|
||||
return {
|
||||
bodyStream: Readable.from(cachedValue.bodyBuffer),
|
||||
metaData: cachedValue.metaData,
|
||||
};
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
remove(key) {
|
||||
@ -16,8 +34,9 @@ export class MemoryCache {
|
||||
delete this.cache[key];
|
||||
}
|
||||
|
||||
set(key, value) {
|
||||
this.cache[key] = value;
|
||||
async set(key, bodyStream, metaData) {
|
||||
const bodyBuffer = await streamToBuffer(bodyStream);
|
||||
this.cache[key] = { bodyBuffer, metaData };
|
||||
|
||||
if (typeof this.ttl === 'number') {
|
||||
this.keyTimeout.updateTimeout(key, this.ttl, () => this.remove(key));
|
||||
|
@ -1,42 +1,54 @@
|
||||
import stream from 'stream';
|
||||
import { Headers } from './headers.js';
|
||||
import { Response } from 'node-fetch';
|
||||
import { PassThrough } from 'stream';
|
||||
|
||||
export class Response {
|
||||
constructor(raw, ejectSelfFromCache, fromCache) {
|
||||
Object.assign(this, raw);
|
||||
this.ejectSelfFromCache = ejectSelfFromCache;
|
||||
this.headers = new Headers(raw.headers);
|
||||
const responseInternalSymbol = Object.getOwnPropertySymbols(new Response())[1];
|
||||
|
||||
export class NFCResponse extends Response {
|
||||
constructor(bodyStream, metaData, ejectFromCache, fromCache) {
|
||||
const stream1 = new PassThrough();
|
||||
const stream2 = new PassThrough();
|
||||
|
||||
bodyStream.pipe(stream1);
|
||||
bodyStream.pipe(stream2);
|
||||
|
||||
super(stream1, metaData);
|
||||
this.ejectFromCache = ejectFromCache;
|
||||
this.fromCache = fromCache;
|
||||
this.bodyUsed = false;
|
||||
|
||||
if (this.bodyBuffer.type === 'Buffer') {
|
||||
this.bodyBuffer = Buffer.from(this.bodyBuffer);
|
||||
}
|
||||
this.serializationStream = stream2;
|
||||
}
|
||||
|
||||
get body() {
|
||||
return stream.Readable.from(this.bodyBuffer);
|
||||
static fromNodeFetchResponse(res, ejectFromCache) {
|
||||
const bodyStream = res.body;
|
||||
const metaData = {
|
||||
url: res.url,
|
||||
status: res.status,
|
||||
statusText: res.statusText,
|
||||
headers: res.headers.raw(),
|
||||
size: res.size,
|
||||
timeout: res.timeout,
|
||||
counter: res[responseInternalSymbol].counter,
|
||||
};
|
||||
|
||||
return new NFCResponse(bodyStream, metaData, ejectFromCache, false);
|
||||
}
|
||||
|
||||
consumeBody() {
|
||||
if (this.bodyUsed) {
|
||||
throw new Error('Error: body used already');
|
||||
}
|
||||
|
||||
this.bodyUsed = true;
|
||||
return this.bodyBuffer;
|
||||
static fromCachedResponse(bodyStream, rawMetaData, ejectSelfFromCache) {
|
||||
return new NFCResponse(bodyStream, rawMetaData, ejectSelfFromCache, true);
|
||||
}
|
||||
|
||||
async text() {
|
||||
return this.consumeBody().toString();
|
||||
}
|
||||
|
||||
async json() {
|
||||
return JSON.parse(this.consumeBody().toString());
|
||||
}
|
||||
|
||||
async buffer() {
|
||||
return this.consumeBody();
|
||||
serialize() {
|
||||
return {
|
||||
bodyStream: this.serializationStream,
|
||||
metaData: {
|
||||
url: this.url,
|
||||
status: this.status,
|
||||
statusText: this.statusText,
|
||||
headers: this.headers.raw(),
|
||||
size: this.size,
|
||||
timeout: this.timeout,
|
||||
counter: this[responseInternalSymbol].counter,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
ejectFromCache() {
|
||||
|
Reference in New Issue
Block a user