add blob support

Randall Schmidt 2021-06-11 16:37:51 -04:00
parent 2f5409c25c
commit 3896ae2832
4 changed files with 167 additions and 7 deletions

@@ -1 +1,2 @@
 test
+classes/blob.js

classes/blob.js (new file, 129 lines)

@@ -0,0 +1,129 @@
/*
This code has been adapted from code in node-fetch.
License text from node-fetch:
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
const Stream = require('stream');
const Readable = Stream.Readable;

const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');

class Blob {
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];
    let size = 0;

    if (blobParts) {
      const a = blobParts;
      const length = Number(a.length);
      for (let i = 0; i < length; i++) {
        const element = a[i];
        let buffer;
        if (element instanceof Buffer) {
          buffer = element;
        } else if (ArrayBuffer.isView(element)) {
          buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
        } else if (element instanceof ArrayBuffer) {
          buffer = Buffer.from(element);
        } else if (element instanceof Blob) {
          buffer = element[BUFFER];
        } else {
          buffer = Buffer.from(typeof element === 'string' ? element : String(element));
        }
        size += buffer.length;
        buffers.push(buffer);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  get size() {
    return this[BUFFER].length;
  }
  get type() {
    return this[TYPE];
  }
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  arrayBuffer() {
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  slice() {
    const size = this.size;

    const start = arguments[0];
    const end = arguments[1];
    let relativeStart, relativeEnd;

    if (start === undefined) {
      relativeStart = 0;
    } else if (start < 0) {
      relativeStart = Math.max(size + start, 0);
    } else {
      relativeStart = Math.min(start, size);
    }

    if (end === undefined) {
      relativeEnd = size;
    } else if (end < 0) {
      relativeEnd = Math.max(size + end, 0);
    } else {
      relativeEnd = Math.min(end, size);
    }

    const span = Math.max(relativeEnd - relativeStart, 0);
    const buffer = this[BUFFER];
    const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}

Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});

module.exports = Blob;
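For reference, a minimal usage sketch of the Blob class above. This is illustrative only and not part of the commit; the require path assumes it is run from the project root, and the inputs are made up.

// Illustrative sketch: build a Blob from mixed parts and read it back.
const Blob = require('./classes/blob.js');

const blob = new Blob(['hello, ', Buffer.from('world')], { type: 'text/plain' });
console.log(blob.size); // 12
console.log(blob.type); // 'text/plain'
blob.text().then(text => console.log(text)); // 'hello, world'

// slice() accepts negative indices relative to the end, like Array#slice.
const tail = blob.slice(-5, undefined, 'text/plain');
tail.text().then(text => console.log(text)); // 'world'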

@@ -1,4 +1,5 @@
 const stream = require('stream');
+const Blob = require('./blob.js');
 const Headers = require('./headers.js');

 class Response {
@@ -27,18 +28,23 @@ class Response {
     return this.bodyBuffer;
   }

-  text() {
+  async text() {
     return this.consumeBody().toString();
   }

-  json() {
+  async json() {
     return JSON.parse(this.consumeBody().toString());
   }

-  buffer() {
+  async buffer() {
     return this.consumeBody();
   }

+  async blob() {
+    const type = this.headers.get('content-type');
+    return new Blob([this.consumeBody()], { type });
+  }
+
   ejectFromCache() {
     return this.ejectSelfFromCache();
   }
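A rough sketch of how the new blob() method can be consumed from calling code. The function name and logging are illustrative, not from this commit; only blob(), size, type, and arrayBuffer() are taken from the classes above.

// Sketch only: given a Response produced by this library, read its body as a Blob.
async function describeBody(response) {
  const blob = await response.blob();   // method added above
  console.log(blob.type);               // derived from the content-type header
  console.log(blob.size);               // byte length of the cached body
  return Buffer.from(await blob.arrayBuffer());
}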

@@ -7,6 +7,7 @@ const { URLSearchParams } = require('url');
 const standardFetch = require('node-fetch');
 const FetchCache = require('../index.js');
 const MemoryCache = require('../classes/caching/memory_cache.js');
+const FileSystemCache = require('../classes/caching/file_system_cache.js');

 const CACHE_PATH = path.join(__dirname, '..', '.cache');
 const expectedPngBuffer = fs.readFileSync(path.join(__dirname, 'expected_png.png'));
@@ -35,7 +36,11 @@ function removeDates(arrOrObj) {
   }

   if (Array.isArray(arrOrObj)) {
-    return [...arrOrObj].filter(e => !Date.parse(e));
+    if (Array.isArray(arrOrObj[0])) {
+      return arrOrObj.filter(e => e[0] !== 'date');
+    }
+
+    return arrOrObj.filter(e => !Date.parse(e));
   }

   return arrOrObj;
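For context, a small illustration of what the updated removeDates() helper now handles: header entries are [name, value] pairs, so it drops the volatile 'date' pair by key instead of trying Date.parse on each element. The values below are made up.

// Illustration only.
const entries = [
  ['content-type', 'image/png'],
  ['date', 'Fri, 11 Jun 2021 20:37:51 GMT'],
];
// removeDates(entries) -> [['content-type', 'image/png']]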
@@ -142,10 +147,16 @@ describe('Header tests', function() {
   it('Gets correct header entries', async function() {
     let { cachedFetchResponse, standardFetchResponse } = await dualFetch(TWO_HUNDRED_URL);

-    assert.deepStrictEqual(cachedFetchResponse.headers.entries(), [...standardFetchResponse.headers.entries()]);
+    assert.deepStrictEqual(
+      removeDates(cachedFetchResponse.headers.entries()),
+      removeDates([...standardFetchResponse.headers.entries()]),
+    );

     cachedFetchResponse = await cachedFetch(TWO_HUNDRED_URL);
-    assert.deepStrictEqual(cachedFetchResponse.headers.entries(), [...standardFetchResponse.headers.entries()]);
+    assert.deepStrictEqual(
+      removeDates(cachedFetchResponse.headers.entries()),
+      removeDates([...standardFetchResponse.headers.entries()]),
+    );
   });

   it('Can get a header by value', async function() {
@@ -373,6 +384,19 @@ describe('Data tests', function() {
     assert.strictEqual(res.fromCache, true);
   });

+  it('Can get PNG blob body', async function() {
+    let { cachedFetchResponse, standardFetchResponse } = await dualFetch(PNG_BODY_URL);
+
+    let cachedBody = await cachedFetchResponse.blob();
+    let standardBody = await standardFetchResponse.blob();
+    assert.strictEqual(cachedBody.size, standardBody.size);
+    assert.strictEqual(cachedBody.type, standardBody.type);
+
+    cachedFetchResponse = await cachedFetch(PNG_BODY_URL);
+    cachedBody = await cachedFetchResponse.blob();
+    assert.strictEqual(cachedBody.size, standardBody.size);
+    assert.strictEqual(cachedBody.type, standardBody.type);
+  });
+
   it('Errors if the body type is not supported', async function() {
     try {
       await cachedFetch(TEXT_BODY_URL, { body: {} });