add caching of responses (for 5 minutes)

This commit is contained in:
Lee
2023-11-16 15:54:59 +00:00
parent 355a76845e
commit 6b08b8fc7a
5 changed files with 75 additions and 8 deletions

@ -26,7 +26,6 @@ export default class ProxyRoute extends Route {
res.status(400).json(RouteMessages.badRequest("No URL provided"));
return;
}
// TODO: handle rate limiting? and/or caching?
try {
const response = await axios.get(url, {
headers: {

@ -17,6 +17,7 @@
"server": "workspace:*",
"ts-node": "^10.9.1",
"typescript": "^5.2.2",
"utils": "workspace:*"
"utils": "workspace:*",
"node-cache": "^5.1.2"
}
}

@ -1,9 +1,41 @@
import { Request, Response } from "express";
import Cache from "node-cache";
import { Route, RouteMessages } from "server";
import { nodeManager } from "..";
// Paths the route handler skips — presumably noise requests such as the
// browser's automatic favicon fetch. NOTE(review): confirm against the
// request handler that uses this list.
const IGNORED_PATHS = ["/favicon.ico"];
// In-memory response cache; node-cache's stdTTL is in seconds, so each
// entry expires 5 minutes after it is set.
const cache = new Cache({
  stdTTL: 300, // 5 minutes
});
/**
 * Snapshot of a proxied response stored in the cache so a repeat request
 * for the same URL can be replayed without contacting a node again.
 */
type CachedRequest = {
  nodeId: string; // value of the "x-proxy-node" response header
  status: number; // HTTP status code of the upstream response
  // NOTE(review): `any` here disables checking; these are replayed via
  // res.set(...) and res.json(...) — consider typing headers as
  // Record<string, string> and data as unknown once callers are updated.
  headers: any;
  data: any;
};
/**
 * Writes one access-log line for a proxied request.
 *
 * @param nodeId the ID of the node that handled the request
 * @param url the requested URL
 * @param status the HTTP status code of the response
 * @param time elapsed handling time in milliseconds, or `true` when the
 *             response was served from the cache
 */
function log(
  nodeId: string,
  url: string,
  status: number,
  time: number | boolean
) {
  // Render the timing suffix: a cache hit shows "cached" instead of a duration.
  const timing = time === true ? "cached" : `${time}ms`;
  console.log(`[node: ${nodeId}] ${url} - ${status} (${timing})`);
}
export default class ProxyRoute extends Route {
constructor() {
super({ path: "/" });
@ -29,8 +61,19 @@ export default class ProxyRoute extends Route {
}
try {
const cachedRequest = cache.get<CachedRequest>(url);
if (cachedRequest) {
log(cachedRequest.nodeId, url, cachedRequest.status, true);
res
.status(cachedRequest.status)
.set(cachedRequest.headers)
.json(cachedRequest.data);
return;
}
const before = Date.now();
const response = await node.fetch(url);
const nodeId = response.headers["x-proxy-node"];
const data = response.data;
if (response.status === 500) {
@ -38,15 +81,16 @@ export default class ProxyRoute extends Route {
return;
}
// Log the request
console.log(
`[node: ${response.headers["x-proxy-node"]}] ${url} - ${
response.status
} (${Date.now() - before}ms)`
);
log(nodeId, url, response.status, Date.now() - before);
// Return the JSON response
res.status(response.status).set(response.headers).json(data);
cache.set(url, {
nodeId: nodeId,
status: response.status,
headers: response.headers,
data: data,
} as CachedRequest);
} catch (ex: any) {
res.status(500).json(RouteMessages.internalServerError(ex.message || ex));
}