import {RequestWithLogin} from 'app/server/lib/Authorizer';
import log from 'app/server/lib/log';
import * as express from 'express';

/**
 * Wrapper for async express endpoints to catch errors and forward them to the error handler.
 */
export function expressWrap(callback: express.RequestHandler): express.RequestHandler {
  return async (req, res, next) => {
    try {
      await callback(req, res, next);
    } catch (err) {
      next(err);
    }
  };
}
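
// Example usage (an illustrative sketch, not part of this module): wrapping an async route
// handler so that a rejected promise reaches the error-handling middleware via next(err).
// The app instance, route path, and fetchThing() helper below are hypothetical.
//
//   app.get('/api/things/:id', expressWrap(async (req, res) => {
//     const thing = await fetchThing(req.params.id);  // a rejection here is forwarded to next()
//     res.json(thing);
//   }));
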
interface JsonErrorHandlerOptions {
  shouldLogBody?: boolean;
  shouldLogParams?: boolean;
}

/**
 * Returns a custom error-handling middleware that responds to errors in json.
 *
 * Currently allows for toggling of logging request bodies and params.
 */
const buildJsonErrorHandler = (options: JsonErrorHandlerOptions = {}): express.ErrorRequestHandler => {
  const {shouldLogBody, shouldLogParams} = options;

  return (err, req, res, _next) => {
    const mreq = req as RequestWithLogin;
    const meta = {
      path: mreq.path,
      userId: mreq.userId,
      altSessionId: mreq.altSessionId,
      body: shouldLogBody !== false ? req.body : undefined,
      params: shouldLogParams !== false ? req.params : undefined,
    };
    const headersNote = res.headersSent ? " (headersSent)" : "";
    log.rawWarn(`Error during api call to ${meta.path}${headersNote}: ${err.message}`, meta);
    let details = err.details && {...err.details};
    const status = details?.status || err.status || 500;
    if (details) {
      // Remove some details exposed for websocket API only.
      delete details.accessMode;
      delete details.status; // TODO: reconcile err.status and details.status, no need for both.
      if (Object.keys(details).length === 0) { details = undefined; }
    }
    if (res.headersSent) {
      // If we've already sent headers, attempting to set them to something else will fail. E.g.
      // this can happen with downloads if a request gets aborted. If so, just close the response;
      // we already reported the error above.
      res.end();
    } else {
      res.status(status).json({error: err.message || 'internal error', details});
    }
  };
};
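
// Response shape produced by the middleware built above (a sketch; the (message, status)
// constructor signature assumed here for ApiError is not defined in this module):
//
//   next(new ApiError('access denied', 403));
//   // -> HTTP 403, body: {"error": "access denied"}
//   // Any err.details (minus accessMode/status, and only if non-empty) is included as "details".
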
/**
 * Error-handling middleware that responds to errors in json. The status code is taken from the
 * error.status property (for which ApiError is convenient), and defaults to 500.
 */
export const jsonErrorHandler: express.ErrorRequestHandler = buildJsonErrorHandler();

/**
 * Variant of `jsonErrorHandler` that skips logging request bodies and params.
 *
 * Should be used for sensitive routes, such as those under '/api/auth/'.
 */
export const secureJsonErrorHandler: express.ErrorRequestHandler = buildJsonErrorHandler({
  shouldLogBody: false,
  shouldLogParams: false,
});
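
// A possible wiring (sketch only; the authRouter and '/api/auth' prefix are hypothetical):
// mount the secure variant after the sensitive routes so their errors are still logged and
// reported in json, but without request bodies or params.
//
//   app.use('/api/auth', authRouter);
//   app.use('/api/auth', secureJsonErrorHandler);
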
/**
 * Middleware that responds with a 404 status and a json error object.
 */
export const jsonNotFoundHandler: express.RequestHandler = (req, res, next) => {
  res.status(404).json({error: `not found: ${req.url}`});
};
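
// Typical registration order (an illustrative sketch; the apiRouter name is hypothetical):
// API routes first, then the json 404 handler for unmatched paths, then the json error handler
// last so it catches errors forwarded by expressWrap.
//
//   app.use('/api', apiRouter);
//   app.use('/api', jsonNotFoundHandler);
//   app.use('/api', jsonErrorHandler);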