diff --git a/app/gen-server/lib/DocApiForwarder.ts b/app/gen-server/lib/DocApiForwarder.ts index f4834cfb..6ffc4c1e 100644 --- a/app/gen-server/lib/DocApiForwarder.ts +++ b/app/gen-server/lib/DocApiForwarder.ts @@ -55,6 +55,7 @@ export class DocApiForwarder { app.use('/api/docs/:docId/states', withDoc); app.use('/api/docs/:docId/compare', withDoc); app.use('/api/docs/:docId/assign', withDocWithoutAuth); + app.use('/api/docs/:docId/webhooks/queue', withDoc); app.use('^/api/docs$', withoutDoc); } diff --git a/app/server/lib/ActiveDoc.ts b/app/server/lib/ActiveDoc.ts index 2ee01b40..cf24c4d0 100644 --- a/app/server/lib/ActiveDoc.ts +++ b/app/server/lib/ActiveDoc.ts @@ -1712,6 +1712,13 @@ export class ActiveDoc extends EventEmitter { } } + /** + * Clears all outgoing webhook requests queued for this document. + */ + public async clearWebhookQueue() { + await this._triggers.clearWebhookQueue(); + } + /** * Loads an open document from DocStorage. Returns a list of the tables it contains. */ diff --git a/app/server/lib/Authorizer.ts b/app/server/lib/Authorizer.ts index 98632f85..29d90506 100644 --- a/app/server/lib/Authorizer.ts +++ b/app/server/lib/Authorizer.ts @@ -651,6 +651,10 @@ export function assertAccess( throw new ErrorWithCode("AUTH_NO_EDIT", "No write access", details); } } + + if (role === 'owners' && docAuth.access !== 'owners') { + throw new ErrorWithCode("AUTH_NO_OWNER", "No owner access", details); + } } /** diff --git a/app/server/lib/DocApi.ts b/app/server/lib/DocApi.ts index 5a2f0aa9..c3782ce4 100644 --- a/app/server/lib/DocApi.ts +++ b/app/server/lib/DocApi.ts @@ -618,6 +618,14 @@ export class DocWorkerApi { }) ); + // Clears all outgoing webhooks in the queue for this document. 
+ this._app.delete('/api/docs/:docId/webhooks/queue', isOwner, + withDoc(async (activeDoc, req, res) => { + await activeDoc.clearWebhookQueue(); + res.json({success: true}); + }) + ); + // Reload a document forcibly (in fact this closes the doc, it will be automatically // reopened on use). this._app.post('/api/docs/:docId/force-reload', canEdit, throttled(async (req, res) => { diff --git a/app/server/lib/GoogleImport.ts b/app/server/lib/GoogleImport.ts index 82d59366..51ed2610 100644 --- a/app/server/lib/GoogleImport.ts +++ b/app/server/lib/GoogleImport.ts @@ -71,7 +71,7 @@ async function asFetchResponse(req: GaxiosPromise, filename?: string | if (!error.response) { // Fetch throws exception on network error. // https://github.com/node-fetch/node-fetch/blob/master/docs/ERROR-HANDLING.md - throw new FetchError(error.message, "system", error.code || "unknown"); + throw new FetchError(error.message, "system", error); } else { // Fetch returns failure response on http error const resInit = error.response ? 
{ diff --git a/app/server/lib/Triggers.ts b/app/server/lib/Triggers.ts index cac25651..02c7f2cc 100644 --- a/app/server/lib/Triggers.ts +++ b/app/server/lib/Triggers.ts @@ -1,7 +1,6 @@ import {LocalActionBundle} from 'app/common/ActionBundle'; import {ActionSummary, TableDelta} from 'app/common/ActionSummary'; import {MapWithTTL} from 'app/common/AsyncCreate'; -import {delay} from 'app/common/delay'; import {fromTableDataAction, RowRecord, TableColValues, TableDataAction} from 'app/common/DocActions'; import {StringUnion} from 'app/common/StringUnion'; import {MetaRowRecord} from 'app/common/TableData'; @@ -10,11 +9,13 @@ import {summarizeAction} from 'app/server/lib/ActionSummary'; import {ActiveDoc} from 'app/server/lib/ActiveDoc'; import {makeExceptionalDocSession} from 'app/server/lib/DocSession'; import log from 'app/server/lib/log'; +import {matchesBaseDomain} from 'app/server/lib/requestUtils'; +import {delayAbort} from 'app/server/lib/serverUtils'; import {promisifyAll} from 'bluebird'; import * as _ from 'lodash'; +import {AbortController, AbortSignal} from 'node-abort-controller'; import fetch from 'node-fetch'; import {createClient, Multi, RedisClient} from 'redis'; -import {matchesBaseDomain} from 'app/server/lib/requestUtils'; promisifyAll(RedisClient.prototype); @@ -72,6 +73,10 @@ const MAX_QUEUE_SIZE = 1000; const WEBHOOK_CACHE_TTL = 10000; +// A time to wait for between retries of a webhook. Exposed for tests. +const TRIGGER_WAIT_DELAY = + process.env.GRIST_TRIGGER_WAIT_DELAY ? parseInt(process.env.GRIST_TRIGGER_WAIT_DELAY, 10) : 1000; + // Processes triggers for records changed as described in action bundles. // initiating webhooks and automations. // The interesting stuff starts in the handle() method. @@ -107,6 +112,9 @@ export class DocTriggers { // Promise which resolves after we finish fetching the backup queue from redis on startup. private _getRedisQueuePromise: Promise | undefined; + // Abort controller for the loop that sends webhooks. 
+ private _loopAbort: AbortController|undefined; + constructor(private _activeDoc: ActiveDoc) { const redisUrl = process.env.REDIS_URL; if (redisUrl) { @@ -118,6 +126,7 @@ export class DocTriggers { public shutdown() { this._shuttingDown = true; + this._loopAbort?.abort(); if (!this._sending) { void(this._redisClientField?.quitAsync()); } @@ -210,12 +219,30 @@ export class DocTriggers { // Prevent further document activity while the queue is too full. while (this._drainingQueue && !this._shuttingDown) { - await delay(1000); + await delayAbort(1000, this._loopAbort?.signal); } return summary; } + + public async clearWebhookQueue() { + // Make sure we are after start and in sync with redis. + if (this._getRedisQueuePromise) { + await this._getRedisQueuePromise; + } + // Clear in-memory queue. + const removed = this._webHookEventQueue.splice(0, this._webHookEventQueue.length).length; + // Notify the loop that it should restart. + this._loopAbort?.abort(); + // If we have backup in redis, clear it also. + // NOTE: this is subject to a race condition, currently it is not possible, but any future modification probably + // will require some kind of locking over the queue (or a rewrite) + if (removed && this._redisClient) { + await this._redisClient.multi().del(this._redisQueueKey).execAsync(); + } + } + private get _docId() { return this._activeDoc.docName; } @@ -421,14 +448,18 @@ export class DocTriggers { // TODO delay/prevent shutting down while queue isn't empty?
while (!this._shuttingDown) { + this._loopAbort = new AbortController(); if (!this._webHookEventQueue.length) { - await delay(1000); + await delayAbort(TRIGGER_WAIT_DELAY, this._loopAbort.signal).catch(() => {}); continue; } const id = this._webHookEventQueue[0].id; const batch = _.takeWhile(this._webHookEventQueue.slice(0, 100), {id}); const body = JSON.stringify(batch.map(e => e.payload)); const url = await this._getWebHookUrl(id); + if (this._loopAbort.signal.aborted) { + continue; + } let meta: Record|undefined; let success: boolean; @@ -437,7 +468,11 @@ export class DocTriggers { } else { meta = {numEvents: batch.length, webhookId: id, host: new URL(url).host}; this._log("Sending batch of webhook events", meta); - success = await this._sendWebhookWithRetries(url, body); + success = await this._sendWebhookWithRetries(url, body, this._loopAbort.signal); + } + + if (this._loopAbort.signal.aborted) { + continue; } this._webHookEventQueue.splice(0, batch.length); @@ -493,7 +528,7 @@ export class DocTriggers { return this._drainingQueue ? 5 : 20; } - private async _sendWebhookWithRetries(url: string, body: string) { + private async _sendWebhookWithRetries(url: string, body: string, signal: AbortSignal) { const maxWait = 64; let wait = 1; for (let attempt = 0; attempt < this._maxWebhookAttempts; attempt++) { @@ -507,6 +542,7 @@ export class DocTriggers { headers: { 'Content-Type': 'application/json', }, + signal, }); if (response.status === 200) { return true; @@ -516,6 +552,10 @@ export class DocTriggers { this._log(`Webhook sending error: ${e}`, {level: 'warn', attempt}); } + if (signal.aborted) { + return false; + } + // Don't wait any more if this is the last attempt. 
if (attempt >= this._maxWebhookAttempts - 1) { return false; @@ -526,7 +566,11 @@ export class DocTriggers { if (this._shuttingDown) { return false; } - await delay(1000); + try { + await delayAbort(TRIGGER_WAIT_DELAY, signal); + } catch (e) { + return false; + } } if (wait < maxWait) { wait *= 2; diff --git a/app/server/lib/serverUtils.ts b/app/server/lib/serverUtils.ts index 86bb9045..b234fd2a 100644 --- a/app/server/lib/serverUtils.ts +++ b/app/server/lib/serverUtils.ts @@ -4,11 +4,12 @@ import * as net from 'net'; import * as path from 'path'; import { ConnectionOptions } from 'typeorm'; import uuidv4 from 'uuid/v4'; +import {AbortSignal} from 'node-abort-controller'; import {EngineCode} from 'app/common/DocumentSettings'; import log from 'app/server/lib/log'; -import { OpenMode, SQLiteDB } from 'app/server/lib/SQLiteDB'; -import { getDocSessionAccessOrNull, getDocSessionUser, OptDocSession } from './DocSession'; +import {OpenMode, SQLiteDB} from 'app/server/lib/SQLiteDB'; +import {getDocSessionAccessOrNull, getDocSessionUser, OptDocSession} from './DocSession'; /** * Promisify a node-style callback function. E.g. @@ -168,3 +169,28 @@ export function getSupportedEngineChoices(): EngineCode[]|undefined { } return undefined; } + +/** + * Returns a promise that resolves in the given number of milliseconds or rejects + * when the given signal is raised. 
+ */ + export function delayAbort(msec: number, signal?: AbortSignal): Promise { + return new Promise((resolve, reject) => { + let resolved = false; + const timeout = setTimeout(() => { + if (!resolved) { + resolved = true; + resolve(); + } + }, msec); + if (signal?.addEventListener) { + signal.addEventListener('abort', (ev) => { + if (!resolved) { + resolved = true; + clearTimeout(timeout); + reject(ev); + } + }); + } + }); +} diff --git a/package.json b/package.json index da1b77cb..bb920599 100644 --- a/package.json +++ b/package.json @@ -56,7 +56,7 @@ "@types/mocha": "5.2.5", "@types/moment-timezone": "0.5.9", "@types/node": "^14", - "@types/node-fetch": "2.1.2", + "@types/node-fetch": "2.6.2", "@types/pidusage": "2.0.1", "@types/plotly.js": "2.12.1", "@types/qrcode": "1.4.2", @@ -147,6 +147,7 @@ "morgan": "1.9.1", "mousetrap": "1.6.2", "multiparty": "4.2.2", + "node-abort-controller": "3.0.1", "node-fetch": "2.6.7", "pg": "8.6.0", "plotly.js-basic-dist": "2.13.2", diff --git a/test/server/lib/DocApi.ts b/test/server/lib/DocApi.ts index 1e7c8ea6..4e0006fb 100644 --- a/test/server/lib/DocApi.ts +++ b/test/server/lib/DocApi.ts @@ -5,14 +5,10 @@ import {DocState, UserAPIImpl} from 'app/common/UserAPI'; import {testDailyApiLimitFeatures} from 'app/gen-server/entity/Product'; import {AddOrUpdateRecord, Record as ApiRecord} from 'app/plugin/DocApiTypes'; import {CellValue, GristObjCode} from 'app/plugin/GristData'; -import { - applyQueryParameters, - docApiUsagePeriods, - docPeriodicApiUsageKey, - getDocApiUsageKeysToIncr -} from 'app/server/lib/DocApi'; +import {applyQueryParameters, docApiUsagePeriods, docPeriodicApiUsageKey, + getDocApiUsageKeysToIncr} from 'app/server/lib/DocApi'; import log from 'app/server/lib/log'; -import {exitPromise} from 'app/server/lib/serverUtils'; +import {delayAbort, exitPromise} from 'app/server/lib/serverUtils'; import {connectTestingHooks, TestingHooksClient} from 'app/server/lib/TestingHooks'; import axios, {AxiosResponse} from 
'axios'; import {delay} from 'bluebird'; @@ -28,6 +24,7 @@ import fetch from 'node-fetch'; import {tmpdir} from 'os'; import * as path from 'path'; import {createClient, RedisClient} from 'redis'; +import {AbortController} from 'node-abort-controller'; import {configForUser} from 'test/gen-server/testUtils'; import {serveSomething, Serving} from 'test/server/customUtil'; import * as testUtils from 'test/server/testUtils'; @@ -2890,7 +2887,19 @@ function testDocApi() { let redisMonitor: any; let redisCalls: any[]; + // Create couple of promises that can be used to monitor + // if the endpoint was called. + const successCalled = signal(); + const notFoundCalled = signal(); + const longStarted = signal(); + const longFinished = signal(); + + // Create an abort controller for the latest request. We will + // use it to abort the delay on the longEndpoint. + let controller = new AbortController(); + before(async function() { + this.timeout(20000); if (!process.env.TEST_REDIS_URL) { this.skip(); } requests = { "add,update": [], @@ -2906,6 +2915,33 @@ function testDocApi() { // TODO test retries on failure and slowness in a new test serving = await serveSomething(app => { app.use(bodyParser.json()); + app.post('/200', ({body}, res) => { + successCalled.emit(body[0].A); + res.sendStatus(200); + res.end(); + }); + app.post('/404', ({body}, res) => { + notFoundCalled.emit(body[0].A); + res.sendStatus(404); // Webhooks treats it as an error and will retry. Probably it shouldn't work this way. + res.end(); + }); + app.post('/long', async ({body}, res) => { + longStarted.emit(body[0].A); + // We are scoping the controller to this call, so any subsequent + // call will have a new controller. Caller can save this value to abort the previous calls. + const scoped = new AbortController(); + controller = scoped; + try { + await delayAbort(2000, scoped.signal); // We don't expect to wait for this. 
+ res.sendStatus(200); + res.end(); + longFinished.emit(body[0].A); + } catch(exc) { + res.sendStatus(200); // Send ok, so that it won't be seen as an error. + res.end(); + longFinished.emit([408, body[0].A]); // We will signal that this is success but after aborting timeout. + } + }); app.post('/:eventTypes', async ({body, params: {eventTypes}}, res) => { requests[eventTypes as keyof WebhookRequests].push(body); res.sendStatus(200); @@ -3062,6 +3098,181 @@ function testDocApi() { ); }); + + it("should clear the outgoing queue", async() => { + // Create a test document. + const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id; + const docId = await userApi.newDoc({name: 'testdoc2'}, ws1); + const doc = userApi.getDocAPI(docId); + await axios.post(`${serverUrl}/api/docs/${docId}/apply`, [ + ['ModifyColumn', 'Table1', 'B', {type: 'Bool'}], + ], chimpy); + // Subscribe helper that returns a method to unsubscribe. + const subscribe = async (endpoint: string) => { + const {data, status} = await axios.post( + `${serverUrl}/api/docs/${docId}/tables/Table1/_subscribe`, + {eventTypes: ['add', 'update'], url: `${serving.url}/${endpoint}`, isReadyColumn: "B"}, chimpy + ); + assert.equal(status, 200); + return () => axios.post( + `${serverUrl}/api/docs/${docId}/tables/Table1/_unsubscribe`, + data, chimpy + ); + }; + + // Try to clear the queue, even if it is empty. + const deleteResult = await axios.delete( + `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy + ); + assert.equal(deleteResult.status, 200); + + const cleanup: (() => Promise)[] = []; + + // Subscribe a valid webhook endpoint. + cleanup.push(await subscribe('200')); + // Subscribe an invalid webhook endpoint. + cleanup.push(await subscribe('404')); + + // Prepare signals, we will be waiting for those two to be called. + successCalled.reset(); + notFoundCalled.reset(); + // Trigger both events. 
+ await doc.addRows("Table1", { + A: [1], + B: [true], + }); + + // Wait for both of them to be called (this is correct order) + await successCalled.waitAndReset(); + await notFoundCalled.waitAndReset(); + + // Broken endpoint will be called multiple times here, and any subsequent triggers for working + // endpoint won't be called. + await notFoundCalled.waitAndReset(); + + // But the working endpoint won't be called more than once. + assert.isFalse(successCalled.called()); + + // Trigger second event. + await doc.addRows("Table1", { + A: [2], + B: [true], + }); + // Error endpoint will be called with the first row (still). + const firstRow = await notFoundCalled.waitAndReset(); + assert.deepEqual(firstRow, 1); + + // But the working endpoint won't be called till we reset the queue. + assert.isFalse(successCalled.called()); + + // Now reset the queue. + await axios.delete( + `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy + ); + assert.isFalse(successCalled.called()); + assert.isFalse(notFoundCalled.called()); + + // Prepare for new calls. + successCalled.reset(); + notFoundCalled.reset(); + // Trigger them. + await doc.addRows("Table1", { + A: [3], + B: [true], + }); + // We will receive data from the 3rd row only (the second one was omitted). + let thirdRow = await successCalled.waitAndReset(); + assert.deepEqual(thirdRow, 3); + thirdRow = await notFoundCalled.waitAndReset(); + assert.deepEqual(thirdRow, 3); + // And the situation will be the same, the working endpoint won't be called till we reset the queue, but + // the error endpoint will be called with the third row multiple times. + await notFoundCalled.waitAndReset(); + assert.isFalse(successCalled.called()); + + // Cleanup everything, we will now test request timeouts.
+ await Promise.all(cleanup.map(fn => fn())).finally(() => cleanup.length = 0); + assert.isTrue((await axios.delete( + `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy + )).status === 200); + + // Create 2 webhooks, one that is very long. + cleanup.push(await subscribe('200')); + cleanup.push(await subscribe('long')); + successCalled.reset(); + longFinished.reset(); + longStarted.reset(); + // Trigger them. + await doc.addRows("Table1", { + A: [4], + B: [true], + }); + // 200 will be called immediately. + await successCalled.waitAndReset(); + // Long will be started immediately. + await longStarted.waitAndReset(); + // But it won't be finished. + assert.isFalse(longFinished.called()); + // It will be aborted. + controller.abort(); + assert.deepEqual(await longFinished.waitAndReset(), [408, 4]); + + // Trigger another event. + await doc.addRows("Table1", { + A: [5], + B: [true], + }); + // We are stuck once again on the long call. But this time we won't + // abort it till the end of this test. + assert.deepEqual(await successCalled.waitAndReset(), 5); + assert.deepEqual(await longStarted.waitAndReset(), 5); + assert.isFalse(longFinished.called()); + + // Remember this controller for cleanup. + const controller5 = controller; + // Trigger another event. + await doc.addRows("Table1", { + A: [6], + B: [true], + }); + // We are now completely stuck on the 5th row webhook. + assert.isFalse(successCalled.called()); + assert.isFalse(longFinished.called()); + // Clear the queue, it will free webhooks requests, but it won't cancel long handler on the external server + // so it is still waiting. + assert.isTrue((await axios.delete( + `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy + )).status === 200); + // Now we can release the stuck request. + controller5.abort(); + // We will be cancelled from the 5th row. 
+ assert.deepEqual(await longFinished.waitAndReset(), [408, 5]); + + // We won't be called for the 6th row at all, as it was stuck and the queue was purged. + assert.isFalse(successCalled.called()); + assert.isFalse(longStarted.called()); + + // Trigger next event. + await doc.addRows("Table1", { + A: [7], + B: [true], + }); + // We will be called once again with a new 7th row. + assert.deepEqual(await successCalled.waitAndReset(), 7); + assert.deepEqual(await longStarted.waitAndReset(), 7); + // But we are stuck again. + assert.isFalse(longFinished.called()); + // And we can abort current request from 7th row (6th row was skipped). + controller.abort(); + assert.deepEqual(await longFinished.waitAndReset(), [408, 7]); + + // Cleanup all + await Promise.all(cleanup.map(fn => fn())).finally(() => cleanup.length = 0); + assert.isTrue((await axios.delete( + `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy + )).status === 200); + }); + }); describe("Allowed Origin", () => { @@ -3252,6 +3463,7 @@ class TestServer { APP_HOME_URL: _homeUrl, ALLOWED_WEBHOOK_DOMAINS: `example.com,localhost:${webhooksTestPort}`, GRIST_ALLOWED_HOSTS: `example.com,localhost`, + GRIST_TRIGGER_WAIT_DELAY: '200', ...process.env }; @@ -3339,3 +3551,41 @@ async function setupDataDir(dir: string) { 'ApiDataRecordsTest.grist', path.resolve(dir, docIds.ApiDataRecordsTest + '.grist')); } + +/** + * Helper that creates a promise that can be resolved from outside. 
+ */ +function signal() { + let resolve: null | ((data: any) => void) = null; + let promise: null | Promise = null; + let called = false; + return { + emit(data: any) { + if (!resolve) { + throw new Error("signal.emit() called before signal.reset()"); + } + called = true; + resolve(data); + }, + async wait() { + if (!promise) { + throw new Error("signal.wait() called before signal.reset()"); + } + return await promise; + }, + async waitAndReset() { + try { + return await this.wait(); + } finally { + this.reset(); + } + }, + called() { + return called; + }, + reset() { + called = false; + promise = new Promise((res) => { resolve = res; }); + } + }; +} diff --git a/yarn.lock b/yarn.lock index d82c2c08..e07792cf 100644 --- a/yarn.lock +++ b/yarn.lock @@ -481,12 +481,13 @@ dependencies: moment ">=2.14.0" -"@types/node-fetch@2.1.2": - version "2.1.2" - resolved "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.1.2.tgz" - integrity sha512-XroxUzLpKuL+CVkQqXlffRkEPi4Gh3Oui/mWyS7ztKiyqVxiU+h3imCW5I2NQmde5jK+3q++36/Q96cyRWsweg== +"@types/node-fetch@2.6.2": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== dependencies: "@types/node" "*" + form-data "^3.0.0" "@types/node@*": version "14.0.1" @@ -1873,9 +1874,9 @@ combine-source-map@^0.8.0, combine-source-map@~0.8.0: lodash.memoize "~3.0.3" source-map "~0.5.3" -combined-stream@^1.0.6, combined-stream@~1.0.6: +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: version "1.0.8" - resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== 
dependencies: delayed-stream "~1.0.0" @@ -3039,6 +3040,15 @@ forever-agent@~0.6.1: resolved "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + form-data@~2.3.2: version "2.3.3" resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" @@ -4932,6 +4942,11 @@ nise@^1.4.6: lolex "^5.0.1" path-to-regexp "^1.7.0" +node-abort-controller@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.0.1.tgz#f91fa50b1dee3f909afabb7e261b1e1d6b0cb74e" + integrity sha512-/ujIVxthRs+7q6hsdjHMaj8hRG9NuWmwrz+JdRwZ14jdFoKSkm+vDsCbF9PLpnSqjaWQJuTmVtcWHNLr+vrOFw== + node-addon-api@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.0.tgz"