Mirror of https://github.com/gristlabs/grist-core.git (synced 2024-10-27 20:44:07 +00:00)
(core) Adding DELETE /api/docs/webhooks/queue endpoint to clear the queue
Summary: Creating an API endpoint to cancel any queued webhook messages from a document.

Test Plan: Updated

Reviewers: paulfitz, georgegevoian

Reviewed By: paulfitz, georgegevoian

Differential Revision: https://phab.getgrist.com/D3713
Parent: 29a7eadb85
Commit: 92d4fca855
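For orientation, here is a minimal sketch of how a client could call the new endpoint once this change is deployed. Only the DELETE /api/docs/:docId/webhooks/queue path and the {success: true} response come from this commit; the server URL, document id, and the Authorization: Bearer header are placeholder assumptions about a typical Grist API setup.

import fetch from 'node-fetch';

// Placeholder values; substitute a real server URL, document id, and API key.
const SERVER_URL = 'https://docs.getgrist.com';
const DOC_ID = 'YOUR_DOC_ID';
const API_KEY = 'YOUR_API_KEY';

async function clearWebhookQueue(): Promise<void> {
  // DELETE /api/docs/:docId/webhooks/queue cancels any queued webhook messages.
  const resp = await fetch(`${SERVER_URL}/api/docs/${DOC_ID}/webhooks/queue`, {
    method: 'DELETE',
    headers: {Authorization: `Bearer ${API_KEY}`},
  });
  if (!resp.ok) {
    throw new Error(`Failed to clear webhook queue: ${resp.status}`);
  }
  console.log(await resp.json());   // Expected: {success: true}
}

clearWebhookQueue().catch(err => { console.error(err); process.exit(1); });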
@@ -55,6 +55,7 @@ export class DocApiForwarder {
     app.use('/api/docs/:docId/states', withDoc);
     app.use('/api/docs/:docId/compare', withDoc);
     app.use('/api/docs/:docId/assign', withDocWithoutAuth);
+    app.use('/api/docs/:docId/webhooks/queue', withDoc);
     app.use('^/api/docs$', withoutDoc);
   }

@@ -1712,6 +1712,13 @@ export class ActiveDoc extends EventEmitter {
     }
   }

+  /**
+   * Clears all outgoing webhook requests queued for this document.
+   */
+  public async clearWebhookQueue() {
+    await this._triggers.clearWebhookQueue();
+  }
+
   /**
    * Loads an open document from DocStorage. Returns a list of the tables it contains.
    */
@@ -651,6 +651,10 @@ export function assertAccess(
       throw new ErrorWithCode("AUTH_NO_EDIT", "No write access", details);
     }
   }
+
+  if (role === 'owners' && docAuth.access !== 'owners') {
+    throw new ErrorWithCode("AUTH_NO_OWNER", "No owner access", details);
+  }
 }

 /**
@@ -618,6 +618,14 @@ export class DocWorkerApi {
       })
     );

+    // Clears all outgoing webhooks in the queue for this document.
+    this._app.delete('/api/docs/:docId/webhooks/queue', isOwner,
+      withDoc(async (activeDoc, req, res) => {
+        await activeDoc.clearWebhookQueue();
+        res.json({success: true});
+      })
+    );
+
     // Reload a document forcibly (in fact this closes the doc, it will be automatically
     // reopened on use).
     this._app.post('/api/docs/:docId/force-reload', canEdit, throttled(async (req, res) => {
@@ -71,7 +71,7 @@ async function asFetchResponse(req: GaxiosPromise<Readable>, filename?: string
   if (!error.response) {
     // Fetch throws exception on network error.
     // https://github.com/node-fetch/node-fetch/blob/master/docs/ERROR-HANDLING.md
-    throw new FetchError(error.message, "system", error.code || "unknown");
+    throw new FetchError(error.message, "system", error);
   } else {
     // Fetch returns failure response on http error
     const resInit = error.response ? {
@@ -1,7 +1,6 @@
 import {LocalActionBundle} from 'app/common/ActionBundle';
 import {ActionSummary, TableDelta} from 'app/common/ActionSummary';
 import {MapWithTTL} from 'app/common/AsyncCreate';
-import {delay} from 'app/common/delay';
 import {fromTableDataAction, RowRecord, TableColValues, TableDataAction} from 'app/common/DocActions';
 import {StringUnion} from 'app/common/StringUnion';
 import {MetaRowRecord} from 'app/common/TableData';
@@ -10,11 +9,13 @@ import {summarizeAction} from 'app/server/lib/ActionSummary';
 import {ActiveDoc} from 'app/server/lib/ActiveDoc';
 import {makeExceptionalDocSession} from 'app/server/lib/DocSession';
 import log from 'app/server/lib/log';
+import {matchesBaseDomain} from 'app/server/lib/requestUtils';
+import {delayAbort} from 'app/server/lib/serverUtils';
 import {promisifyAll} from 'bluebird';
 import * as _ from 'lodash';
+import {AbortController, AbortSignal} from 'node-abort-controller';
 import fetch from 'node-fetch';
 import {createClient, Multi, RedisClient} from 'redis';
-import {matchesBaseDomain} from 'app/server/lib/requestUtils';

 promisifyAll(RedisClient.prototype);

@@ -72,6 +73,10 @@ const MAX_QUEUE_SIZE = 1000;

 const WEBHOOK_CACHE_TTL = 10000;

+// A time to wait for between retries of a webhook. Exposed for tests.
+const TRIGGER_WAIT_DELAY =
+  process.env.GRIST_TRIGGER_WAIT_DELAY ? parseInt(process.env.GRIST_TRIGGER_WAIT_DELAY, 10) : 1000;
+
 // Processes triggers for records changed as described in action bundles.
 // initiating webhooks and automations.
 // The interesting stuff starts in the handle() method.
@@ -107,6 +112,9 @@ export class DocTriggers {
   // Promise which resolves after we finish fetching the backup queue from redis on startup.
   private _getRedisQueuePromise: Promise<void> | undefined;

+  // Abort controller for the loop that sends webhooks.
+  private _loopAbort: AbortController|undefined;
+
   constructor(private _activeDoc: ActiveDoc) {
     const redisUrl = process.env.REDIS_URL;
     if (redisUrl) {
@@ -118,6 +126,7 @@ export class DocTriggers {

   public shutdown() {
     this._shuttingDown = true;
+    this._loopAbort?.abort();
     if (!this._sending) {
       void(this._redisClientField?.quitAsync());
     }
@@ -210,12 +219,30 @@ export class DocTriggers {

     // Prevent further document activity while the queue is too full.
     while (this._drainingQueue && !this._shuttingDown) {
-      await delay(1000);
+      await delayAbort(1000, this._loopAbort?.signal);
     }

     return summary;
   }

+
+  public async clearWebhookQueue() {
+    // Make sure we are after start and in sync with redis.
+    if (this._getRedisQueuePromise) {
+      await this._getRedisQueuePromise;
+    }
+    // Clear in-memory queue.
+    const removed = this._webHookEventQueue.splice(0, this._webHookEventQueue.length).length;
+    // Notify the loop that it should restart.
+    this._loopAbort?.abort();
+    // If we have backup in redis, clear it also.
+    // NOTE: this is subject to a race condition, currently it is not possible, but any future modification probably
+    // will require some kind of locking over the queue (or a rewrite)
+    if (removed && this._redisClient) {
+      await this._redisClient.multi().ltrim(this._redisQueueKey, 0, -1).execAsync();
+    }
+  }
+
   private get _docId() {
     return this._activeDoc.docName;
   }
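As an aside on the clearing logic above, the in-memory queue is emptied with a single splice. A small standalone sketch of that idiom (plain TypeScript, independent of any Grist internals):

// splice(0, length) empties the array in place and returns the removed
// elements, so the count of removed items tells us whether anything was queued.
const queue: string[] = ['a', 'b', 'c'];
const removed = queue.splice(0, queue.length).length;
console.log(removed);       // 3
console.log(queue.length);  // 0 -- same array instance, now empty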
@@ -421,14 +448,18 @@ export class DocTriggers {

     // TODO delay/prevent shutting down while queue isn't empty?
     while (!this._shuttingDown) {
+      this._loopAbort = new AbortController();
       if (!this._webHookEventQueue.length) {
-        await delay(1000);
+        await delayAbort(TRIGGER_WAIT_DELAY, this._loopAbort.signal).catch(() => {});
         continue;
       }
       const id = this._webHookEventQueue[0].id;
       const batch = _.takeWhile(this._webHookEventQueue.slice(0, 100), {id});
       const body = JSON.stringify(batch.map(e => e.payload));
       const url = await this._getWebHookUrl(id);
+      if (this._loopAbort.signal.aborted) {
+        continue;
+      }
       let meta: Record<string, any>|undefined;

       let success: boolean;
@@ -437,7 +468,11 @@ export class DocTriggers {
       } else {
         meta = {numEvents: batch.length, webhookId: id, host: new URL(url).host};
         this._log("Sending batch of webhook events", meta);
-        success = await this._sendWebhookWithRetries(url, body);
+        success = await this._sendWebhookWithRetries(url, body, this._loopAbort.signal);
+      }
+
+      if (this._loopAbort.signal.aborted) {
+        continue;
       }

       this._webHookEventQueue.splice(0, batch.length);
@@ -493,7 +528,7 @@ export class DocTriggers {
     return this._drainingQueue ? 5 : 20;
   }

-  private async _sendWebhookWithRetries(url: string, body: string) {
+  private async _sendWebhookWithRetries(url: string, body: string, signal: AbortSignal) {
     const maxWait = 64;
     let wait = 1;
     for (let attempt = 0; attempt < this._maxWebhookAttempts; attempt++) {
@@ -507,6 +542,7 @@ export class DocTriggers {
           headers: {
             'Content-Type': 'application/json',
           },
+          signal,
         });
         if (response.status === 200) {
           return true;
@@ -516,6 +552,10 @@ export class DocTriggers {
         this._log(`Webhook sending error: ${e}`, {level: 'warn', attempt});
       }

+      if (signal.aborted) {
+        return false;
+      }
+
       // Don't wait any more if this is the last attempt.
       if (attempt >= this._maxWebhookAttempts - 1) {
         return false;
@@ -526,7 +566,11 @@ export class DocTriggers {
         if (this._shuttingDown) {
           return false;
         }
-        await delay(1000);
+        try {
+          await delayAbort(TRIGGER_WAIT_DELAY, signal);
+        } catch (e) {
+          return false;
+        }
       }
       if (wait < maxWait) {
         wait *= 2;
@@ -4,11 +4,12 @@ import * as net from 'net';
 import * as path from 'path';
 import { ConnectionOptions } from 'typeorm';
 import uuidv4 from 'uuid/v4';
+import {AbortSignal} from 'node-abort-controller';

 import {EngineCode} from 'app/common/DocumentSettings';
 import log from 'app/server/lib/log';
-import { OpenMode, SQLiteDB } from 'app/server/lib/SQLiteDB';
-import { getDocSessionAccessOrNull, getDocSessionUser, OptDocSession } from './DocSession';
+import {OpenMode, SQLiteDB} from 'app/server/lib/SQLiteDB';
+import {getDocSessionAccessOrNull, getDocSessionUser, OptDocSession} from './DocSession';

 /**
  * Promisify a node-style callback function. E.g.
@@ -168,3 +169,28 @@ export function getSupportedEngineChoices(): EngineCode[]|undefined {
   }
   return undefined;
 }
+
+/**
+ * Returns a promise that resolves in the given number of milliseconds or rejects
+ * when the given signal is raised.
+ */
+export function delayAbort(msec: number, signal?: AbortSignal): Promise<void> {
+  return new Promise<void>((resolve, reject) => {
+    let resolved = false;
+    const timeout = setTimeout(() => {
+      if (!resolved) {
+        resolved = true;
+        resolve();
+      }
+    }, msec);
+    if (signal?.addEventListener) {
+      signal.addEventListener('abort', (ev) => {
+        if (!resolved) {
+          resolved = true;
+          clearTimeout(timeout);
+          reject(ev);
+        }
+      });
+    }
+  });
+}
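A minimal sketch of how the new delayAbort helper behaves, assuming the import paths shown in the diff above: it resolves after the timeout, or rejects with the 'abort' event if the signal fires first.

import {AbortController} from 'node-abort-controller';
import {delayAbort} from 'app/server/lib/serverUtils';

async function example() {
  const controller = new AbortController();
  // Abort the wait after 100ms; delayAbort then rejects with the 'abort' event.
  setTimeout(() => controller.abort(), 100);
  try {
    await delayAbort(5000, controller.signal);
    console.log('waited the full 5 seconds');
  } catch (ev) {
    console.log('wait was aborted early');
  }
}

example().catch(err => console.error(err));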
@@ -56,7 +56,7 @@
     "@types/mocha": "5.2.5",
     "@types/moment-timezone": "0.5.9",
     "@types/node": "^14",
-    "@types/node-fetch": "2.1.2",
+    "@types/node-fetch": "2.6.2",
     "@types/pidusage": "2.0.1",
     "@types/plotly.js": "2.12.1",
     "@types/qrcode": "1.4.2",
@@ -147,6 +147,7 @@
     "morgan": "1.9.1",
     "mousetrap": "1.6.2",
     "multiparty": "4.2.2",
+    "node-abort-controller": "3.0.1",
    "node-fetch": "2.6.7",
     "pg": "8.6.0",
     "plotly.js-basic-dist": "2.13.2",
@@ -5,14 +5,10 @@ import {DocState, UserAPIImpl} from 'app/common/UserAPI';
 import {testDailyApiLimitFeatures} from 'app/gen-server/entity/Product';
 import {AddOrUpdateRecord, Record as ApiRecord} from 'app/plugin/DocApiTypes';
 import {CellValue, GristObjCode} from 'app/plugin/GristData';
-import {
-  applyQueryParameters,
-  docApiUsagePeriods,
-  docPeriodicApiUsageKey,
-  getDocApiUsageKeysToIncr
-} from 'app/server/lib/DocApi';
+import {applyQueryParameters, docApiUsagePeriods, docPeriodicApiUsageKey,
+  getDocApiUsageKeysToIncr} from 'app/server/lib/DocApi';
 import log from 'app/server/lib/log';
-import {exitPromise} from 'app/server/lib/serverUtils';
+import {delayAbort, exitPromise} from 'app/server/lib/serverUtils';
 import {connectTestingHooks, TestingHooksClient} from 'app/server/lib/TestingHooks';
 import axios, {AxiosResponse} from 'axios';
 import {delay} from 'bluebird';
@@ -28,6 +24,7 @@ import fetch from 'node-fetch';
 import {tmpdir} from 'os';
 import * as path from 'path';
 import {createClient, RedisClient} from 'redis';
+import {AbortController} from 'node-abort-controller';
 import {configForUser} from 'test/gen-server/testUtils';
 import {serveSomething, Serving} from 'test/server/customUtil';
 import * as testUtils from 'test/server/testUtils';
@@ -2890,7 +2887,19 @@ function testDocApi() {
     let redisMonitor: any;
     let redisCalls: any[];

+    // Create couple of promises that can be used to monitor
+    // if the endpoint was called.
+    const successCalled = signal();
+    const notFoundCalled = signal();
+    const longStarted = signal();
+    const longFinished = signal();
+
+    // Create an abort controller for the latest request. We will
+    // use it to abort the delay on the longEndpoint.
+    let controller = new AbortController();
+
     before(async function() {
+      this.timeout(20000);
       if (!process.env.TEST_REDIS_URL) { this.skip(); }
       requests = {
         "add,update": [],
@@ -2906,6 +2915,33 @@ function testDocApi() {
       // TODO test retries on failure and slowness in a new test
       serving = await serveSomething(app => {
         app.use(bodyParser.json());
+        app.post('/200', ({body}, res) => {
+          successCalled.emit(body[0].A);
+          res.sendStatus(200);
+          res.end();
+        });
+        app.post('/404', ({body}, res) => {
+          notFoundCalled.emit(body[0].A);
+          res.sendStatus(404); // Webhooks treats it as an error and will retry. Probably it shouldn't work this way.
+          res.end();
+        });
+        app.post('/long', async ({body}, res) => {
+          longStarted.emit(body[0].A);
+          // We are scoping the controller to this call, so any subsequent
+          // call will have a new controller. Caller can save this value to abort the previous calls.
+          const scoped = new AbortController();
+          controller = scoped;
+          try {
+            await delayAbort(2000, scoped.signal); // We don't expect to wait for this.
+            res.sendStatus(200);
+            res.end();
+            longFinished.emit(body[0].A);
+          } catch(exc) {
+            res.sendStatus(200); // Send ok, so that it won't be seen as an error.
+            res.end();
+            longFinished.emit([408, body[0].A]); // We will signal that this is success but after aborting timeout.
+          }
+        });
         app.post('/:eventTypes', async ({body, params: {eventTypes}}, res) => {
           requests[eventTypes as keyof WebhookRequests].push(body);
           res.sendStatus(200);
@@ -3062,6 +3098,181 @@ function testDocApi() {
       );

     });
+
+    it("should clear the outgoing queue", async() => {
+      // Create a test document.
+      const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+      const docId = await userApi.newDoc({name: 'testdoc2'}, ws1);
+      const doc = userApi.getDocAPI(docId);
+      await axios.post(`${serverUrl}/api/docs/${docId}/apply`, [
+        ['ModifyColumn', 'Table1', 'B', {type: 'Bool'}],
+      ], chimpy);
+      // Subscribe helper that returns a method to unsubscribe.
+      const subscribe = async (endpoint: string) => {
+        const {data, status} = await axios.post(
+          `${serverUrl}/api/docs/${docId}/tables/Table1/_subscribe`,
+          {eventTypes: ['add', 'update'], url: `${serving.url}/${endpoint}`, isReadyColumn: "B"}, chimpy
+        );
+        assert.equal(status, 200);
+        return () => axios.post(
+          `${serverUrl}/api/docs/${docId}/tables/Table1/_unsubscribe`,
+          data, chimpy
+        );
+      };
+
+      // Try to clear the queue, even if it is empty.
+      const deleteResult = await axios.delete(
+        `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy
+      );
+      assert.equal(deleteResult.status, 200);
+
+      const cleanup: (() => Promise<any>)[] = [];
+
+      // Subscribe a valid webhook endpoint.
+      cleanup.push(await subscribe('200'));
+      // Subscribe an invalid webhook endpoint.
+      cleanup.push(await subscribe('404'));
+
+      // Prepare signals, we will be waiting for those two to be called.
+      successCalled.reset();
+      notFoundCalled.reset();
+      // Trigger both events.
+      await doc.addRows("Table1", {
+        A: [1],
+        B: [true],
+      });
+
+      // Wait for both of them to be called (this is correct order)
+      await successCalled.waitAndReset();
+      await notFoundCalled.waitAndReset();
+
+      // Broken endpoint will be called multiple times here, and any subsequent triggers for working
+      // endpoint won't be called.
+      await notFoundCalled.waitAndReset();
+
+      // But the working endpoint won't be called more then once.
+      assert.isFalse(successCalled.called());
+
+      // Trigger second event.
+      await doc.addRows("Table1", {
+        A: [2],
+        B: [true],
+      });
+      // Error endpoint will be called with the first row (still).
+      const firstRow = await notFoundCalled.waitAndReset();
+      assert.deepEqual(firstRow, 1);
+
+      // But the working endpoint won't be called till we reset the queue.
+      assert.isFalse(successCalled.called());
+
+      // Now reset the queue.
+      await axios.delete(
+        `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy
+      );
+      assert.isFalse(successCalled.called());
+      assert.isFalse(notFoundCalled.called());
+
+      // Prepare for new calls.
+      successCalled.reset();
+      notFoundCalled.reset();
+      // Trigger them.
+      await doc.addRows("Table1", {
+        A: [3],
+        B: [true],
+      });
+      // We will receive data from the 3rd row only (the second one was omitted).
+      let thirdRow = await successCalled.waitAndReset();
+      assert.deepEqual(thirdRow, 3);
+      thirdRow = await notFoundCalled.waitAndReset();
+      assert.deepEqual(thirdRow, 3);
+      // And the situation will be the same, the working endpoint won't be called till we reset the queue, but
+      // the error endpoint will be called with the third row multiple times.
+      await notFoundCalled.waitAndReset();
+      assert.isFalse(successCalled.called());
+
+      // Cleanup everything, we will now test request timeouts.
+      await Promise.all(cleanup.map(fn => fn())).finally(() => cleanup.length = 0);
+      assert.isTrue((await axios.delete(
+        `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy
+      )).status === 200);
+
+      // Create 2 webhooks, one that is very long.
+      cleanup.push(await subscribe('200'));
+      cleanup.push(await subscribe('long'));
+      successCalled.reset();
+      longFinished.reset();
+      longStarted.reset();
+      // Trigger them.
+      await doc.addRows("Table1", {
+        A: [4],
+        B: [true],
+      });
+      // 200 will be called immediately.
+      await successCalled.waitAndReset();
+      // Long will be started immediately.
+      await longStarted.waitAndReset();
+      // But it won't be finished.
+      assert.isFalse(longFinished.called());
+      // It will be aborted.
+      controller.abort();
+      assert.deepEqual(await longFinished.waitAndReset(), [408, 4]);
+
+      // Trigger another event.
+      await doc.addRows("Table1", {
+        A: [5],
+        B: [true],
+      });
+      // We are stuck once again on the long call. But this time we won't
+      // abort it till the end of this test.
+      assert.deepEqual(await successCalled.waitAndReset(), 5);
+      assert.deepEqual(await longStarted.waitAndReset(), 5);
+      assert.isFalse(longFinished.called());
+
+      // Remember this controller for cleanup.
+      const controller5 = controller;
+      // Trigger another event.
+      await doc.addRows("Table1", {
+        A: [6],
+        B: [true],
+      });
+      // We are now completely stuck on the 5th row webhook.
+      assert.isFalse(successCalled.called());
+      assert.isFalse(longFinished.called());
+      // Clear the queue, it will free webhooks requests, but it won't cancel long handler on the external server
+      // so it is still waiting.
+      assert.isTrue((await axios.delete(
+        `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy
+      )).status === 200);
+      // Now we can release the stuck request.
+      controller5.abort();
+      // We will be cancelled from the 5th row.
+      assert.deepEqual(await longFinished.waitAndReset(), [408, 5]);
+
+      // We won't be called for the 6th row at all, as it was stuck and the queue was purged.
+      assert.isFalse(successCalled.called());
+      assert.isFalse(longStarted.called());
+
+      // Trigger next event.
+      await doc.addRows("Table1", {
+        A: [7],
+        B: [true],
+      });
+      // We will be called once again with a new 7th row.
+      assert.deepEqual(await successCalled.waitAndReset(), 7);
+      assert.deepEqual(await longStarted.waitAndReset(), 7);
+      // But we are stuck again.
+      assert.isFalse(longFinished.called());
+      // And we can abort current request from 7th row (6th row was skipped).
+      controller.abort();
+      assert.deepEqual(await longFinished.waitAndReset(), [408, 7]);
+
+      // Cleanup all
+      await Promise.all(cleanup.map(fn => fn())).finally(() => cleanup.length = 0);
+      assert.isTrue((await axios.delete(
+        `${serverUrl}/api/docs/${docId}/webhooks/queue`, chimpy
+      )).status === 200);
+    });
+
   });

   describe("Allowed Origin", () => {
@@ -3252,6 +3463,7 @@ class TestServer {
       APP_HOME_URL: _homeUrl,
       ALLOWED_WEBHOOK_DOMAINS: `example.com,localhost:${webhooksTestPort}`,
       GRIST_ALLOWED_HOSTS: `example.com,localhost`,
+      GRIST_TRIGGER_WAIT_DELAY: '200',
       ...process.env
     };

@@ -3339,3 +3551,41 @@ async function setupDataDir(dir: string) {
     'ApiDataRecordsTest.grist',
     path.resolve(dir, docIds.ApiDataRecordsTest + '.grist'));
 }
+
+/**
+ * Helper that creates a promise that can be resolved from outside.
+ */
+function signal() {
+  let resolve: null | ((data: any) => void) = null;
+  let promise: null | Promise<any> = null;
+  let called = false;
+  return {
+    emit(data: any) {
+      if (!resolve) {
+        throw new Error("signal.emit() called before signal.reset()");
+      }
+      called = true;
+      resolve(data);
+    },
+    async wait() {
+      if (!promise) {
+        throw new Error("signal.wait() called before signal.reset()");
+      }
+      return await promise;
+    },
+    async waitAndReset() {
+      try {
+        return await this.wait();
+      } finally {
+        this.reset();
+      }
+    },
+    called() {
+      return called;
+    },
+    reset() {
+      called = false;
+      promise = new Promise((res) => { resolve = res; });
+    }
+  };
+}
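A quick sketch of how the signal() helper above is meant to be used (hypothetical standalone usage; in the test it backs successCalled, notFoundCalled, longStarted and longFinished): reset() arms a fresh promise, emit() resolves it, and waitAndReset() awaits the value and re-arms the signal.

async function demo() {
  const called = signal();
  called.reset();                         // arm the promise
  setTimeout(() => called.emit(42), 10);  // later, something reports in
  const value = await called.waitAndReset();
  console.log(value);            // 42
  console.log(called.called());  // false -- waitAndReset() re-armed it
}

demo().catch(err => console.error(err));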
yarn.lock (27 lines changed)
@@ -481,12 +481,13 @@
   dependencies:
     moment ">=2.14.0"

-"@types/node-fetch@2.1.2":
-  version "2.1.2"
-  resolved "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.1.2.tgz"
-  integrity sha512-XroxUzLpKuL+CVkQqXlffRkEPi4Gh3Oui/mWyS7ztKiyqVxiU+h3imCW5I2NQmde5jK+3q++36/Q96cyRWsweg==
+"@types/node-fetch@2.6.2":
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da"
+  integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==
   dependencies:
     "@types/node" "*"
+    form-data "^3.0.0"

 "@types/node@*":
   version "14.0.1"
@@ -1873,9 +1874,9 @@ combine-source-map@^0.8.0, combine-source-map@~0.8.0:
     lodash.memoize "~3.0.3"
     source-map "~0.5.3"

-combined-stream@^1.0.6, combined-stream@~1.0.6:
+combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6:
   version "1.0.8"
-  resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz"
+  resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
   integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
   dependencies:
     delayed-stream "~1.0.0"
@@ -3039,6 +3040,15 @@ forever-agent@~0.6.1:
   resolved "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz"
   integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=

+form-data@^3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f"
+  integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.8"
+    mime-types "^2.1.12"
+
 form-data@~2.3.2:
   version "2.3.3"
   resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz"
@@ -4932,6 +4942,11 @@ nise@^1.4.6:
     lolex "^5.0.1"
     path-to-regexp "^1.7.0"

+node-abort-controller@3.0.1:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.0.1.tgz#f91fa50b1dee3f909afabb7e261b1e1d6b0cb74e"
+  integrity sha512-/ujIVxthRs+7q6hsdjHMaj8hRG9NuWmwrz+JdRwZ14jdFoKSkm+vDsCbF9PLpnSqjaWQJuTmVtcWHNLr+vrOFw==
+
 node-addon-api@2.0.0:
   version "2.0.0"
   resolved "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.0.tgz"