mirror of
https://github.com/gristlabs/grist-core.git
synced 2026-03-02 04:09:24 +00:00
(core) Adds a UI panel for managing webhooks
Summary: This adds a UI panel for managing webhooks. Work started by Cyprien Pindat. You can find the UI on a document's settings page. Main changes relative to Cyprien's demo: * Changed behavior of virtual table to be more consistent with the rest of Grist, by factoring out part of the implementation of on-demand tables. * Cell values that would create an error can now be denied and reverted (as for the rest of Grist). * Changes made by other users are integrated in a sane way. * Basic undo/redo support is added using the regular undo/redo stack. * The table list in the drop-down is now updated if schema changes. * Added a notification from back-end when webhook status is updated so constant polling isn't needed to support multi-user operation. * Factored out webhook specific logic from general virtual table support. * Made a bunch of fixes to various broken behavior. * Added tests. The code remains somewhat unpolished, and behavior in the presence of errors is imperfect in general but may be adequate for this case. I assume that we'll soon be lifting the restriction on the set of domains that are supported for webhooks - otherwise we'd want to provide some friendly way to discover that list of supported domains rather than just throwing an error. I don't actually know a lot about how the front-end works - it looks like tables/columns/fields/sections can be safely added if they have string ids that won't collide with bona fide numeric ids from the back end. Sneaky. Contains a migration, so needs an extra reviewer for that. Test Plan: added tests Reviewers: jarek, dsagal Reviewed By: jarek, dsagal Differential Revision: https://phab.getgrist.com/D3856
This commit is contained in:
463
app/common/ActionSummarizer.ts
Normal file
463
app/common/ActionSummarizer.ts
Normal file
@@ -0,0 +1,463 @@
|
||||
import {getEnvContent, LocalActionBundle} from 'app/common/ActionBundle';
|
||||
import {ActionSummary, ColumnDelta, createEmptyActionSummary,
|
||||
createEmptyTableDelta, defunctTableName, LabelDelta, TableDelta} from 'app/common/ActionSummary';
|
||||
import {DocAction} from 'app/common/DocActions';
|
||||
import * as Action from 'app/common/DocActions';
|
||||
import {arrayExtend} from 'app/common/gutil';
|
||||
import {CellDelta} from 'app/common/TabularDiff';
|
||||
import fromPairs = require('lodash/fromPairs');
|
||||
import keyBy = require('lodash/keyBy');
|
||||
import sortBy = require('lodash/sortBy');
|
||||
import toPairs = require('lodash/toPairs');
|
||||
import values = require('lodash/values');
|
||||
|
||||
/**
 * The default maximum number of rows in a single bulk change that will be recorded
 * individually. Bulk changes that touch more than this number of rows
 * will be summarized only by the number of rows touched.
 * This default can be overridden via ActionSummaryOptions.maximumInlineRows.
 */
const MAXIMUM_INLINE_ROWS = 10;
|
||||
|
||||
/**
 * Options when producing an action summary.
 */
export interface ActionSummaryOptions {
  maximumInlineRows?: number;      // Overrides the maximum number of rows in a
                                   // single bulk change that will be recorded individually
                                   // (defaults to MAXIMUM_INLINE_ROWS).
  alwaysPreserveColIds?: string[]; // If set, all cells in these columns are preserved
                                   // regardless of maximumInlineRows setting.
}
|
||||
|
||||
/**
 * Internal helper that accumulates an ActionSummary from a sequence of DocActions.
 * Forward actions describe changes in the direction they were applied; reverse
 * actions come from undo information and are interpreted as their inverse
 * (e.g. undoing an AddRecord is treated as a removal).
 */
class ActionSummarizer {

  constructor(private _options?: ActionSummaryOptions) {}

  /** add information about an action based on the forward direction */
  public addForwardAction(summary: ActionSummary, act: DocAction) {
    const tableId = act[1];
    if (Action.isAddTable(act)) {
      // A new table is recorded as a rename from null; each of its initial
      // columns is likewise recorded as a column rename from null.
      summary.tableRenames.push([null, tableId]);
      for (const info of act[2]) {
        this._forTable(summary, tableId).columnRenames.push([null, info.id]);
      }
    } else if (Action.isRenameTable(act)) {
      this._addRename(summary.tableRenames, [tableId, act[2]]);
    } else if (Action.isRenameColumn(act)) {
      this._addRename(this._forTable(summary, tableId).columnRenames, [act[2], act[3]]);
    } else if (Action.isAddColumn(act)) {
      this._forTable(summary, tableId).columnRenames.push([null, act[2]]);
    } else if (Action.isRemoveColumn(act)) {
      this._forTable(summary, tableId).columnRenames.push([act[2], null]);
    } else if (Action.isAddRecord(act)) {
      const td = this._forTable(summary, tableId);
      td.addRows.push(act[2]);
      // Direction 1: the values are the new values of the cells.
      this._addRow(td, act[2], act[3], 1);
    } else if (Action.isUpdateRecord(act)) {
      const td = this._forTable(summary, tableId);
      td.updateRows.push(act[2]);
      this._addRow(td, act[2], act[3], 1);
    } else if (Action.isBulkAddRecord(act)) {
      const td = this._forTable(summary, tableId);
      arrayExtend(td.addRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 1);
    } else if (Action.isBulkUpdateRecord(act)) {
      const td = this._forTable(summary, tableId);
      arrayExtend(td.updateRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 1);
    } else if (Action.isReplaceTableData(act)) {
      // Replacement data is recorded as added rows (the replaced rows show up
      // via the undo information, handled in addReverseAction).
      const td = this._forTable(summary, tableId);
      arrayExtend(td.addRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 1);
    }
  }

  /** add information about an action based on undo information */
  public addReverseAction(summary: ActionSummary, act: DocAction) {
    const tableId = act[1];
    if (Action.isAddTable(act)) { // undoing, so this is a table removal
      summary.tableRenames.push([tableId, null]);
      for (const info of act[2]) {
        this._forTable(summary, tableId).columnRenames.push([info.id, null]);
      }
    } else if (Action.isAddRecord(act)) { // undoing, so this is a record removal
      const td = this._forTable(summary, tableId);
      td.removeRows.push(act[2]);
      // Direction 0: the values are the prior values of the cells.
      this._addRow(td, act[2], act[3], 0);
    } else if (Action.isUpdateRecord(act)) { // undoing, so this is reversal of a record update
      const td = this._forTable(summary, tableId);
      this._addRow(td, act[2], act[3], 0);
    } else if (Action.isBulkAddRecord(act)) { // undoing, this may be reversing a table delete
      const td = this._forTable(summary, tableId);
      arrayExtend(td.removeRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 0);
    } else if (Action.isBulkUpdateRecord(act)) { // undoing, so this is reversal of a bulk record update
      const td = this._forTable(summary, tableId);
      arrayExtend(td.updateRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 0);
    } else if (Action.isRenameTable(act)) { // undoing - sometimes renames only in undo info
      this._addRename(summary.tableRenames, [act[2], tableId]);
    } else if (Action.isRenameColumn(act)) { // undoing - sometimes renames only in undo info
      this._addRename(this._forTable(summary, tableId).columnRenames, [act[3], act[2]]);
    } else if (Action.isReplaceTableData(act)) { // undoing
      const td = this._forTable(summary, tableId);
      arrayExtend(td.removeRows, act[2]);
      this._addRows(tableId, td, act[2], act[3], 0);
    }
  }

  /** helper function to access summary changes for a specific table by name */
  private _forTable(summary: ActionSummary, tableId: string): TableDelta {
    return summary.tableDeltas[tableId] || (summary.tableDeltas[tableId] = createEmptyTableDelta());
  }

  /** helper function to access summary changes for a specific cell by rowId and colId */
  private _forCell(td: TableDelta, rowId: number, colId: string): CellDelta {
    const cd = td.columnDeltas[colId] || (td.columnDeltas[colId] = {});
    return cd[rowId] || (cd[rowId] = [null, null]);
  }

  /**
   * helper function to store detailed cell changes for a single row.
   * Direction parameter is 0 if values are prior values of cells, 1 if values are new values.
   */
  private _addRow(td: TableDelta, rowId: number, colValues: Action.ColValues,
                  direction: 0|1) {
    for (const [colId, colChanges] of toPairs(colValues)) {
      const cell = this._forCell(td, rowId, colId);
      // Known cell values are wrapped in a single-element list.
      cell[direction] = [colChanges];
    }
  }

  /** helper function to store detailed cell changes for a set of rows */
  private _addRows(tableId: string, td: TableDelta, rowIds: number[],
                   colValues: Action.BulkColValues, direction: 0|1) {
    const maximumInlineRows = this._options?.maximumInlineRows || MAXIMUM_INLINE_ROWS;
    // Metadata tables (ids starting with "_grist_") are never truncated.
    const limitRows: boolean = rowIds.length > maximumInlineRows && !tableId.startsWith("_grist_");
    let selectedRows: Array<[number, number]> = [];
    if (limitRows) {
      // if many rows, just take some from start and one from end as examples
      selectedRows = [...rowIds.slice(0, maximumInlineRows - 1).entries()];
      selectedRows.push([rowIds.length - 1, rowIds[rowIds.length - 1]]);
    }

    const alwaysPreserveColIds = new Set(this._options?.alwaysPreserveColIds || []);
    for (const [colId, colChanges] of toPairs(colValues)) {
      // Record the value of the cell at (rowId, colId), taking the value at
      // index idx of this column's bulk values.
      const addCellToSummary = (rowId: number, idx: number) => {
        const cell = this._forCell(td, rowId, colId);
        cell[direction] = [colChanges[idx]];
      };
      if (!limitRows || alwaysPreserveColIds.has(colId)) {
        rowIds.forEach(addCellToSummary);
      } else {
        selectedRows.forEach(([idx, rowId]) => addCellToSummary(rowId, idx));
      }
    }
  }

  /** add a rename to a list, avoiding duplicates */
  private _addRename(renames: LabelDelta[], rename: LabelDelta) {
    if (renames.find(r => r[0] === rename[0] && r[1] === rename[1])) { return; }
    renames.push(rename);
  }
}
|
||||
|
||||
/**
|
||||
* Summarize the tabular changes that a LocalActionBundle results in, in a form
|
||||
* that will be suitable for composition.
|
||||
*/
|
||||
export function summarizeAction(body: LocalActionBundle, options?: ActionSummaryOptions): ActionSummary {
|
||||
return summarizeStoredAndUndo(getEnvContent(body.stored), body.undo, options);
|
||||
}
|
||||
|
||||
export function summarizeStoredAndUndo(stored: DocAction[], undo: DocAction[],
|
||||
options?: ActionSummaryOptions): ActionSummary {
|
||||
const summarizer = new ActionSummarizer(options);
|
||||
const summary = createEmptyActionSummary();
|
||||
for (const act of stored) {
|
||||
summarizer.addForwardAction(summary, act);
|
||||
}
|
||||
for (const act of Array.from(undo).reverse()) {
|
||||
summarizer.addReverseAction(summary, act);
|
||||
}
|
||||
// Name tables consistently, by their ultimate name, now we know it.
|
||||
for (const renames of summary.tableRenames) {
|
||||
const pre = renames[0];
|
||||
let post = renames[1];
|
||||
if (pre === null) { continue; }
|
||||
if (post === null) { post = defunctTableName(pre); }
|
||||
if (summary.tableDeltas[pre]) {
|
||||
summary.tableDeltas[post] = summary.tableDeltas[pre];
|
||||
delete summary.tableDeltas[pre];
|
||||
}
|
||||
}
|
||||
for (const td of values(summary.tableDeltas)) {
|
||||
// Name columns consistently, by their ultimate name, now we know it.
|
||||
for (const renames of td.columnRenames) {
|
||||
const pre = renames[0];
|
||||
let post = renames[1];
|
||||
if (pre === null) { continue; }
|
||||
if (post === null) { post = defunctTableName(pre); }
|
||||
if (td.columnDeltas[pre]) {
|
||||
td.columnDeltas[post] = td.columnDeltas[pre];
|
||||
delete td.columnDeltas[pre];
|
||||
}
|
||||
}
|
||||
// remove any duplicates that crept in
|
||||
td.addRows = Array.from(new Set(td.addRows));
|
||||
td.updateRows = Array.from(new Set(td.updateRows));
|
||||
td.removeRows = Array.from(new Set(td.removeRows));
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
|
||||
/**
 * Once we can produce an ActionSummary for each LocalActionBundle, it is useful to be able
 * to compose them. Take the case of an ActionSummary pair, part 1 and part 2. NameMerge
 * is an internal structure to help merging table/column name changes across two parts.
 * The sets and maps are keyed by table or column name.
 */
interface NameMerge {
  dead1: Set<string>;           /** anything of this name in part 1 should be removed from merge */
  dead2: Set<string>;           /** anything of this name in part 2 should be removed from merge */
  rename1: Map<string, string>; /** replace these names in part 1 */
  rename2: Map<string, string>; /** replace these names in part 2 */
  merge: LabelDelta[];          /** a merged list of adds/removes/renames for the result */
}
|
||||
|
||||
/**
 * Looks at a pair of name change lists (could be tables or columns) and figures out what
 * changes would need to be made to a data structure keyed on those names in order to key
 * it consistently on final names.
 */
function planNameMerge(names1: LabelDelta[], names2: LabelDelta[]): NameMerge {
  const result: NameMerge = {
    dead1: new Set(),
    dead2: new Set(),
    rename1: new Map<string, string>(),
    rename2: new Map<string, string>(),
    merge: new Array<LabelDelta>(),
  };
  // Index part 1 by final name and part 2 by initial name, so the two lists can
  // be joined on the name a table/column carries between the two parts.
  const names1ByFinalName: {[name: string]: LabelDelta} = keyBy(names1, p => p[1]!);
  const names2ByInitialName: {[name: string]: LabelDelta} = keyBy(names2, p => p[0]!);
  for (const [before1, after1] of names1) {
    if (!after1) {
      if (!before1) { throw new Error("invalid name change found"); }
      // Table/column was deleted in part 1.
      result.dead1.add(before1);
      result.merge.push([before1, null]);
      continue;
    }
    // At this point, we know the table/column existed at end of part 1.
    const pair2 = names2ByInitialName[after1];
    if (!pair2) {
      // Table/column's name was stable in part 2, so only change was in part 1.
      result.merge.push([before1, after1]);
      continue;
    }
    const after2 = pair2[1];
    if (!after2) {
      // Table/column was deleted in part 2.
      result.dead2.add(after1);
      if (before1) {
        // Table/column existed prior to part 1, so we need to expose its history.
        result.dead1.add(before1);
        result.merge.push([before1, null]);
      } else {
        // Table/column did not exist prior to part 1, so we erase it from history.
        result.dead1.add(after1);
        result.dead2.add(defunctTableName(after1));
      }
      continue;
    }
    // If we made it this far, our table/column exists after part 2. Any information
    // keyed to its name in part 1 will need to be rekeyed to its final name.
    result.rename1.set(after1, after2);
    result.merge.push([before1, after2]);
  }
  // Look through part 2 for any changes not already covered.
  for (const [before2, after2] of names2) {
    if (!before2 && !after2) { throw new Error("invalid name change found"); }
    if (before2 && names1ByFinalName[before2]) { continue; } // Already handled
    result.merge.push([before2, after2]);
    // If table/column is renamed in part 2, and name was stable in part 1,
    // rekey any information about it in part 1.
    if (before2 && after2) { result.rename1.set(before2, after2); }
  }
  // For neatness, sort the merge order. Not essential.
  result.merge = sortBy(result.merge, ([a, b]) => [a || "", b || ""]);
  return result;
}
|
||||
|
||||
/**
|
||||
* Re-key nested data to match name changes / removals. Needs to be done a little carefully
|
||||
* since it is perfectly possible for names to be swapped or shuffled.
|
||||
*
|
||||
* Entries may be TableDeltas in the case of table renames or ColumnDeltas for column renames.
|
||||
*
|
||||
* @param entries: a dictionary of nested data - TableDeltas for tables, ColumnDeltas for columns.
|
||||
* @param dead: a set of keys to remove from the dictionary.
|
||||
* @param rename: changes of names to apply to the dictionary.
|
||||
*/
|
||||
function renameAndDelete<T>(entries: {[name: string]: T}, dead: Set<string>,
|
||||
rename: Map<string, string>) {
|
||||
// Remove all entries marked as dead.
|
||||
for (const key of dead) { delete entries[key]; }
|
||||
// Move all entries that are going to be renamed out to a cache temporarily.
|
||||
const cache: {[name: string]: any} = {};
|
||||
for (const key of rename.keys()) {
|
||||
if (entries[key]) {
|
||||
cache[key] = entries[key];
|
||||
delete entries[key];
|
||||
}
|
||||
}
|
||||
// Move all renamed entries back in with their new names.
|
||||
for (const [key, val] of rename.entries()) { if (cache[key]) { entries[val] = cache[key]; } }
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply planned name changes to a pair of entries, and return a merged entry incorporating
|
||||
* their composition.
|
||||
*
|
||||
* @param names: the planned name changes as calculated by planNameMerge()
|
||||
* @param entries1: the first dictionary of nested data keyed on the names
|
||||
* @param entries2: test second dictionary of nested data keyed on the names
|
||||
* @param mergeEntry: a function to apply any further corrections needed to the entries
|
||||
*
|
||||
*/
|
||||
function mergeNames<T>(names: NameMerge,
|
||||
entries1: {[name: string]: T},
|
||||
entries2: {[name: string]: T},
|
||||
mergeEntry: (e1: T, e2: T) => T): {[name: string]: T} {
|
||||
// Update the keys of the entries1 and entries2 dictionaries to be consistent.
|
||||
renameAndDelete(entries1, names.dead1, names.rename1);
|
||||
renameAndDelete(entries2, names.dead2, names.rename2);
|
||||
|
||||
// Prepare the composition of the two dictionaries.
|
||||
const entries = entries2; // Start with the second dictionary.
|
||||
for (const key of Object.keys(entries1)) { // Add material from the first.
|
||||
const e1 = entries1[key];
|
||||
if (!entries[key]) { entries[key] = e1; continue; } // No overlap - just add and move on.
|
||||
entries[key] = mergeEntry(e1, entries[key]); // Recursive merge if overlap.
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
|
||||
/**
 * Track whether a specific row was added, removed or updated.
 * Several flags may be true at once within a single summary (e.g. a rowId that
 * was removed and then re-added, reusing the id).
 */
interface RowChange {
  added: boolean;
  removed: boolean;
  updated: boolean;
}
|
||||
|
||||
/** RowChange for each row in a table, keyed by rowId. */
export interface RowChanges {
  [rowId: number]: RowChange;
}
|
||||
|
||||
|
||||
/**
|
||||
* This is used when we hit a cell that we know has changed but don't know its
|
||||
* value due to it being part of a bulk input. This produces a cell that
|
||||
* represents the unknowns.
|
||||
*/
|
||||
function bulkCellFor(rc: RowChange|undefined): CellDelta|undefined {
|
||||
if (!rc) { return undefined; }
|
||||
const result: CellDelta = [null, null];
|
||||
if (rc.removed || rc.updated) { result[0] = '?'; }
|
||||
if (rc.added || rc.updated) { result[1] = '?'; }
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Merge changes that apply to a particular column. The result is accumulated
 * in (and returned as) e2, which is mutated in place.
 *
 * @param present1: affected rows in part 1
 * @param present2: affected rows in part 2
 * @param e1: cached cell values for the column in part 1
 * @param e2: cached cell values for the column in part 2
 */
function mergeColumn(present1: RowChanges, present2: RowChanges,
                     e1: ColumnDelta, e2: ColumnDelta): ColumnDelta {
  for (const key of (Object.keys(present1) as unknown as number[])) {
    let v1 = e1[key];
    let v2 = e2[key];
    if (!v1 && !v2) { continue; }
    // Where a part touched the row but cached no cell value (a truncated bulk
    // change), substitute a placeholder cell marking the values as unknown.
    v1 = v1 || bulkCellFor(present1[key]);
    v2 = v2 || bulkCellFor(present2[key]);
    if (!v2) { e2[key] = e1[key]; continue; }  // Row untouched in part 2; keep part 1 change.
    if (!v1[1]) { continue; } // Deleted row.
    e2[key] = [v1[0], v2[1]]; // Change is from initial value in e1 to final value in e2.
  }
  return e2;
}
|
||||
|
||||
|
||||
/** Put list of numbers in ascending order, with duplicates removed. */
|
||||
function uniqueAndSorted(lst: number[]) {
|
||||
return [...new Set(lst)].sort((a, b) => a - b);
|
||||
}
|
||||
|
||||
/** For each row changed, figure out whether it was added/removed/updated */
|
||||
/** TODO: need for this method suggests maybe a better core representation for this info */
|
||||
function getRowChanges(e: TableDelta): RowChanges {
|
||||
const all = new Set([...e.addRows, ...e.removeRows, ...e.updateRows]);
|
||||
const added = new Set(e.addRows);
|
||||
const removed = new Set(e.removeRows);
|
||||
const updated = new Set(e.updateRows);
|
||||
return fromPairs([...all].map(x => {
|
||||
return [x, {added: added.has(x),
|
||||
removed: removed.has(x),
|
||||
updated: updated.has(x)}] as [number, RowChange];
|
||||
}));
|
||||
}
|
||||
|
||||
/**
 * Merge changes that apply to a particular table. For updating addRows and removeRows, care is
 * needed, since it is fine to remove and add the same rowId within a single summary -- this is just
 * rowId reuse. It needs to be tracked so we know lifetime of rows though.
 * The result is accumulated in (and returned as) e2, which is mutated in place.
 */
function mergeTable(e1: TableDelta, e2: TableDelta): TableDelta {
  // First, sort out any changes to names of columns.
  const names = planNameMerge(e1.columnRenames, e2.columnRenames);
  mergeNames(names, e1.columnDeltas, e2.columnDeltas,
             mergeColumn.bind(null,
                              getRowChanges(e1),
                              getRowChanges(e2)));
  e2.columnRenames = names.merge;
  // All the columnar data is now merged. What remains is to merge the summary lists of rowIds
  // that we maintain.
  const addRows1 = new Set(e1.addRows); // Non-transient rows we have clearly added.
  const removeRows2 = new Set(e2.removeRows); // Non-transient rows we have clearly removed.
  // Transients are rows added in part 1 and removed again in part 2.
  const transients = e1.addRows.filter(x => removeRows2.has(x));
  e2.addRows = uniqueAndSorted([...e2.addRows, ...e1.addRows.filter(x => !removeRows2.has(x))]);
  e2.removeRows = uniqueAndSorted([...e2.removeRows.filter(x => !addRows1.has(x)), ...e1.removeRows]);
  e2.updateRows = uniqueAndSorted([...e1.updateRows.filter(x => !removeRows2.has(x)),
                                   ...e2.updateRows.filter(x => !addRows1.has(x))]);
  // Remove all traces of transients (rows that were created and destroyed) from history.
  for (const cols of values(e2.columnDeltas)) {
    for (const key of transients) { delete cols[key]; }
  }
  return e2;
}
|
||||
|
||||
/** Finally, merge a pair of summaries. */
|
||||
export function concatenateSummaryPair(sum1: ActionSummary, sum2: ActionSummary): ActionSummary {
|
||||
const names = planNameMerge(sum1.tableRenames, sum2.tableRenames);
|
||||
const rowChanges = mergeNames(names, sum1.tableDeltas, sum2.tableDeltas, mergeTable);
|
||||
const sum: ActionSummary = {
|
||||
tableRenames: names.merge,
|
||||
tableDeltas: rowChanges
|
||||
};
|
||||
return sum;
|
||||
}
|
||||
|
||||
/** Generalize to merging a list of summaries. */
|
||||
export function concatenateSummaries(sums: ActionSummary[]): ActionSummary {
|
||||
if (sums.length === 0) { return createEmptyActionSummary(); }
|
||||
let result = sums[0];
|
||||
for (let i = 1; i < sums.length; i++) {
|
||||
result = concatenateSummaryPair(result, sums[i]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
200
app/common/AlternateActions.ts
Normal file
200
app/common/AlternateActions.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import { BulkColValues, ColValues, DocAction, isSchemaAction,
|
||||
TableDataAction, UserAction } from 'app/common/DocActions';
|
||||
|
||||
// The UserAction types that can be translated to DocActions here directly,
// without involving the sandboxed data engine (see splitByStorage below).
const ACTION_TYPES = new Set([
  'AddRecord', 'BulkAddRecord', 'UpdateRecord', 'BulkUpdateRecord',
  'RemoveRecord', 'BulkRemoveRecord'
]);
|
||||
|
||||
/**
 * The result of processing a UserAction.
 */
export interface ProcessedAction {
  stored: DocAction[];  // DocActions to store as the effect of the UserAction.
  undo: DocAction[];    // DocActions that would reverse the stored actions.
  retValues: any;       // Return value of the UserAction (e.g. the rowId(s) for additions).
}
|
||||
|
||||
/**
 * A minimal interface for interpreting UserActions in the context of
 * some current state. We need to know the next free rowId for each
 * table, and also the current state of cells. This interface was
 * abstracted from the initial implementation of on-demand tables.
 */
export interface AlternateStorage {
  /** Return the next unused rowId for the given table. */
  getNextRowId(tableId: string): Promise<number>;
  /** Fetch the current content of the given rows (optionally restricted to
   * certain columns) as a TableDataAction. */
  fetchActionData(tableId: string, rowIds: number[],
                  colIds?: string[]): Promise<TableDataAction>;
}
|
||||
|
||||
/**
|
||||
* Handle converting UserActions to DocActions for tables stored
|
||||
* in some way that is not handled by the regular data engine.
|
||||
*/
|
||||
export class AlternateActions {
|
||||
|
||||
constructor(private _storage: AlternateStorage) {}
|
||||
|
||||
/**
|
||||
* This may be overridden to allow mixing two different storage mechanisms.
|
||||
* The implementation of on-demand tables does this.
|
||||
*/
|
||||
public usesAlternateStorage(tableId: string): boolean {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a UserAction into stored and undo DocActions as well as return values.
|
||||
*/
|
||||
public processUserAction(action: UserAction): Promise<ProcessedAction> {
|
||||
const a = action.map(item => item as any);
|
||||
switch (a[0]) {
|
||||
case "ApplyUndoActions": return this._doApplyUndoActions(a[1]);
|
||||
case "AddRecord": return this._doAddRecord (a[1], a[2], a[3]);
|
||||
case "BulkAddRecord": return this._doBulkAddRecord (a[1], a[2], a[3]);
|
||||
case "UpdateRecord": return this._doUpdateRecord (a[1], a[2], a[3]);
|
||||
case "BulkUpdateRecord": return this._doBulkUpdateRecord(a[1], a[2], a[3]);
|
||||
case "RemoveRecord": return this._doRemoveRecord (a[1], a[2]);
|
||||
case "BulkRemoveRecord": return this._doBulkRemoveRecord(a[1], a[2]);
|
||||
default: throw new Error(`Received unknown action ${action[0]}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits an array of UserActions into two separate arrays of normal and onDemand actions.
|
||||
*/
|
||||
public splitByStorage(actions: UserAction[]): [UserAction[], UserAction[]] {
|
||||
const normal: UserAction[] = [];
|
||||
const onDemand: UserAction[] = [];
|
||||
actions.forEach(a => {
|
||||
// Check that the actionType can be applied without the sandbox and also that the action
|
||||
// is on a data table.
|
||||
const isOnDemandAction = ACTION_TYPES.has(a[0] as string);
|
||||
const isDataTableAction = typeof a[1] === 'string' && !a[1].startsWith('_grist_');
|
||||
if (a[0] === 'ApplyUndoActions') {
|
||||
// Split actions inside the undo action array.
|
||||
const [undoNormal, undoOnDemand] = this.splitByStorage(a[1] as UserAction[]);
|
||||
if (undoNormal.length > 0) {
|
||||
normal.push(['ApplyUndoActions', undoNormal]);
|
||||
}
|
||||
if (undoOnDemand.length > 0) {
|
||||
onDemand.push(['ApplyUndoActions', undoOnDemand]);
|
||||
}
|
||||
} else if (isDataTableAction && isOnDemandAction && this.usesAlternateStorage(a[1] as string)) {
|
||||
// Check whether the tableId belongs to an onDemand table.
|
||||
onDemand.push(a);
|
||||
} else {
|
||||
normal.push(a);
|
||||
}
|
||||
});
|
||||
return [normal, onDemand];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an action represents a schema change on an onDemand table.
|
||||
*/
|
||||
public isSchemaAction(docAction: DocAction): boolean {
|
||||
return isSchemaAction(docAction) && this.usesAlternateStorage(docAction[1]);
|
||||
}
|
||||
|
||||
private async _doApplyUndoActions(actions: DocAction[]) {
|
||||
const undo: DocAction[] = [];
|
||||
for (const a of actions) {
|
||||
const converted = await this.processUserAction(a);
|
||||
undo.concat(converted.undo);
|
||||
}
|
||||
return {
|
||||
stored: actions,
|
||||
undo,
|
||||
retValues: null
|
||||
};
|
||||
}
|
||||
|
||||
private async _doAddRecord(
|
||||
tableId: string,
|
||||
rowId: number|null,
|
||||
colValues: ColValues
|
||||
): Promise<ProcessedAction> {
|
||||
if (rowId === null) {
|
||||
rowId = await this._storage.getNextRowId(tableId);
|
||||
}
|
||||
// Set the manualSort to be the same as the rowId. This forces new rows to always be added
|
||||
// at the end of the table.
|
||||
colValues.manualSort = rowId;
|
||||
return {
|
||||
stored: [['AddRecord', tableId, rowId, colValues]],
|
||||
undo: [['RemoveRecord', tableId, rowId]],
|
||||
retValues: rowId
|
||||
};
|
||||
}
|
||||
|
||||
private async _doBulkAddRecord(
|
||||
tableId: string,
|
||||
rowIds: Array<number|null>,
|
||||
colValues: BulkColValues
|
||||
): Promise<ProcessedAction> {
|
||||
|
||||
// When unset, we will set the rowId values to count up from the greatest
|
||||
// values already in the table.
|
||||
if (rowIds[0] === null) {
|
||||
const nextRowId = await this._storage.getNextRowId(tableId);
|
||||
for (let i = 0; i < rowIds.length; i++) {
|
||||
rowIds[i] = nextRowId + i;
|
||||
}
|
||||
}
|
||||
// Set the manualSort values to be the same as the rowIds. This forces new rows to always be
|
||||
// added at the end of the table.
|
||||
colValues.manualSort = rowIds;
|
||||
return {
|
||||
stored: [['BulkAddRecord', tableId, rowIds as number[], colValues]],
|
||||
undo: [['BulkRemoveRecord', tableId, rowIds as number[]]],
|
||||
retValues: rowIds
|
||||
};
|
||||
}
|
||||
|
||||
private async _doUpdateRecord(
|
||||
tableId: string,
|
||||
rowId: number,
|
||||
colValues: ColValues
|
||||
): Promise<ProcessedAction> {
|
||||
const [, , oldRowIds, oldColValues] =
|
||||
await this._storage.fetchActionData(tableId, [rowId], Object.keys(colValues));
|
||||
return {
|
||||
stored: [['UpdateRecord', tableId, rowId, colValues]],
|
||||
undo: [['BulkUpdateRecord', tableId, oldRowIds, oldColValues]],
|
||||
retValues: null
|
||||
};
|
||||
}
|
||||
|
||||
private async _doBulkUpdateRecord(
|
||||
tableId: string,
|
||||
rowIds: number[],
|
||||
colValues: BulkColValues
|
||||
): Promise<ProcessedAction> {
|
||||
const [, , oldRowIds, oldColValues] =
|
||||
await this._storage.fetchActionData(tableId, rowIds, Object.keys(colValues));
|
||||
return {
|
||||
stored: [['BulkUpdateRecord', tableId, rowIds, colValues]],
|
||||
undo: [['BulkUpdateRecord', tableId, oldRowIds, oldColValues]],
|
||||
retValues: null
|
||||
};
|
||||
}
|
||||
|
||||
private async _doRemoveRecord(tableId: string, rowId: number): Promise<ProcessedAction> {
|
||||
const [, , oldRowIds, oldColValues] = await this._storage.fetchActionData(tableId, [rowId]);
|
||||
return {
|
||||
stored: [['RemoveRecord', tableId, rowId]],
|
||||
undo: [['BulkAddRecord', tableId, oldRowIds, oldColValues]],
|
||||
retValues: null
|
||||
};
|
||||
}
|
||||
|
||||
private async _doBulkRemoveRecord(tableId: string, rowIds: number[]): Promise<ProcessedAction> {
|
||||
const [, , oldRowIds, oldColValues] = await this._storage.fetchActionData(tableId, rowIds);
|
||||
return {
|
||||
stored: [['BulkRemoveRecord', tableId, rowIds]],
|
||||
undo: [['BulkAddRecord', tableId, oldRowIds, oldColValues]],
|
||||
retValues: null
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -138,8 +138,13 @@ function throwApiError(url: string, resp: Response | AxiosResponse, body: any) {
|
||||
// If the response includes details, include them into the ApiError we construct. Include
|
||||
// also the error message from the server as details.userError. It's used by the Notifier.
|
||||
if (!body) { body = {}; }
|
||||
const details: ApiErrorDetails = body.details && typeof body.details === 'object' ? body.details : {};
|
||||
if (body.error) {
|
||||
const details: ApiErrorDetails = body.details && typeof body.details === 'object' ? body.details :
|
||||
{errorDetails: body.details};
|
||||
// If a userError is already specified, do not overwrite it.
|
||||
// (The error handling here is quite confusing, would it not be better
|
||||
// to just unserialize an ApiError into the form it would have had on
|
||||
// the server?)
|
||||
if (body.error && !details.userError) {
|
||||
details.userError = body.error;
|
||||
}
|
||||
if (body.memos) {
|
||||
|
||||
@@ -5,8 +5,9 @@ import {Product} from 'app/common/Features';
|
||||
import {StringUnion} from 'app/common/StringUnion';
|
||||
import {UserProfile} from 'app/common/LoginSessionAPI';
|
||||
|
||||
export const ValidEvent = StringUnion('docListAction', 'docUserAction', 'docShutdown', 'docError',
|
||||
'docUsage', 'clientConnect');
|
||||
export const ValidEvent = StringUnion(
|
||||
'docListAction', 'docUserAction', 'docShutdown', 'docError',
|
||||
'docUsage', 'docChatter', 'clientConnect');
|
||||
export type ValidEvent = typeof ValidEvent.type;
|
||||
|
||||
|
||||
@@ -50,12 +51,12 @@ export interface CommMessageBase {
|
||||
data?: unknown;
|
||||
}
|
||||
|
||||
export type CommDocMessage = CommDocUserAction | CommDocUsage | CommDocShutdown | CommDocError;
|
||||
export type CommDocMessage = CommDocUserAction | CommDocUsage | CommDocShutdown | CommDocError | CommDocChatter;
|
||||
export type CommMessage = CommDocMessage | CommDocListAction | CommClientConnect;
|
||||
|
||||
export type CommResponseBase = CommResponse | CommResponseError | CommMessage;
|
||||
|
||||
export type CommDocEventType = CommDocMessage['type']
|
||||
export type CommDocEventType = CommDocMessage['type'];
|
||||
|
||||
/**
|
||||
* Event for a change to the document list.
|
||||
@@ -89,6 +90,20 @@ export interface CommDocUserAction extends CommMessageBase {
|
||||
};
|
||||
}
|
||||
|
||||
export interface CommDocChatter extends CommMessageBase {
|
||||
type: 'docChatter';
|
||||
docFD: number;
|
||||
data: {
|
||||
webhooks?: {
|
||||
// If present, something happened related to webhooks.
|
||||
// Currently, we give no details, leaving it to client
|
||||
// to call back for details if it cares.
|
||||
},
|
||||
// This could also be a fine place to send updated info
|
||||
// about other users of the document.
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Event for a change to document usage. Sent to all clients that have this document open.
|
||||
*/
|
||||
|
||||
@@ -75,6 +75,15 @@ export class DocData extends ActionDispatcher {
|
||||
return this._tables.get(tableId);
|
||||
}
|
||||
|
||||
public async requireTable(tableId: string): Promise<TableData> {
|
||||
await this.fetchTable(tableId);
|
||||
const td = this._tables.get(tableId);
|
||||
if (!td) {
|
||||
throw new Error(`could not fetch table: ${tableId}`);
|
||||
}
|
||||
return td;
|
||||
}
|
||||
|
||||
/**
|
||||
* Like getTable, but the result knows about the types of its records
|
||||
*/
|
||||
|
||||
51
app/common/DocDataCache.ts
Normal file
51
app/common/DocDataCache.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { AlternateActions, AlternateStorage, ProcessedAction} from 'app/common/AlternateActions';
|
||||
import { DocAction, UserAction } from 'app/common/DocActions';
|
||||
import { DocData } from 'app/common/DocData';
|
||||
import max from 'lodash/max';
|
||||
|
||||
/**
|
||||
* An implementation of an in-memory storage that can handle UserActions,
|
||||
* generating DocActions and retValues that work as for regular storage.
|
||||
* It shares an implementation with on-demand tables.
|
||||
*/
|
||||
export class DocDataCache implements AlternateStorage {
|
||||
public docData: DocData;
|
||||
private _altActions: AlternateActions;
|
||||
constructor(actions?: DocAction[]) {
|
||||
this.docData = new DocData(
|
||||
async (tableId) => {
|
||||
throw new Error(`no ${tableId}`);
|
||||
},
|
||||
null,
|
||||
);
|
||||
this._altActions = new AlternateActions(this);
|
||||
for (const action of actions || []) {
|
||||
this.docData.receiveAction(action);
|
||||
}
|
||||
}
|
||||
|
||||
public async sendTableActions(actions: UserAction[]): Promise<ProcessedAction[]> {
|
||||
const results: ProcessedAction[] = [];
|
||||
for (const userAction of actions) {
|
||||
const processedAction = await this._altActions.processUserAction(userAction);
|
||||
results.push(processedAction);
|
||||
for (const storedAction of processedAction.stored) {
|
||||
this.docData.receiveAction(storedAction);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
public async fetchActionData(tableId: string, rowIds: number[], colIds?: string[]) {
|
||||
const table = await this.docData.requireTable(tableId);
|
||||
return table.getTableDataAction(
|
||||
rowIds,
|
||||
colIds,
|
||||
);
|
||||
}
|
||||
|
||||
public async getNextRowId(tableId: string): Promise<number> {
|
||||
const table = await this.docData.requireTable(tableId);
|
||||
return (max(table.getRowIds()) || 0) + 1;
|
||||
}
|
||||
}
|
||||
@@ -254,23 +254,27 @@ export class TableData extends ActionDispatcher implements SkippableRows {
|
||||
* Optionally takes a list of row ids to return data from. If a row id is
|
||||
* not actually present in the table, a row of nulls will be returned for it.
|
||||
*/
|
||||
public getTableDataAction(desiredRowIds?: number[]): TableDataAction {
|
||||
public getTableDataAction(desiredRowIds?: number[],
|
||||
colIds?: string[]): TableDataAction {
|
||||
colIds = colIds || this.getColIds();
|
||||
const colIdSet = new Set<string>(colIds);
|
||||
const rowIds = desiredRowIds || this.getRowIds();
|
||||
let bulkColValues: {[colId: string]: CellValue[]};
|
||||
const colArray = this._colArray.filter(({colId}) => colIdSet.has(colId));
|
||||
if (desiredRowIds) {
|
||||
const len = rowIds.length;
|
||||
bulkColValues = {};
|
||||
for (const colId of this.getColIds()) { bulkColValues[colId] = Array(len); }
|
||||
for (const colId of colIds) { bulkColValues[colId] = Array(len); }
|
||||
for (let i = 0; i < len; i++) {
|
||||
const index = this._rowMap.get(rowIds[i]);
|
||||
for (const {colId, values} of this._colArray) {
|
||||
for (const {colId, values} of colArray) {
|
||||
const value = (index === undefined) ? null : values[index];
|
||||
bulkColValues[colId][i] = value;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
bulkColValues = fromPairs(
|
||||
this.getColIds()
|
||||
colIds
|
||||
.filter(colId => colId !== 'id')
|
||||
.map(colId => [colId, this.getColValues(colId)! as CellValue[]]));
|
||||
}
|
||||
|
||||
@@ -10,13 +10,36 @@ export const WebhookFields = t.iface([], {
|
||||
"tableId": "string",
|
||||
"enabled": t.opt("boolean"),
|
||||
"isReadyColumn": t.opt(t.union("string", "null")),
|
||||
"name": t.opt("string"),
|
||||
"memo": t.opt("string"),
|
||||
});
|
||||
|
||||
export const WebhookBatchStatus = t.union(t.lit('success'), t.lit('failure'), t.lit('rejected'));
|
||||
|
||||
export const WebhookStatus = t.union(t.lit('idle'), t.lit('sending'), t.lit('retrying'), t.lit('postponed'), t.lit('error'), t.lit('invalid'));
|
||||
|
||||
export const WebhookSubscribe = t.iface([], {
|
||||
"url": "string",
|
||||
"eventTypes": t.array(t.union(t.lit("add"), t.lit("update"))),
|
||||
"enabled": t.opt("boolean"),
|
||||
"isReadyColumn": t.opt(t.union("string", "null")),
|
||||
"name": t.opt("string"),
|
||||
"memo": t.opt("string"),
|
||||
});
|
||||
|
||||
export const WebhookSummary = t.iface([], {
|
||||
"id": "string",
|
||||
"fields": t.iface([], {
|
||||
"url": "string",
|
||||
"unsubscribeKey": "string",
|
||||
"eventTypes": t.array("string"),
|
||||
"isReadyColumn": t.union("string", "null"),
|
||||
"tableId": "string",
|
||||
"enabled": "boolean",
|
||||
"name": "string",
|
||||
"memo": "string",
|
||||
}),
|
||||
"usage": t.union("WebhookUsage", "null"),
|
||||
});
|
||||
|
||||
export const WebhookUpdate = t.iface([], {
|
||||
@@ -30,12 +53,39 @@ export const WebhookPatch = t.iface([], {
|
||||
"tableId": t.opt("string"),
|
||||
"enabled": t.opt("boolean"),
|
||||
"isReadyColumn": t.opt(t.union("string", "null")),
|
||||
"name": t.opt("string"),
|
||||
"memo": t.opt("string"),
|
||||
});
|
||||
|
||||
export const WebhookUsage = t.iface([], {
|
||||
"numWaiting": "number",
|
||||
"status": "WebhookStatus",
|
||||
"updatedTime": t.opt(t.union("number", "null")),
|
||||
"lastSuccessTime": t.opt(t.union("number", "null")),
|
||||
"lastFailureTime": t.opt(t.union("number", "null")),
|
||||
"lastErrorMessage": t.opt(t.union("string", "null")),
|
||||
"lastHttpStatus": t.opt(t.union("number", "null")),
|
||||
"lastEventBatch": t.opt(t.union("null", t.iface([], {
|
||||
"size": "number",
|
||||
"errorMessage": t.union("string", "null"),
|
||||
"httpStatus": t.union("number", "null"),
|
||||
"status": "WebhookBatchStatus",
|
||||
"attempts": "number",
|
||||
}))),
|
||||
"numSuccess": t.opt(t.iface([], {
|
||||
"pastHour": "number",
|
||||
"past24Hours": "number",
|
||||
})),
|
||||
});
|
||||
|
||||
const exportedTypeSuite: t.ITypeSuite = {
|
||||
WebhookFields,
|
||||
WebhookBatchStatus,
|
||||
WebhookStatus,
|
||||
WebhookSubscribe,
|
||||
WebhookSummary,
|
||||
WebhookUpdate,
|
||||
WebhookPatch,
|
||||
WebhookUsage,
|
||||
};
|
||||
export default exportedTypeSuite;
|
||||
|
||||
@@ -4,8 +4,15 @@ export interface WebhookFields {
|
||||
tableId: string;
|
||||
enabled?: boolean;
|
||||
isReadyColumn?: string|null;
|
||||
name?: string;
|
||||
memo?: string;
|
||||
}
|
||||
|
||||
// Union discriminated by type
|
||||
export type WebhookBatchStatus = 'success'|'failure'|'rejected';
|
||||
export type WebhookStatus = 'idle'|'sending'|'retrying'|'postponed'|'error'|'invalid';
|
||||
|
||||
|
||||
// WebhookSubscribe should be `Omit<WebhookFields, 'tableId'>` (because subscribe endpoint read
|
||||
// tableId from the url) but generics are not yet supported by ts-interface-builder
|
||||
export interface WebhookSubscribe {
|
||||
@@ -13,6 +20,23 @@ export interface WebhookSubscribe {
|
||||
eventTypes: Array<"add"|"update">;
|
||||
enabled?: boolean;
|
||||
isReadyColumn?: string|null;
|
||||
name?: string;
|
||||
memo?: string;
|
||||
}
|
||||
|
||||
export interface WebhookSummary {
|
||||
id: string;
|
||||
fields: {
|
||||
url: string;
|
||||
unsubscribeKey: string;
|
||||
eventTypes: string[];
|
||||
isReadyColumn: string|null;
|
||||
tableId: string;
|
||||
enabled: boolean;
|
||||
name: string;
|
||||
memo: string;
|
||||
},
|
||||
usage: WebhookUsage|null,
|
||||
}
|
||||
|
||||
// Describes fields to update a webhook
|
||||
@@ -29,4 +53,29 @@ export interface WebhookPatch {
|
||||
tableId?: string;
|
||||
enabled?: boolean;
|
||||
isReadyColumn?: string|null;
|
||||
name?: string;
|
||||
memo?: string;
|
||||
}
|
||||
|
||||
|
||||
export interface WebhookUsage {
|
||||
// As minimum we need number of waiting events and status (by default pending).
|
||||
numWaiting: number,
|
||||
status: WebhookStatus;
|
||||
updatedTime?: number|null;
|
||||
lastSuccessTime?: number|null;
|
||||
lastFailureTime?: number|null;
|
||||
lastErrorMessage?: string|null;
|
||||
lastHttpStatus?: number|null;
|
||||
lastEventBatch?: null | {
|
||||
size: number;
|
||||
errorMessage: string|null;
|
||||
httpStatus: number|null;
|
||||
status: WebhookBatchStatus;
|
||||
attempts: number;
|
||||
},
|
||||
numSuccess?: {
|
||||
pastHour: number;
|
||||
past24Hours: number;
|
||||
},
|
||||
}
|
||||
|
||||
@@ -14,7 +14,9 @@ import {OrgPrefs, UserOrgPrefs, UserPrefs} from 'app/common/Prefs';
|
||||
import * as roles from 'app/common/roles';
|
||||
import {addCurrentOrgToPath} from 'app/common/urlUtils';
|
||||
import {encodeQueryParams} from 'app/common/gutil';
|
||||
import {WebhookUpdate} from 'app/common/Triggers';
|
||||
import {WebhookFields, WebhookSubscribe, WebhookSummary, WebhookUpdate} from 'app/common/Triggers';
|
||||
import omitBy from 'lodash/omitBy';
|
||||
|
||||
|
||||
export type {FullUser, UserProfile};
|
||||
|
||||
@@ -454,8 +456,12 @@ export interface DocAPI {
|
||||
// Get users that are worth proposing to "View As" for access control purposes.
|
||||
getUsersForViewAs(): Promise<PermissionDataWithExtraUsers>;
|
||||
|
||||
getWebhooks(): Promise<WebhookSummary[]>;
|
||||
addWebhook(webhook: WebhookFields): Promise<{webhookId: string}>;
|
||||
removeWebhook(webhookId: string, tableId: string): Promise<void>;
|
||||
// Update webhook
|
||||
updateWebhook(webhook: WebhookUpdate): Promise<void>;
|
||||
flushWebhooks(): Promise<void>;
|
||||
}
|
||||
|
||||
// Operations that are supported by a doc worker.
|
||||
@@ -905,6 +911,19 @@ export class DocAPIImpl extends BaseAPI implements DocAPI {
|
||||
return this.requestJson(`${this._url}/usersForViewAs`);
|
||||
}
|
||||
|
||||
public async getWebhooks(): Promise<WebhookSummary[]> {
|
||||
return this.requestJson(`${this._url}/webhooks`);
|
||||
}
|
||||
|
||||
public async addWebhook(webhook: WebhookSubscribe & {tableId: string}): Promise<{webhookId: string}> {
|
||||
const {tableId} = webhook;
|
||||
return this.requestJson(`${this._url}/tables/${tableId}/_subscribe`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(
|
||||
omitBy(webhook, (val, key) => key === 'tableId' || val === null)),
|
||||
});
|
||||
}
|
||||
|
||||
public async updateWebhook(webhook: WebhookUpdate): Promise<void> {
|
||||
return this.requestJson(`${this._url}/webhooks/${webhook.id}`, {
|
||||
method: 'PATCH',
|
||||
@@ -912,6 +931,21 @@ export class DocAPIImpl extends BaseAPI implements DocAPI {
|
||||
});
|
||||
}
|
||||
|
||||
public removeWebhook(webhookId: string, tableId: string) {
|
||||
// unsubscribeKey is not required for owners
|
||||
const unsubscribeKey = '';
|
||||
return this.requestJson(`${this._url}/tables/${tableId}/_unsubscribe`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({webhookId, unsubscribeKey}),
|
||||
});
|
||||
}
|
||||
|
||||
public async flushWebhooks(): Promise<void> {
|
||||
await this.request(`${this._url}/webhooks/queue`, {
|
||||
method: 'DELETE'
|
||||
});
|
||||
}
|
||||
|
||||
public async forceReload(): Promise<void> {
|
||||
await this.request(`${this._url}/force-reload`, {
|
||||
method: 'POST'
|
||||
|
||||
@@ -10,7 +10,7 @@ import {Document} from 'app/common/UserAPI';
|
||||
import clone = require('lodash/clone');
|
||||
import pickBy = require('lodash/pickBy');
|
||||
|
||||
export const SpecialDocPage = StringUnion('code', 'acl', 'data', 'GristDocTour', 'settings');
|
||||
export const SpecialDocPage = StringUnion('code', 'acl', 'data', 'GristDocTour', 'settings', 'webhook');
|
||||
type SpecialDocPage = typeof SpecialDocPage.type;
|
||||
export type IDocPage = number | SpecialDocPage;
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { GristObjCode } from "app/plugin/GristData";
|
||||
|
||||
// tslint:disable:object-literal-key-quotes
|
||||
|
||||
export const SCHEMA_VERSION = 37;
|
||||
export const SCHEMA_VERSION = 38;
|
||||
|
||||
export const schema = {
|
||||
|
||||
@@ -160,6 +160,9 @@ export const schema = {
|
||||
eventTypes : "ChoiceList",
|
||||
isReadyColRef : "Ref:_grist_Tables_column",
|
||||
actions : "Text",
|
||||
label : "Text",
|
||||
memo : "Text",
|
||||
enabled : "Bool",
|
||||
},
|
||||
|
||||
"_grist_ACLRules": {
|
||||
@@ -367,6 +370,9 @@ export interface SchemaTypes {
|
||||
eventTypes: [GristObjCode.List, ...string[]]|null;
|
||||
isReadyColRef: number;
|
||||
actions: string;
|
||||
label: string;
|
||||
memo: string;
|
||||
enabled: boolean;
|
||||
};
|
||||
|
||||
"_grist_ACLRules": {
|
||||
|
||||
Reference in New Issue
Block a user