mirror of https://github.com/gristlabs/grist-core.git (synced 2024-10-27 20:44:07 +00:00)

commit 6171a012db: (core) updates from grist-core
43 .github/workflows/fly-build.yml vendored Normal file
@@ -0,0 +1,43 @@
# fly-deploy will be triggered on completion of this workflow to actually deploy the code to fly.io.

name: fly.io Build
on:
  pull_request:
    branches: [ main ]
    types: [labeled, opened, synchronize, reopened]

  # Allows running this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  build:
    name: Build Docker image
    runs-on: ubuntu-latest
    # Build when the 'preview' label is added, or when PR is updated with this label present.
    if: >
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'pull_request' &&
       contains(github.event.pull_request.labels.*.name, 'preview'))
    steps:
      - uses: actions/checkout@v4
      - name: Build and export Docker image
        id: docker-build
        run: >
          docker build -t grist-core:preview . &&
          docker image save grist-core:preview -o grist-core.tar
      - name: Save PR information
        run: |
          echo PR_NUMBER=${{ github.event.number }} >> ./pr-info.txt
          echo PR_SOURCE=${{ github.event.pull_request.head.repo.full_name }}-${{ github.event.pull_request.head.ref }} >> ./pr-info.txt
          echo PR_SHASUM=${{ github.event.pull_request.head.sha }} >> ./pr-info.txt
        # PR_SOURCE looks like <owner>/<repo>-<branch>.
        # For example, if the GitHub user "foo" forked grist-core as "grist-bar", and makes a PR from their branch named "baz",
        # it will be "foo/grist-bar-baz". deploy.js later replaces "/" with "-", making it "foo-grist-bar-baz".
      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: docker-image
          path: |
            ./grist-core.tar
            ./pr-info.txt
          if-no-files-found: "error"
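As a rough illustration (not part of the workflow above), the naming rule described in the PR_SOURCE comments could be sketched in TypeScript as follows; the helper name is hypothetical, and the "grist-" prefix is taken from the fly-deploy.js usage text further below:

// Hypothetical sketch: turn a PR_SOURCE value such as "foo/grist-bar-baz" into a
// fly.io-friendly app name by replacing "/" with "-", as the comments above say
// deploy.js does, and prefixing "grist-" per the deploy script's usage text.
function flyAppNameFromPrSource(prSource: string): string {
  return `grist-${prSource.replace(/\//g, '-')}`;  // "foo/grist-bar-baz" -> "grist-foo-grist-bar-baz"
}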
20 .github/workflows/fly-cleanup.yml vendored
@@ -1,4 +1,4 @@
name: Fly Cleanup
name: fly.io Cleanup
on:
  schedule:
    # Once a day, clean up jobs marked as expired
@@ -12,12 +12,12 @@ env:

jobs:
  clean:
    name: Clean stale deployed apps
    runs-on: ubuntu-latest
    if: github.repository_owner == 'gristlabs'
    steps:
      - uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.1.66
      - run: node buildtools/fly-deploy.js clean
    name: Clean stale deployed apps
    runs-on: ubuntu-latest
    if: github.repository_owner == 'gristlabs'
    steps:
      - uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.2.72
      - run: node buildtools/fly-deploy.js clean
70 .github/workflows/fly-deploy.yml vendored Normal file
@@ -0,0 +1,70 @@
# Follow-up of fly-build, with access to secrets for making deployments.
# This workflow runs in the target repo context. It does not, and should never execute user-supplied code.
# See https://securitylab.github.com/research/github-actions-preventing-pwn-requests/

name: fly.io Deploy
on:
  workflow_run:
    workflows: ["fly.io Build"]
    types:
      - completed

jobs:
  deploy:
    name: Deploy app to fly.io
    runs-on: ubuntu-latest
    if: |
      github.event.workflow_run.event == 'pull_request' &&
      github.event.workflow_run.conclusion == 'success'
    steps:
      - uses: actions/checkout@v4
      - name: Set up flyctl
        uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.2.72
      - name: Download artifacts
        uses: actions/github-script@v7
        with:
          script: |
            var artifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: ${{ github.event.workflow_run.id }},
            });
            var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "docker-image"
            })[0];
            var download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
              artifact_id: matchArtifact.id,
              archive_format: 'zip',
            });
            var fs = require('fs');
            fs.writeFileSync('${{github.workspace}}/docker-image.zip', Buffer.from(download.data));
      - name: Extract artifacts
        id: extract_artifacts
        run: |
          unzip docker-image.zip
          cat ./pr-info.txt >> $GITHUB_OUTPUT
      - name: Load Docker image
        run: docker load --input grist-core.tar
      - name: Deploy to fly.io
        id: fly_deploy
        env:
          FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
          BRANCH_NAME: ${{ steps.extract_artifacts.outputs.PR_SOURCE }}
        run: |
          node buildtools/fly-deploy.js deploy
          flyctl config -c ./fly.toml env | awk '/APP_HOME_URL/{print "DEPLOY_URL=" $2}' >> $GITHUB_OUTPUT
          flyctl config -c ./fly.toml env | awk '/FLY_DEPLOY_EXPIRATION/{print "EXPIRES=" $2}' >> $GITHUB_OUTPUT
      - name: Comment on PR
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: ${{ steps.extract_artifacts.outputs.PR_NUMBER }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `Deployed commit \`${{ steps.extract_artifacts.outputs.PR_SHASUM }}\` as ${{ steps.fly_deploy.outputs.DEPLOY_URL }} (until ${{ steps.fly_deploy.outputs.EXPIRES }})`
            })
36 .github/workflows/fly-destroy.yml vendored Normal file
@@ -0,0 +1,36 @@
# This workflow runs in the target repo context, as it is triggered via pull_request_target.
# It does not, and should not have access to code in the PR.
# See https://securitylab.github.com/research/github-actions-preventing-pwn-requests/

name: fly.io Destroy
on:
  pull_request_target:
    branches: [ main ]
    types: [unlabeled, closed]

  # Allows running this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  destroy:
    name: Remove app from fly.io
    runs-on: ubuntu-latest
    # Remove the deployment when 'preview' label is removed, or the PR is closed.
    if: |
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'pull_request_target' &&
        (github.event.action == 'closed' ||
          (github.event.action == 'unlabeled' && github.event.label.name == 'preview')))
    steps:
      - uses: actions/checkout@v4
      - name: Set up flyctl
        uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.2.72
      - name: Destroy fly.io app
        env:
          FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
          BRANCH_NAME: ${{ github.event.pull_request.head.repo.full_name }}-${{ github.event.pull_request.head.ref }}
        # See fly-build for what BRANCH_NAME looks like.
        id: fly_destroy
        run: node buildtools/fly-deploy.js destroy
64 .github/workflows/fly.yml vendored
@@ -1,64 +0,0 @@
name: Fly Deploy
on:
  pull_request:
    branches: [ main ]
    types: [labeled, unlabeled, closed, opened, synchronize, reopened]

  # Allows running this workflow manually from the Actions tab
  workflow_dispatch:

env:
  FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
  BRANCH_NAME: ${{ github.head_ref || github.ref_name }}

jobs:
  deploy:
    name: Deploy app
    runs-on: ubuntu-latest
    # Deploy when the 'preview' label is added, or when PR is updated with this label present.
    if: |
      github.repository_owner == 'gristlabs' &&
      github.event_name == 'pull_request' && (
        github.event.action == 'labeled' ||
        github.event.action == 'opened' ||
        github.event.action == 'synchronize' ||
        github.event.action == 'reopened'
      ) &&
      contains(github.event.pull_request.labels.*.name, 'preview')
    steps:
      - uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.1.89
      - id: fly_deploy
        run: |
          node buildtools/fly-deploy.js deploy
          flyctl config -c ./fly.toml env | awk '/APP_HOME_URL/{print "DEPLOY_URL=" $2}' >> $GITHUB_OUTPUT
          flyctl config -c ./fly.toml env | awk '/FLY_DEPLOY_EXPIRATION/{print "EXPIRES=" $2}' >> $GITHUB_OUTPUT

      - uses: actions/github-script@v6
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `Deployed as ${{ steps.fly_deploy.outputs.DEPLOY_URL }} (until ${{ steps.fly_deploy.outputs.EXPIRES }})`
            })

  destroy:
    name: Remove app
    runs-on: ubuntu-latest
    # Remove the deployment when 'preview' label is removed, or the PR is closed.
    if: |
      github.repository_owner == 'gristlabs' &&
      github.event_name == 'pull_request' &&
      (github.event.action == 'closed' ||
        (github.event.action == 'unlabeled' && github.event.label.name == 'preview'))
    steps:
      - uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master
        with:
          version: 0.1.89
      - id: fly_destroy
        run: node buildtools/fly-deploy.js destroy
@@ -107,6 +107,12 @@ const WEBHOOK_COLUMNS = [
    type: 'Text',
    label: t('Status'),
  },
  {
    id: VirtualId(),
    colId: 'authorization',
    type: 'Text',
    label: t('Header Authorization'),
  },
] as const;

/**
@@ -114,10 +120,11 @@ const WEBHOOK_COLUMNS = [
 */
const WEBHOOK_VIEW_FIELDS: Array<(typeof WEBHOOK_COLUMNS)[number]['colId']> = [
  'name', 'memo',
  'eventTypes', 'url',
  'tableId', 'isReadyColumn',
  'watchedColIdsText', 'webhookId',
  'enabled', 'status'
  'eventTypes', 'tableId',
  'watchedColIdsText', 'isReadyColumn',
  'url', 'authorization',
  'webhookId', 'enabled',
  'status'
];

/**
@@ -136,7 +143,7 @@ class WebhookExternalTable implements IExternalTable {
  public name = 'GristHidden_WebhookTable';
  public initialActions = _prepareWebhookInitialActions(this.name);
  public saveableFields = [
    'tableId', 'watchedColIdsText', 'url', 'eventTypes', 'enabled', 'name', 'memo', 'isReadyColumn',
    'tableId', 'watchedColIdsText', 'url', 'authorization', 'eventTypes', 'enabled', 'name', 'memo', 'isReadyColumn',
  ];
  public webhooks: ObservableArray<UIWebhookSummary> = observableArray<UIWebhookSummary>([]);

@@ -14,6 +14,7 @@ export const Webhook = t.iface([], {

export const WebhookFields = t.iface([], {
  "url": "string",
  "authorization": t.opt("string"),
  "eventTypes": t.array(t.union(t.lit("add"), t.lit("update"))),
  "tableId": "string",
  "watchedColIds": t.opt(t.array("string")),
@@ -29,6 +30,7 @@ export const WebhookStatus = t.union(t.lit('idle'), t.lit('sending'), t.lit('ret

export const WebhookSubscribe = t.iface([], {
  "url": "string",
  "authorization": t.opt("string"),
  "eventTypes": t.array(t.union(t.lit("add"), t.lit("update"))),
  "watchedColIds": t.opt(t.array("string")),
  "enabled": t.opt("boolean"),
@@ -45,6 +47,7 @@ export const WebhookSummary = t.iface([], {
  "id": "string",
  "fields": t.iface([], {
    "url": "string",
    "authorization": t.opt("string"),
    "unsubscribeKey": "string",
    "eventTypes": t.array("string"),
    "isReadyColumn": t.union("string", "null"),
@@ -64,6 +67,7 @@ export const WebhookUpdate = t.iface([], {

export const WebhookPatch = t.iface([], {
  "url": t.opt("string"),
  "authorization": t.opt("string"),
  "eventTypes": t.opt(t.array(t.union(t.lit("add"), t.lit("update")))),
  "tableId": t.opt("string"),
  "watchedColIds": t.opt(t.array("string")),

@@ -8,6 +8,7 @@ export interface Webhook {

export interface WebhookFields {
  url: string;
  authorization?: string;
  eventTypes: Array<"add"|"update">;
  tableId: string;
  watchedColIds?: string[];
@@ -26,6 +27,7 @@ export type WebhookStatus = 'idle'|'sending'|'retrying'|'postponed'|'error'|'inv
// tableId from the url) but generics are not yet supported by ts-interface-builder
export interface WebhookSubscribe {
  url: string;
  authorization?: string;
  eventTypes: Array<"add"|"update">;
  watchedColIds?: string[];
  enabled?: boolean;
@@ -42,6 +44,7 @@ export interface WebhookSummary {
  id: string;
  fields: {
    url: string;
    authorization?: string;
    unsubscribeKey: string;
    eventTypes: string[];
    isReadyColumn: string|null;
@@ -64,6 +67,7 @@ export interface WebhookUpdate {
// ts-interface-builder
export interface WebhookPatch {
  url?: string;
  authorization?: string;
  eventTypes?: Array<"add"|"update">;
  tableId?: string;
  watchedColIds?: string[];
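For illustration only (not part of this diff), a WebhookFields payload making use of the new optional authorization field might look like the following; the values are hypothetical:

// Sketch of a webhook definition with the new authorization field. The string
// is forwarded verbatim as the HTTP Authorization header when the webhook fires.
const exampleWebhook: WebhookFields = {
  url: 'https://example.com/grist-hook',
  authorization: 'Bearer my-secret-token',
  eventTypes: ['add', 'update'],
  tableId: 'Table1',
  watchedColIds: ['ColA'],
};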
27 app/common/normalizedDateTimeString.ts Normal file
@@ -0,0 +1,27 @@
import moment from 'moment-timezone';

/**
 * Output an ISO8601 format datetime string, with timezone.
 * Any string fed in without timezone is expected to be in UTC.
 *
 * When connected to postgres, dates will be extracted as Date objects,
 * with timezone information. The normalization done here is not
 * really needed in this case.
 *
 * Timestamps in SQLite are stored as UTC, and read as strings
 * (without timezone information). The normalization here is
 * pretty important in this case.
 */
export function normalizedDateTimeString(dateTime: any): string {
  if (!dateTime) { return dateTime; }
  if (dateTime instanceof Date) {
    return moment(dateTime).toISOString();
  }
  if (typeof dateTime === 'string' || typeof dateTime === 'number') {
    // When SQLite returns a string, it will be in UTC.
    // Need to make sure it actually have timezone info in it
    // (will not by default).
    return moment.utc(dateTime).toISOString();
  }
  throw new Error(`normalizedDateTimeString cannot handle ${dateTime}`);
}
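Illustrative usage of the new helper (a sketch based on the comments above, not code from this commit):

import {normalizedDateTimeString} from 'app/common/normalizedDateTimeString';

// A SQLite-style timestamp string carries no timezone, so it is read as UTC.
normalizedDateTimeString('2024-01-15 12:00:00');                // '2024-01-15T12:00:00.000Z'
// Date objects already carry timezone information and are simply serialized.
normalizedDateTimeString(new Date(Date.UTC(2024, 0, 15, 12)));  // '2024-01-15T12:00:00.000Z'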
@@ -9,7 +9,7 @@ import {FullUser} from 'app/common/LoginSessionAPI';
import {BasicRole} from 'app/common/roles';
import {OrganizationProperties, PermissionDelta} from 'app/common/UserAPI';
import {User} from 'app/gen-server/entity/User';
import {BillingOptions, HomeDBManager, QueryResult, Scope} from 'app/gen-server/lib/HomeDBManager';
import {BillingOptions, HomeDBManager, QueryResult, Scope} from 'app/gen-server/lib/homedb/HomeDBManager';
import {getAuthorizedUserId, getUserId, getUserProfiles, RequestWithLogin} from 'app/server/lib/Authorizer';
import {getSessionUser, linkOrgWithEmail} from 'app/server/lib/BrowserSession';
import {expressWrap} from 'app/server/lib/expressWrap';

@@ -1,6 +1,6 @@
import { makeId } from 'app/server/lib/idUtils';
import { Activation } from 'app/gen-server/entity/Activation';
import { HomeDBManager } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager } from 'app/gen-server/lib/homedb/HomeDBManager';

/**
 * Manage activations. Not much to do currently, there is at most one

@@ -5,7 +5,7 @@ import {AbortController} from 'node-abort-controller';
import { ApiError } from 'app/common/ApiError';
import { SHARE_KEY_PREFIX } from 'app/common/gristUrls';
import { removeTrailingSlash } from 'app/common/gutil';
import { HomeDBManager } from "app/gen-server/lib/HomeDBManager";
import { HomeDBManager } from "app/gen-server/lib/homedb/HomeDBManager";
import { assertAccess, getOrSetDocAuth, getTransitiveHeaders, RequestWithLogin } from 'app/server/lib/Authorizer';
import { IDocWorkerMap } from "app/server/lib/DocWorkerMap";
import { expressWrap } from "app/server/lib/expressWrap";

@@ -1,7 +1,7 @@
import { ApiError } from 'app/common/ApiError';
import { FullUser } from 'app/common/UserAPI';
import { Organization } from 'app/gen-server/entity/Organization';
import { HomeDBManager, Scope } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager, Scope } from 'app/gen-server/lib/homedb/HomeDBManager';
import { INotifier } from 'app/server/lib/INotifier';
import { scrubUserFromOrg } from 'app/gen-server/lib/scrubUserFromOrg';
import { GristLoginSystem } from 'app/server/lib/GristServer';

@@ -1,12 +1,13 @@
import { ApiError } from 'app/common/ApiError';
import { delay } from 'app/common/delay';
import { buildUrlId } from 'app/common/gristUrls';
import { normalizedDateTimeString } from 'app/common/normalizedDateTimeString';
import { BillingAccount } from 'app/gen-server/entity/BillingAccount';
import { Document } from 'app/gen-server/entity/Document';
import { Organization } from 'app/gen-server/entity/Organization';
import { Product } from 'app/gen-server/entity/Product';
import { Workspace } from 'app/gen-server/entity/Workspace';
import { HomeDBManager, Scope } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager, Scope } from 'app/gen-server/lib/homedb/HomeDBManager';
import { fromNow } from 'app/gen-server/sqlUtils';
import { getAuthorizedUserId } from 'app/server/lib/Authorizer';
import { expressWrap } from 'app/server/lib/expressWrap';
@@ -16,7 +17,6 @@ import log from 'app/server/lib/log';
import { IPermitStore } from 'app/server/lib/Permit';
import { optStringParam, stringParam } from 'app/server/lib/requestUtils';
import * as express from 'express';
import moment from 'moment';
import fetch from 'node-fetch';
import * as Fetch from 'node-fetch';
import { EntityManager } from 'typeorm';
@@ -416,32 +416,6 @@ export class Housekeeper {
  }
}

/**
 * Output an ISO8601 format datetime string, with timezone.
 * Any string fed in without timezone is expected to be in UTC.
 *
 * When connected to postgres, dates will be extracted as Date objects,
 * with timezone information. The normalization done here is not
 * really needed in this case.
 *
 * Timestamps in SQLite are stored as UTC, and read as strings
 * (without timezone information). The normalization here is
 * pretty important in this case.
 */
function normalizedDateTimeString(dateTime: any): string {
  if (!dateTime) { return dateTime; }
  if (dateTime instanceof Date) {
    return moment(dateTime).toISOString();
  }
  if (typeof dateTime === 'string') {
    // When SQLite returns a string, it will be in UTC.
    // Need to make sure it actually have timezone info in it
    // (will not by default).
    return moment.utc(dateTime).toISOString();
  }
  throw new Error(`normalizedDateTimeString cannot handle ${dateTime}`);
}

/**
 * Call callback(item) for each item on the list, sleeping periodically to allow other works to
 * happen. Any time work takes more than SYNC_WORK_LIMIT_MS, will sleep for SYNC_WORK_BREAK_MS.
@@ -1,7 +1,7 @@
import {Document} from 'app/gen-server/entity/Document';
import {Organization} from 'app/gen-server/entity/Organization';
import {User} from 'app/gen-server/entity/User';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import log from 'app/server/lib/log';

// Frequency of logging usage information. Not something we need

@@ -1608,7 +1608,7 @@ export class HomeDBManager extends EventEmitter {
      .where("id = :id AND doc_id = :docId", {id, docId})
      .execute();
    if (res.affected !== 1) {
      throw new ApiError('secret with given id not found', 404);
      throw new ApiError('secret with given id not found or nothing was updated', 404);
    }
  }

@@ -1623,14 +1623,32 @@ export class HomeDBManager extends EventEmitter {

  // Update the webhook url in the webhook's corresponding secret (note: the webhook identifier is
  // its secret identifier).
  public async updateWebhookUrl(id: string, docId: string, url: string, outerManager?: EntityManager) {
  public async updateWebhookUrlAndAuth(
    props: {
      id: string,
      docId: string,
      url: string | undefined,
      auth: string | undefined,
      outerManager?: EntityManager}
  ) {
    const {id, docId, url, auth, outerManager} = props;
    return await this._runInTransaction(outerManager, async manager => {
      if (url === undefined && auth === undefined) {
        throw new ApiError('None of the Webhook url and auth are defined', 404);
      }
      const value = await this.getSecret(id, docId, manager);
      if (!value) {
        throw new ApiError('Webhook with given id not found', 404);
      }
      const webhookSecret = JSON.parse(value);
      webhookSecret.url = url;
      // As we want to patch the webhookSecret object, only set the url and the authorization when they are defined.
      // When the user wants to empty the value, we are expected to receive empty strings.
      if (url !== undefined) {
        webhookSecret.url = url;
      }
      if (auth !== undefined) {
        webhookSecret.authorization = auth;
      }
      await this.updateSecret(id, docId, JSON.stringify(webhookSecret), manager);
    });
  }
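As a usage sketch (hypothetical call site, mirroring how DocApi.ts invokes it further below), patching only the Authorization header leaves the stored url untouched, because each field is copied into the secret only when it is defined:

await dbManager.updateWebhookUrlAndAuth({
  id: webhookId,            // the webhook's secret id
  docId,
  url: undefined,           // undefined: keep the existing url
  auth: 'Bearer new-token', // an empty string '' would clear the header instead
});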
@@ -17,7 +17,7 @@ import { Group } from 'app/gen-server/entity/Group';
import { Login } from 'app/gen-server/entity/Login';
import { User } from 'app/gen-server/entity/User';
import { appSettings } from 'app/server/lib/AppSettings';
import { HomeDBManager, PermissionDeltaAnalysis, Scope } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager, PermissionDeltaAnalysis, Scope } from 'app/gen-server/lib/homedb/HomeDBManager';
import {
  AvailableUsers, GetUserOptions, NonGuestGroup, QueryResult, Resource, RunInTransaction, UserProfileChange
} from 'app/gen-server/lib/homedb/Interfaces';

@@ -1,7 +1,7 @@
import { Level, TelemetryContracts } from 'app/common/Telemetry';
import { version } from 'app/common/version';
import { synchronizeProducts } from 'app/gen-server/entity/Product';
import { HomeDBManager } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager } from 'app/gen-server/lib/homedb/HomeDBManager';
import { applyPatch } from 'app/gen-server/lib/TypeORMPatches';
import { getMigrations, getOrCreateConnection, getTypeORMSettings,
         undoLastMigration, updateDb } from 'app/server/lib/dbUtils';

@@ -69,6 +69,7 @@ import {commonUrls, parseUrlId} from 'app/common/gristUrls';
import {byteString, countIf, retryOnce, safeJsonParse, timeoutReached} from 'app/common/gutil';
import {InactivityTimer} from 'app/common/InactivityTimer';
import {Interval} from 'app/common/Interval';
import {normalizedDateTimeString} from 'app/common/normalizedDateTimeString';
import {
  compilePredicateFormula,
  getPredicateFormulaProperties,
@@ -2496,6 +2497,24 @@ export class ActiveDoc extends EventEmitter {
    }
  }

  private _logSnapshotProgress(docSession: OptDocSession) {
    const snapshotProgress = this._docManager.storageManager.getSnapshotProgress(this.docName);
    const lastWindowTime = (snapshotProgress.lastWindowStartedAt &&
        snapshotProgress.lastWindowDoneAt &&
        snapshotProgress.lastWindowDoneAt > snapshotProgress.lastWindowStartedAt) ?
        snapshotProgress.lastWindowDoneAt : Date.now();
    const delay = snapshotProgress.lastWindowStartedAt ?
        lastWindowTime - snapshotProgress.lastWindowStartedAt : null;
    log.rawInfo('snapshot status', {
      ...this.getLogMeta(docSession),
      ...snapshotProgress,
      lastChangeAt: normalizedDateTimeString(snapshotProgress.lastChangeAt),
      lastWindowStartedAt: normalizedDateTimeString(snapshotProgress.lastWindowStartedAt),
      lastWindowDoneAt: normalizedDateTimeString(snapshotProgress.lastWindowDoneAt),
      delay,
    });
  }

  private _logDocMetrics(docSession: OptDocSession, triggeredBy: 'docOpen' | 'interval'| 'docClose') {
    this.logTelemetryEvent(docSession, 'documentUsage', {
      limited: {
@@ -2513,6 +2532,9 @@ export class ActiveDoc extends EventEmitter {
        ...this._getCustomWidgetMetrics(),
      },
    });
    // Log progress on making snapshots periodically, to catch anything
    // excessively slow.
    this._logSnapshotProgress(docSession);
  }

  private _getAccessRuleMetrics() {
@@ -11,7 +11,7 @@ import {LocalPlugin} from "app/common/plugin";
import {TELEMETRY_TEMPLATE_SIGNUP_COOKIE_NAME} from 'app/common/Telemetry';
import {Document as APIDocument, PublicDocWorkerUrlInfo} from 'app/common/UserAPI';
import {Document} from "app/gen-server/entity/Document";
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {assertAccess, getTransitiveHeaders, getUserId, isAnonymousUser,
        RequestWithLogin} from 'app/server/lib/Authorizer';
import {DocStatus, IDocWorkerMap} from 'app/server/lib/DocWorkerMap';

@@ -7,7 +7,7 @@ import {canEdit, canView, getWeakestRole, Role} from 'app/common/roles';
import {UserOptions} from 'app/common/UserAPI';
import {Document} from 'app/gen-server/entity/Document';
import {User} from 'app/gen-server/entity/User';
import {DocAuthKey, DocAuthResult, HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {DocAuthKey, DocAuthResult, HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {forceSessionChange, getSessionProfiles, getSessionUser, getSignInStatus, linkOrgWithEmail, SessionObj,
        SessionUserObj, SignInStatus} from 'app/server/lib/BrowserSession';
import {RequestWithOrg} from 'app/server/lib/extractOrg';

@@ -8,7 +8,7 @@ import {TelemetryMetadata} from 'app/common/Telemetry';
import {ANONYMOUS_USER_EMAIL} from 'app/common/UserAPI';
import {normalizeEmail} from 'app/common/emails';
import {User} from 'app/gen-server/entity/User';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {ActiveDoc} from 'app/server/lib/ActiveDoc';
import {Authorizer} from 'app/server/lib/Authorizer';
import {ScopedSession} from 'app/server/lib/BrowserSession';

@@ -30,7 +30,7 @@ import {TelemetryMetadataByLevel} from "app/common/Telemetry";
import {WebhookFields} from "app/common/Triggers";
import TriggersTI from 'app/common/Triggers-ti';
import {DocReplacementOptions, DocState, DocStateComparison, DocStates, NEW_DOCUMENT_CODE} from 'app/common/UserAPI';
import {HomeDBManager, makeDocAuthResult} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager, makeDocAuthResult} from 'app/gen-server/lib/homedb/HomeDBManager';
import * as Types from "app/plugin/DocApiTypes";
import DocApiTypesTI from "app/plugin/DocApiTypes-ti";
import {GristObjCode} from "app/plugin/GristData";
@@ -324,7 +324,7 @@ export class DocWorkerApi {
    );

    const registerWebhook = async (activeDoc: ActiveDoc, req: RequestWithLogin, webhook: WebhookFields) => {
      const {fields, url} = await getWebhookSettings(activeDoc, req, null, webhook);
      const {fields, url, authorization} = await getWebhookSettings(activeDoc, req, null, webhook);
      if (!fields.eventTypes?.length) {
        throw new ApiError(`eventTypes must be a non-empty array`, 400);
      }
@@ -336,7 +336,7 @@ export class DocWorkerApi {
      }

      const unsubscribeKey = uuidv4();
      const webhookSecret: WebHookSecret = {unsubscribeKey, url};
      const webhookSecret: WebHookSecret = {unsubscribeKey, url, authorization};
      const secretValue = JSON.stringify(webhookSecret);
      const webhookId = (await this._dbManager.addSecret(secretValue, activeDoc.docName)).id;

@@ -392,7 +392,7 @@ export class DocWorkerApi {
      const tablesTable = activeDoc.docData!.getMetaTable("_grist_Tables");
      const trigger = webhookId ? activeDoc.triggers.getWebhookTriggerRecord(webhookId) : undefined;
      let currentTableId = trigger ? tablesTable.getValue(trigger.tableRef, 'tableId')! : undefined;
      const {url, eventTypes, watchedColIds, isReadyColumn, name} = webhook;
      const {url, authorization, eventTypes, watchedColIds, isReadyColumn, name} = webhook;
      const tableId = await getRealTableId(req.params.tableId || webhook.tableId, {metaTables});

      const fields: Partial<SchemaTypes['_grist_Triggers']> = {};
@@ -454,6 +454,7 @@ export class DocWorkerApi {
      return {
        fields,
        url,
        authorization,
      };
    }

@@ -926,16 +927,16 @@ export class DocWorkerApi {

      const docId = activeDoc.docName;
      const webhookId = req.params.webhookId;
      const {fields, url} = await getWebhookSettings(activeDoc, req, webhookId, req.body);
      const {fields, url, authorization} = await getWebhookSettings(activeDoc, req, webhookId, req.body);
      if (fields.enabled === false) {
        await activeDoc.triggers.clearSingleWebhookQueue(webhookId);
      }

      const triggerRowId = activeDoc.triggers.getWebhookTriggerRecord(webhookId).id;

      // update url in homedb
      if (url) {
        await this._dbManager.updateWebhookUrl(webhookId, docId, url);
      // update url and authorization header in homedb
      if (url || authorization) {
        await this._dbManager.updateWebhookUrlAndAuth({id: webhookId, docId, url, auth: authorization});
        activeDoc.triggers.webhookDeleted(webhookId); // clear cache
      }

@@ -15,7 +15,7 @@ import {Invite} from 'app/common/sharing';
import {tbind} from 'app/common/tbind';
import {TelemetryMetadataByLevel} from 'app/common/Telemetry';
import {NEW_DOCUMENT_CODE} from 'app/common/UserAPI';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {assertAccess, Authorizer, DocAuthorizer, DummyAuthorizer, isSingleUserMode,
        RequestWithLogin} from 'app/server/lib/Authorizer';
import {Client} from 'app/server/lib/Client';

@@ -11,7 +11,7 @@ import * as gutil from 'app/common/gutil';
import {Comm} from 'app/server/lib/Comm';
import * as docUtils from 'app/server/lib/docUtils';
import {GristServer} from 'app/server/lib/GristServer';
import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
import {IDocStorageManager, SnapshotProgress} from 'app/server/lib/IDocStorageManager';
import {IShell} from 'app/server/lib/IShell';
import log from 'app/server/lib/log';
import uuidv4 from "uuid/v4";
@@ -257,6 +257,17 @@ export class DocStorageManager implements IDocStorageManager {
    throw new Error('removeSnapshots not implemented');
  }

  public getSnapshotProgress(): SnapshotProgress {
    return {
      pushes: 0,
      skippedPushes: 0,
      errors: 0,
      changes: 0,
      windowsStarted: 0,
      windowsDone: 0,
    };
  }

  public async replace(docName: string, options: any): Promise<void> {
    throw new Error('replacement not implemented');
  }
@@ -3,7 +3,7 @@
 * In hosted environment, this comprises the functionality of the DocWorker instance type.
 */
import {isAffirmative} from 'app/common/gutil';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {ActionHistoryImpl} from 'app/server/lib/ActionHistoryImpl';
import {assertAccess, getOrSetDocAuth, RequestWithLogin} from 'app/server/lib/Authorizer';
import {Client} from 'app/server/lib/Client';

@@ -20,7 +20,7 @@ import {Activations} from 'app/gen-server/lib/Activations';
import {DocApiForwarder} from 'app/gen-server/lib/DocApiForwarder';
import {getDocWorkerMap} from 'app/gen-server/lib/DocWorkerMap';
import {Doom} from 'app/gen-server/lib/Doom';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {Housekeeper} from 'app/gen-server/lib/Housekeeper';
import {Usage} from 'app/gen-server/lib/Usage';
import {AccessTokens, IAccessTokens} from 'app/server/lib/AccessTokens';

@@ -35,7 +35,7 @@ import { EmptyRecordView, InfoView, RecordView } from 'app/common/RecordView';
import { canEdit, canView, isValidRole, Role } from 'app/common/roles';
import { User } from 'app/common/User';
import { FullUser, UserAccessData } from 'app/common/UserAPI';
import { HomeDBManager } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager } from 'app/gen-server/lib/homedb/HomeDBManager';
import { GristObjCode } from 'app/plugin/GristData';
import { DocClients } from 'app/server/lib/DocClients';
import { getDocSessionAccess, getDocSessionAltSessionId, getDocSessionShare,

@@ -8,7 +8,7 @@ import { Organization } from 'app/gen-server/entity/Organization';
import { User } from 'app/gen-server/entity/User';
import { Workspace } from 'app/gen-server/entity/Workspace';
import { Activations } from 'app/gen-server/lib/Activations';
import { HomeDBManager } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager } from 'app/gen-server/lib/homedb/HomeDBManager';
import { IAccessTokens } from 'app/server/lib/AccessTokens';
import { RequestWithLogin } from 'app/server/lib/Authorizer';
import { Comm } from 'app/server/lib/Comm';

@@ -1,4 +1,4 @@
import {DocumentMetadata, HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {DocumentMetadata, HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import log from 'app/server/lib/log';

/**
@@ -8,14 +8,14 @@ import {DocumentUsage} from 'app/common/DocUsage';
import {buildUrlId, parseUrlId} from 'app/common/gristUrls';
import {KeyedOps} from 'app/common/KeyedOps';
import {DocReplacementOptions, NEW_DOCUMENT_CODE} from 'app/common/UserAPI';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {checksumFile} from 'app/server/lib/checksumFile';
import {DocSnapshotInventory, DocSnapshotPruner} from 'app/server/lib/DocSnapshots';
import {IDocWorkerMap} from 'app/server/lib/DocWorkerMap';
import {ChecksummedExternalStorage, DELETED_TOKEN, ExternalStorage, Unchanged} from 'app/server/lib/ExternalStorage';
import {HostedMetadataManager} from 'app/server/lib/HostedMetadataManager';
import {ICreate} from 'app/server/lib/ICreate';
import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
import {IDocStorageManager, SnapshotProgress} from 'app/server/lib/IDocStorageManager';
import {LogMethods} from "app/server/lib/LogMethods";
import {fromCallback} from 'app/server/lib/serverUtils';
import * as fse from 'fs-extra';
@@ -94,6 +94,9 @@ export class HostedStorageManager implements IDocStorageManager {
  // Time at which document was last changed.
  private _timestamps = new Map<string, string>();

  // Statistics related to snapshot generation.
  private _snapshotProgress = new Map<string, SnapshotProgress>();

  // Access external storage.
  private _ext: ChecksummedExternalStorage;
  private _extMeta: ChecksummedExternalStorage;
@@ -223,6 +226,25 @@ export class HostedStorageManager implements IDocStorageManager {
    return path.basename(altDocName, '.grist');
  }

  /**
   * Read some statistics related to generating snapshots.
   */
  public getSnapshotProgress(docName: string): SnapshotProgress {
    let snapshotProgress = this._snapshotProgress.get(docName);
    if (!snapshotProgress) {
      snapshotProgress = {
        pushes: 0,
        skippedPushes: 0,
        errors: 0,
        changes: 0,
        windowsStarted: 0,
        windowsDone: 0,
      };
      this._snapshotProgress.set(docName, snapshotProgress);
    }
    return snapshotProgress;
  }

  /**
   * Prepares a document for use locally. Here we sync the doc from S3 to the local filesystem.
   * Returns whether the document is new (needs to be created).
@@ -476,7 +498,11 @@ export class HostedStorageManager implements IDocStorageManager {
   * This is called when a document may have been changed, via edits or migrations etc.
   */
  public markAsChanged(docName: string, reason?: string): void {
    const timestamp = new Date().toISOString();
    const now = new Date();
    const snapshotProgress = this.getSnapshotProgress(docName);
    snapshotProgress.lastChangeAt = now.getTime();
    snapshotProgress.changes++;
    const timestamp = now.toISOString();
    this._timestamps.set(docName, timestamp);
    try {
      if (parseUrlId(docName).snapshotId) { return; }
@@ -486,6 +512,10 @@ export class HostedStorageManager implements IDocStorageManager {
      }
      if (this._disableS3) { return; }
      if (this._closed) { throw new Error("HostedStorageManager.markAsChanged called after closing"); }
      if (!this._uploads.hasPendingOperation(docName)) {
        snapshotProgress.lastWindowStartedAt = now.getTime();
        snapshotProgress.windowsStarted++;
      }
      this._uploads.addOperation(docName);
    } finally {
      if (reason === 'edit') {
@@ -729,6 +759,7 @@ export class HostedStorageManager implements IDocStorageManager {
  private async _pushToS3(docId: string): Promise<void> {
    let tmpPath: string|null = null;

    const snapshotProgress = this.getSnapshotProgress(docId);
    try {
      if (this._prepareFiles.has(docId)) {
        throw new Error('too soon to consider pushing');
@@ -748,14 +779,18 @@ export class HostedStorageManager implements IDocStorageManager {
      await this._inventory.uploadAndAdd(docId, async () => {
        const prevSnapshotId = this._latestVersions.get(docId) || null;
        const newSnapshotId = await this._ext.upload(docId, tmpPath as string, metadata);
        snapshotProgress.lastWindowDoneAt = Date.now();
        snapshotProgress.windowsDone++;
        if (newSnapshotId === Unchanged) {
          // Nothing uploaded because nothing changed
          snapshotProgress.skippedPushes++;
          return { prevSnapshotId };
        }
        if (!newSnapshotId) {
          // This is unexpected.
          throw new Error('No snapshotId allocated after upload');
        }
        snapshotProgress.pushes++;
        const snapshot = {
          lastModified: t,
          snapshotId: newSnapshotId,
@@ -767,6 +802,10 @@ export class HostedStorageManager implements IDocStorageManager {
      if (changeMade) {
        await this._onInventoryChange(docId);
      }
    } catch (e) {
      snapshotProgress.errors++;
      // Snapshot window completion time deliberately not set.
      throw e;
    } finally {
      // Clean up backup.
      // NOTE: fse.remove succeeds also when the file does not exist.
@@ -1,7 +1,7 @@
import {GristDeploymentType} from 'app/common/gristUrls';
import {getThemeBackgroundSnippet} from 'app/common/Themes';
import {Document} from 'app/gen-server/entity/Document';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {ExternalStorage} from 'app/server/lib/ExternalStorage';
import {createDummyTelemetry, GristServer} from 'app/server/lib/GristServer';
import {IBilling} from 'app/server/lib/IBilling';

@@ -36,6 +36,8 @@ export interface IDocStorageManager {
  // Metadata may not be returned in this case.
  getSnapshots(docName: string, skipMetadataCache?: boolean): Promise<DocSnapshots>;
  removeSnapshots(docName: string, snapshotIds: string[]): Promise<void>;
  // Get information about how snapshot generation is going.
  getSnapshotProgress(docName: string): SnapshotProgress;
  replace(docName: string, options: DocReplacementOptions): Promise<void>;
}

@@ -66,5 +68,51 @@ export class TrivialDocStorageManager implements IDocStorageManager {
  public async flushDoc() {}
  public async getSnapshots(): Promise<never> { throw new Error('no'); }
  public async removeSnapshots(): Promise<never> { throw new Error('no'); }
  public getSnapshotProgress(): SnapshotProgress { throw new Error('no'); }
  public async replace(): Promise<never> { throw new Error('no'); }
}


/**
 * Some summary information about how snapshot generation is going.
 * Any times are in ms.
 * All information is within the lifetime of a doc worker, not global.
 */
export interface SnapshotProgress {
  /** The last time the document was marked as having changed. */
  lastChangeAt?: number;

  /**
   * The last time a save window started for the document (checking to see
   * if it needs to be pushed, and pushing it if so, possibly waiting
   * quite some time to bundle any other changes).
   */
  lastWindowStartedAt?: number;

  /**
   * The last time the document was either pushed or determined to not
   * actually need to be pushed, after having been marked as changed.
   */
  lastWindowDoneAt?: number;

  /** Number of times the document was pushed. */
  pushes: number;

  /** Number of times the document was not pushed because no change found. */
  skippedPushes: number;

  /** Number of times there was an error trying to push. */
  errors: number;

  /**
   * Number of times the document was marked as changed.
   * Will generally be a lot greater than saves.
   */
  changes: number;

  /** Number of times a save window was started. */
  windowsStarted: number;

  /** Number of times a save window was completed. */
  windowsDone: number;
}
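A small monitoring sketch (assumed, not part of this diff) showing how a caller holding a SnapshotProgress value could combine these counters into a one-line summary:

// Hypothetical helper: summarize snapshot activity for a document.
function describeSnapshotProgress(progress: SnapshotProgress): string {
  const windowsInFlight = progress.windowsStarted - progress.windowsDone;
  return `${progress.changes} changes, ${progress.pushes} pushes, ` +
    `${progress.skippedPushes} skipped, ${progress.errors} errors, ` +
    `${windowsInFlight} save window(s) still open`;
}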
@@ -1,5 +1,5 @@
import {ApiError} from 'app/common/ApiError';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {appSettings} from 'app/server/lib/AppSettings';
import {getUser, RequestWithLogin} from 'app/server/lib/Authorizer';
import {User} from 'app/gen-server/entity/User';

@@ -17,7 +17,7 @@ import {
import {TelemetryPrefsWithSources} from 'app/common/InstallAPI';
import {Activation} from 'app/gen-server/entity/Activation';
import {Activations} from 'app/gen-server/lib/Activations';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {RequestWithLogin} from 'app/server/lib/Authorizer';
import {getDocSessionUser, OptDocSession} from 'app/server/lib/DocSession';
import {expressWrap} from 'app/server/lib/expressWrap';

@@ -1,4 +1,4 @@
import {SUPPORT_EMAIL} from 'app/gen-server/lib/HomeDBManager';
import {SUPPORT_EMAIL} from 'app/gen-server/lib/homedb/HomeDBManager';
import {GristLoginSystem, GristServer} from 'app/server/lib/GristServer';
import {Request} from 'express';

@@ -72,6 +72,7 @@ type Trigger = MetaRowRecord<"_grist_Triggers">;
export interface WebHookSecret {
  url: string;
  unsubscribeKey: string;
  authorization?: string;
}

// Work to do after fetching values from the document
@@ -259,6 +260,7 @@ export class DocTriggers {
    const getTableId = docData.getMetaTable("_grist_Tables").getRowPropFunc("tableId");
    const getColId = docData.getMetaTable("_grist_Tables_column").getRowPropFunc("colId");
    const getUrl = async (id: string) => (await this._getWebHook(id))?.url ?? '';
    const getAuthorization = async (id: string) => (await this._getWebHook(id))?.authorization ?? '';
    const getUnsubscribeKey = async (id: string) => (await this._getWebHook(id))?.unsubscribeKey ?? '';
    const resultTable: WebhookSummary[] = [];

@@ -271,6 +273,7 @@ export class DocTriggers {
    for (const act of webhookActions) {
      // Url, probably should be hidden for non-owners (but currently this API is owners only).
      const url = await getUrl(act.id);
      const authorization = await getAuthorization(act.id);
      // Same story, should be hidden.
      const unsubscribeKey = await getUnsubscribeKey(act.id);
      if (!url || !unsubscribeKey) {
@@ -285,6 +288,7 @@ export class DocTriggers {
        fields: {
          // Url, probably should be hidden for non-owners (but currently this API is owners only).
          url,
          authorization,
          unsubscribeKey,
          // Other fields used to register this webhook.
          eventTypes: decodeObject(t.eventTypes) as string[],
@@ -683,6 +687,7 @@ export class DocTriggers {
      const batch = _.takeWhile(this._webHookEventQueue.slice(0, 100), {id});
      const body = JSON.stringify(batch.map(e => e.payload));
      const url = await this._getWebHookUrl(id);
      const authorization = (await this._getWebHook(id))?.authorization || "";
      if (this._loopAbort.signal.aborted) {
        continue;
      }
@@ -698,7 +703,8 @@ export class DocTriggers {
        this._activeDoc.logTelemetryEvent(null, 'sendingWebhooks', {
          limited: {numEvents: meta.numEvents},
        });
        success = await this._sendWebhookWithRetries(id, url, body, batch.length, this._loopAbort.signal);
        success = await this._sendWebhookWithRetries(
          id, url, authorization, body, batch.length, this._loopAbort.signal);
        if (this._loopAbort.signal.aborted) {
          continue;
        }
@@ -770,7 +776,8 @@ export class DocTriggers {
    return this._drainingQueue ? Math.min(5, TRIGGER_MAX_ATTEMPTS) : TRIGGER_MAX_ATTEMPTS;
  }

  private async _sendWebhookWithRetries(id: string, url: string, body: string, size: number, signal: AbortSignal) {
  private async _sendWebhookWithRetries(
    id: string, url: string, authorization: string, body: string, size: number, signal: AbortSignal) {
    const maxWait = 64;
    let wait = 1;
    for (let attempt = 0; attempt < this._maxWebhookAttempts; attempt++) {
@@ -786,6 +793,7 @@ export class DocTriggers {
          body,
          headers: {
            'Content-Type': 'application/json',
            ...(authorization ? {'Authorization': authorization} : {}),
          },
          signal,
          agent: proxyAgent(new URL(url)),
@@ -3,7 +3,7 @@ import { mapGetOrSet, MapWithTTL } from 'app/common/AsyncCreate';
import { extractOrgParts, getHostType, getKnownOrg } from 'app/common/gristUrls';
import { isAffirmative } from 'app/common/gutil';
import { Organization } from 'app/gen-server/entity/Organization';
import { HomeDBManager } from 'app/gen-server/lib/HomeDBManager';
import { HomeDBManager } from 'app/gen-server/lib/homedb/HomeDBManager';
import { GristServer } from 'app/server/lib/GristServer';
import { getOriginUrl } from 'app/server/lib/requestUtils';
import { NextFunction, Request, RequestHandler, Response } from 'express';

@@ -1,7 +1,7 @@
import {ApiError} from 'app/common/ApiError';
import { DEFAULT_HOME_SUBDOMAIN, isOrgInPathOnly, parseSubdomain, sanitizePathTail } from 'app/common/gristUrls';
import * as gutil from 'app/common/gutil';
import {DocScope, QueryResult, Scope} from 'app/gen-server/lib/HomeDBManager';
import {DocScope, QueryResult, Scope} from 'app/gen-server/lib/homedb/HomeDBManager';
import {getUserId, RequestWithLogin} from 'app/server/lib/Authorizer';
import {RequestWithOrg} from 'app/server/lib/extractOrg';
import {RequestWithGrist} from 'app/server/lib/GristServer';

@@ -12,7 +12,7 @@ import {isAffirmative} from 'app/common/gutil';
import {getTagManagerSnippet} from 'app/common/tagManager';
import {Document} from 'app/common/UserAPI';
import {AttachedCustomWidgets, IAttachedCustomWidget} from "app/common/widgetTypes";
import {SUPPORT_EMAIL} from 'app/gen-server/lib/HomeDBManager';
import {SUPPORT_EMAIL} from 'app/gen-server/lib/homedb/HomeDBManager';
import {isAnonymousUser, isSingleUserMode, RequestWithLogin} from 'app/server/lib/Authorizer';
import {RequestWithOrg} from 'app/server/lib/extractOrg';
import {GristServer} from 'app/server/lib/GristServer';
@@ -1,7 +1,6 @@
const util = require('util');
const childProcess = require('child_process');
const fs = require('fs/promises');
const {existsSync} = require('fs');

const exec = util.promisify(childProcess.exec);

@@ -17,66 +16,81 @@ const getBranchName = () => {
};

async function main() {
  if (process.argv[2] === 'deploy') {
    const appRoot = process.argv[3] || ".";
    if (!existsSync(`${appRoot}/Dockerfile`)) {
      console.log(`Dockerfile not found in appRoot of ${appRoot}`);
      process.exit(1);
    }

    const name = getAppName();
    const volName = getVolumeName();
    if (!await appExists(name)) {
      await appCreate(name);
      await volCreate(name, volName);
    } else {
      // Check if volume exists, and create it if not. This is needed because there was an API
      // change in flyctl (mandatory -y flag) and some apps were created without a volume.
      if (!(await volList(name)).length) {
  switch (process.argv[2]) {
    case "deploy": {
      const name = getAppName();
      const volName = getVolumeName();
      if (!await appExists(name)) {
        await appCreate(name);
        await volCreate(name, volName);
      } else {
        // Check if volume exists, and create it if not. This is needed because there was an API
        // change in flyctl (mandatory -y flag) and some apps were created without a volume.
        if (!(await volList(name)).length) {
          await volCreate(name, volName);
        }
      }
      await prepConfig(name, volName);
      await appDeploy(name);
      break;
    }
    await prepConfig(name, appRoot, volName);
    await appDeploy(name, appRoot);
  } else if (process.argv[2] === 'destroy') {
    const name = getAppName();
    if (await appExists(name)) {
      await appDestroy(name);
    case "destroy": {
      const name = getAppName();
      if (await appExists(name)) {
        await appDestroy(name);
      }
      break;
    }
  } else if (process.argv[2] === 'clean') {
    const staleApps = await findStaleApps();
    for (const appName of staleApps) {
      await appDestroy(appName);
    case "clean": {
      const staleApps = await findStaleApps();
      for (const appName of staleApps) {
        await appDestroy(appName);
      }
      break;
    }
  } else {
    console.log(`Usage:
  deploy [appRoot]:
    create (if needed) and deploy fly app grist-{BRANCH_NAME}.
    appRoot may specify the working directory that contains the Dockerfile to build.
    default: {
      console.log(`Usage:
  deploy: create (if needed) and deploy fly app grist-{BRANCH_NAME}.
  destroy: destroy fly app grist-{BRANCH_NAME}
  clean: destroy all grist-* fly apps whose time has come
         (according to FLY_DEPLOY_EXPIRATION env var set at deploy time)

  DRYRUN=1 in environment will show what would be done
`);
    process.exit(1);
      process.exit(1);
    }
  }
}

function getDockerTag(name) {
  return `registry.fly.io/${name}:latest`;
}

const appExists = (name) => runFetch(`flyctl status -a ${name}`).then(() => true).catch(() => false);
const appCreate = (name) => runAction(`flyctl launch --auto-confirm --name ${name} -r ewr -o ${org} --vm-memory 1024`);
// We do not deploy at the create stage, since the Docker image isn't ready yet.
// Assigning --image prevents flyctl from making inferences based on the codebase and provisioning unnecessary postgres/redis instances.
const appCreate = (name) => runAction(`flyctl launch --no-deploy --auto-confirm --image ${getDockerTag(name)} --name ${name} -r ewr -o ${org}`);
const volCreate = (name, vol) => runAction(`flyctl volumes create ${vol} -s 1 -r ewr -y -a ${name}`);
const volList = (name) => runFetch(`flyctl volumes list -a ${name} -j`).then(({stdout}) => JSON.parse(stdout));
const appDeploy = (name, appRoot) => runAction(`flyctl deploy ${appRoot} --remote-only --region=ewr --vm-memory 1024`,
                                               {shell: true, stdio: 'inherit'});
const appDeploy = async (name) => {
  try {
    await runAction("flyctl auth docker")
    await runAction(`docker image tag grist-core:preview ${getDockerTag(name)}`);
    await runAction(`docker push ${getDockerTag(name)}`);
    await runAction(`flyctl deploy --app ${name} --image ${getDockerTag(name)}`);
  } catch (e) {
    console.log(`Error occurred when deploying: ${e}`);
    process.exit(1);
  }
};

async function appDestroy(name) {
  await runAction(`flyctl apps destroy ${name} -y`);
}

async function prepConfig(name, appRoot, volName) {
  const configPath = `${appRoot}/fly.toml`;
  const configTemplatePath = `${appRoot}/buildtools/fly-template.toml`;
async function prepConfig(name, volName) {
  const configPath = "./fly.toml";
  const configTemplatePath = "./buildtools/fly-template.toml";
  const template = await fs.readFile(configTemplatePath, {encoding: 'utf8'});

  // Calculate the time when we can destroy the app, used by findStaleApps.
@@ -48,3 +48,8 @@ processes = []
[mounts]
source="{VOLUME_NAME}"
destination="/persist"

[[vm]]
memory = '1gb'
cpu_kind = 'shared'
cpus = 1
@@ -1241,7 +1241,8 @@
    "URL": "URL",
    "Webhook Id": "Webhook Id",
    "Table": "Table",
    "Filter for changes in these columns (semicolon-separated ids)": "Filter for changes in these columns (semicolon-separated ids)"
    "Filter for changes in these columns (semicolon-separated ids)": "Filter for changes in these columns (semicolon-separated ids)",
    "Header Authorization": "Header Authorization"
  },
  "FormulaAssistant": {
    "Ask the bot.": "Ask the bot.",
@ -6,7 +6,7 @@

import {commonUrls} from 'app/common/gristUrls';
import {isAffirmative} from 'app/common/gutil';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {fixSiteProducts} from 'app/gen-server/lib/Housekeeper';

const debugging = isAffirmative(process.env.DEBUG) || isAffirmative(process.env.VERBOSE);

@ -8,7 +8,7 @@ import {createEmptyOrgUsageSummary, OrgUsageSummary} from 'app/common/DocUsage';
import {Document, Workspace} from 'app/common/UserAPI';
import {Organization} from 'app/gen-server/entity/Organization';
import {Product} from 'app/gen-server/entity/Product';
import {HomeDBManager, UserChange} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager, UserChange} from 'app/gen-server/lib/homedb/HomeDBManager';
import {TestServer} from 'test/gen-server/apiUtils';
import {TEAM_FREE_PLAN} from 'app/common/Features';

@ -4,7 +4,7 @@ import {Deps} from 'app/gen-server/ApiServer';
import {Organization} from 'app/gen-server/entity/Organization';
import {Product} from 'app/gen-server/entity/Product';
import {User} from 'app/gen-server/entity/User';
import {HomeDBManager, UserChange} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager, UserChange} from 'app/gen-server/lib/homedb/HomeDBManager';
import {SendGridConfig, SendGridMail} from 'app/gen-server/lib/NotifierTypes';
import axios, {AxiosResponse} from 'axios';
import {delay} from 'bluebird';

@ -4,7 +4,7 @@ import * as chai from 'chai';
import {configForUser} from 'test/gen-server/testUtils';
import * as testUtils from 'test/server/testUtils';

import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';

import {TestServer} from 'test/gen-server/apiUtils';

@ -1,5 +1,5 @@
import {delay} from 'app/common/delay';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {FlexServer} from 'app/server/lib/FlexServer';
import log from 'app/server/lib/log';
import {main as mergedServerMain} from 'app/server/mergedServerMain';

@ -11,7 +11,7 @@ import {User} from 'app/gen-server/entity/User';
import {Workspace} from 'app/gen-server/entity/Workspace';
import {SessionUserObj} from 'app/server/lib/BrowserSession';
import {getDocWorkerMap} from 'app/gen-server/lib/DocWorkerMap';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import * as docUtils from 'app/server/lib/docUtils';
import {FlexServer, FlexServerOptions} from 'app/server/lib/FlexServer';
import {main as mergedServerMain, ServerType} from 'app/server/mergedServerMain';

@ -1,7 +1,7 @@
import {QueryRunner} from "typeorm";
import * as roles from "app/common/roles";
import {Organization} from 'app/gen-server/entity/Organization';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {Permissions} from 'app/gen-server/lib/Permissions';
import {assert} from 'chai';
import {addSeedData, createInitialDb, removeConnection, setUpDB} from 'test/gen-server/seed';

@ -40,7 +40,7 @@ import {Organization} from "app/gen-server/entity/Organization";
import {Product, PRODUCTS, synchronizeProducts, teamFreeFeatures} from "app/gen-server/entity/Product";
import {User} from "app/gen-server/entity/User";
import {Workspace} from "app/gen-server/entity/Workspace";
import {EXAMPLE_WORKSPACE_NAME} from 'app/gen-server/lib/HomeDBManager';
import {EXAMPLE_WORKSPACE_NAME} from 'app/gen-server/lib/homedb/HomeDBManager';
import {Permissions} from 'app/gen-server/lib/Permissions';
import {getOrCreateConnection, runMigrations, undoLastMigration, updateDb} from 'app/server/lib/dbUtils';
import {FlexServer} from 'app/server/lib/FlexServer';

@ -2,7 +2,7 @@ import {GristLoadConfig} from 'app/common/gristUrls';
import {BillingAccount} from 'app/gen-server/entity/BillingAccount';
import {Organization} from 'app/gen-server/entity/Organization';
import {Product} from 'app/gen-server/entity/Product';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {INotifier} from 'app/server/lib/INotifier';
import {AxiosRequestConfig} from "axios";
import {delay} from 'bluebird';

@ -52,10 +52,11 @@ describe('WebhookPage', function () {
'Name',
'Memo',
'Event Types',
'URL',
'Table',
'Ready Column',
'Filter for changes in these columns (semicolon-separated ids)',
'Ready Column',
'URL',
'Header Authorization',
'Webhook Id',
'Enabled',
'Status',
@ -81,7 +82,7 @@ describe('WebhookPage', function () {
await gu.waitToPass(async () => {
assert.equal(await getField(1, 'Webhook Id'), id);
});
// Now other fields like name, memo and watchColIds are persisted.
// Now other fields like name, memo, watchColIds, and Header Auth are persisted.
await setField(1, 'Name', 'Test Webhook');
await setField(1, 'Memo', 'Test Memo');
await setField(1, 'Filter for changes in these columns (semicolon-separated ids)', 'A; B');
@ -115,6 +116,27 @@ describe('WebhookPage', function () {
assert.lengthOf((await docApi.getRows('Table2')).A, 0);
});

it('can create webhook with persistant header authorization', async function () {
// The webhook won't work because the header auth doesn't match the api key of the current test user.
await openWebhookPage();
await setField(1, 'Event Types', 'add\nupdate\n');
await setField(1, 'URL', `http://${host}/api/docs/${doc.id}/tables/Table2/records?flat=1`);
await setField(1, 'Table', 'Table1');
await gu.waitForServer();
await driver.navigate().refresh();
await waitForWebhookPage();
await setField(1, 'Header Authorization', 'Bearer 1234');
await gu.waitForServer();
await driver.navigate().refresh();
await waitForWebhookPage();
await gu.waitToPass(async () => {
assert.equal(await getField(1, 'Header Authorization'), 'Bearer 1234');
});
await gu.getDetailCell({col:'Header Authorization', rowNum: 1}).click();
await gu.enterCell(Key.DELETE, Key.ENTER);
await gu.waitForServer();
});

it('can create two webhooks', async function () {
await openWebhookPage();
await setField(1, 'Event Types', 'add\nupdate\n');

@ -13,7 +13,7 @@ import {normalizeEmail} from 'app/common/emails';
import {UserProfile} from 'app/common/LoginSessionAPI';
import {BehavioralPrompt, UserPrefs, WelcomePopup} from 'app/common/Prefs';
import {DocWorkerAPI, UserAPI, UserAPIImpl} from 'app/common/UserAPI';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {TestingHooksClient} from 'app/server/lib/TestingHooks';
import EventEmitter = require('events');

@ -11,7 +11,7 @@
* into a file whose path is printed when server starts.
*/
import {encodeUrl, IGristUrlState, parseSubdomain} from 'app/common/gristUrls';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import log from 'app/server/lib/log';
import {getAppRoot} from 'app/server/lib/places';
import {makeGristConfig} from 'app/server/lib/sendAppPage';

@ -1,5 +1,5 @@
import {parseUrlId} from 'app/common/gristUrls';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {DocManager} from 'app/server/lib/DocManager';
import {FlexServer} from 'app/server/lib/FlexServer';
import axios from 'axios';

@ -4625,6 +4625,7 @@ function testDocApi() {
id: first.webhookId,
fields: {
url: `${serving.url}/200`,
authorization: '',
unsubscribeKey: first.unsubscribeKey,
eventTypes: ['add', 'update'],
enabled: true,
@ -4643,6 +4644,7 @@ function testDocApi() {
id: second.webhookId,
fields: {
url: `${serving.url}/404`,
authorization: '',
unsubscribeKey: second.unsubscribeKey,
eventTypes: ['add', 'update'],
enabled: true,
@ -5010,6 +5012,7 @@ function testDocApi() {

const expectedFields = {
url: `${serving.url}/foo`,
authorization: '',
eventTypes: ['add'],
isReadyColumn: 'B',
tableId: 'Table1',
@ -5079,6 +5082,8 @@ function testDocApi() {

await check({isReadyColumn: null}, 200);
await check({isReadyColumn: "bar"}, 404, `Column not found "bar"`);

await check({authorization: 'Bearer fake-token'}, 200);
});

});
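The new `authorization` entry in these expected-fields checks mirrors the "Header Authorization" column exercised in the browser test above. For illustration only, a webhook created through the document API could carry that value alongside the existing fields; the route and payload shape here are assumptions, not taken from this diff.

```js
const axios = require('axios');

// Hypothetical helper; field names mirror the expectedFields object above.
async function createWebhookWithAuthHeader(serverUrl, apiKey, docId) {
  const fields = {
    url: 'https://example.com/hook',
    authorization: 'Bearer 1234',   // presumably sent as the Authorization header on deliveries
    eventTypes: ['add', 'update'],
    isReadyColumn: 'B',
    tableId: 'Table1',
    enabled: true,
  };
  const resp = await axios.post(`${serverUrl}/api/docs/${docId}/webhooks`,
    {webhooks: [{fields}]},
    {headers: {Authorization: `Bearer ${apiKey}`}});
  return resp.data;
}
```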

@ -2,7 +2,7 @@ import {ErrorOrValue, freezeError, mapGetOrSet, MapWithTTL} from 'app/common/Asy
import {ObjMetadata, ObjSnapshot, ObjSnapshotWithMetadata} from 'app/common/DocSnapshot';
import {SCHEMA_VERSION} from 'app/common/schema';
import {DocWorkerMap} from 'app/gen-server/lib/DocWorkerMap';
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';
import {ActiveDoc} from 'app/server/lib/ActiveDoc';
import {create} from 'app/server/lib/create';
import {DocManager} from 'app/server/lib/DocManager';

@ -1,4 +1,4 @@
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
import {HomeDBManager} from 'app/gen-server/lib/homedb/HomeDBManager';

export async function getDatabase(typeormDb?: string): Promise<HomeDBManager> {
const origTypeormDB = process.env.TYPEORM_DATABASE;