(core) updates from grist-core

pull/895/head
Paul Fitzpatrick 2 months ago
commit 17857ec1f0

@ -1,8 +1,8 @@
# Welcome to the contribution guide for Grist!
Eager to contribute to Grist? That's awesome! Here are some contributions you can make:
- [translate](/documentation/translate.md)
- [write tutorials and user documentation](https://github.com/gristlabs/grist-help)
- [translate](/documentation/translations.md)
- [write tutorials and user documentation](https://github.com/gristlabs/grist-help?tab=readme-ov-file#grist-help-center)
- [develop](/documentation/develop.md)
- [report issues or suggest enhancement](https://github.com/gristlabs/grist-core/issues/new)

@ -130,6 +130,23 @@ You can find a lot more about configuring Grist, setting up authentication,
and running it on a public server in our
[Self-Managed Grist](https://support.getgrist.com/self-managed/) handbook.
## Activating the boot page for diagnosing problems
You can turn on a special "boot page" to inspect the status of your
installation. Just visit `/boot` on your Grist server for instructions.
Since it is useful for the boot page to be available even when authentication
isn't set up, you can give it a special access key by setting `GRIST_BOOT_KEY`.
```
docker run -p 8484:8484 -e GRIST_BOOT_KEY=secret -it gristlabs/grist
```
The boot page should then be available at `/boot/<GRIST_BOOT_KEY>`. We are
starting to collect probes for common problems there. If you hit a problem that
isn't covered, it would be great if you could add a probe for it in
[BootProbes](https://github.com/gristlabs/grist-core/blob/main/app/server/lib/BootProbes.ts).
Or file an issue so someone else can add it; we're just getting started with this.
## Building from source
To build Grist from source, follow these steps:
@ -242,6 +259,7 @@ GRIST_ADAPT_DOMAIN | set to "true" to support multiple base domains (careful, h
GRIST_ALLOWED_HOSTS | comma-separated list of permitted domain origins for requests (e.g. my.site,another.com)
GRIST_APP_ROOT | directory containing Grist sandbox and assets (specifically the sandbox and static subdirectories).
GRIST_BACKUP_DELAY_SECS | wait this long after a doc change before making a backup
GRIST_BOOT_KEY | if set, offer diagnostics at /boot/GRIST_BOOT_KEY
GRIST_DATA_DIR | directory in which to store document caches.
GRIST_DEFAULT_EMAIL | if set, login as this user if no other credentials presented
GRIST_DEFAULT_PRODUCT | if set, this controls enabled features and limits of new sites. See names of PRODUCTS in Product.ts.
@ -276,7 +294,8 @@ GRIST_FORCE_LOGIN | Much like GRIST_ANON_PLAYGROUND but don't support anonymo
GRIST_SINGLE_ORG | set to an org "domain" to pin client to that org
GRIST_TEMPLATE_ORG | set to an org "domain" to show public docs from that org
GRIST_HELP_CENTER | set the help center link ref
FREE_COACHING_CALL_URL | set the link to the human help (example: email or meeting scheduling tool)
FREE_COACHING_CALL_URL | set the link to the human help (example: email address or meeting scheduling tool)
GRIST_CONTACT_SUPPORT_URL | set the link to contact support on error pages (example: email address or online form)
GRIST_SUPPORT_ANON | if set to 'true', show UI for anonymous access (not shown by default)
GRIST_SUPPORT_EMAIL | if set, give a user with the specified email support powers. The main extra power is the ability to share sites, workspaces, and docs with all users in a listed way.
GRIST_TELEMETRY_LEVEL | the telemetry level. Can be set to: `off` (default), `limited`, or `full`.
@ -290,6 +309,7 @@ COOKIE_MAX_AGE | session cookie max age, defaults to 90 days; can be set to
HOME_PORT | port number to listen on for REST API server; if set to "share", add API endpoints to regular grist port.
PORT | port number to listen on for Grist server
REDIS_URL | optional redis server for browser sessions and db query caching
GRIST_SKIP_REDIS_CHECKSUM_MISMATCH | Experimental. If set, only warn if the checksum in Redis differs from the one in your S3 backend storage. You may turn it on if your backend storage implements [read-after-write consistency](https://aws.amazon.com/fr/blogs/aws/amazon-s3-update-strong-read-after-write-consistency/). Defaults to false.
GRIST_SNAPSHOT_TIME_CAP | optional. Define the caps for tracking buckets. Usage: {"hour": 25, "day": 32, "isoWeek": 12, "month": 96, "year": 1000}
GRIST_SNAPSHOT_KEEP | optional. Number of recent snapshots to retain unconditionally for a document, regardless of when they were made
GRIST_PROMCLIENT_PORT | optional. If set, serve the Prometheus metrics on the specified port number. ⚠️ Be sure to use a port which is not publicly exposed ⚠️.

@ -0,0 +1,259 @@
import { AppModel } from 'app/client/models/AppModel';
import { createAppPage } from 'app/client/ui/createAppPage';
import { pagePanels } from 'app/client/ui/PagePanels';
import { BootProbeInfo, BootProbeResult } from 'app/common/BootProbe';
import { removeTrailingSlash } from 'app/common/gutil';
import { getGristConfig } from 'app/common/urlUtils';
import { Disposable, dom, Observable, styled, UseCBOwner } from 'grainjs';
const cssBody = styled('div', `
padding: 20px;
overflow: auto;
`);
const cssHeader = styled('div', `
padding: 20px;
`);
const cssResult = styled('div', `
max-width: 500px;
`);
/**
*
* A "boot" page for inspecting the state of the Grist installation.
*
* TODO: deferring using any localization machinery so as not
* to have to worry about its failure modes yet, but it should be
* fine as long as assets served locally are used.
*
*/
export class Boot extends Disposable {
// The back end will offer a set of probes (diagnostics) we
// can use. Probes have unique IDs.
public probes: Observable<BootProbeInfo[]>;
// Keep track of probe results we have received, by probe ID.
public results: Map<string, Observable<BootProbeResult>>;
// Keep track of probe requests we are making, by probe ID.
public requests: Map<string, BootProbe>;
constructor(_appModel: AppModel) {
super();
// Setting title in constructor seems to be how we are doing this,
// based on other similar pages.
document.title = 'Booting Grist';
this.probes = Observable.create(this, []);
this.results = new Map();
this.requests = new Map();
}
/**
* Set up the page. Uses the generic Grist layout with an empty
* side panel, just for convenience. Could be made a lot prettier.
*/
public buildDom() {
const config = getGristConfig();
const errMessage = config.errMessage;
if (!errMessage) {
// Probe tool URLs are relative to the current URL. Don't trust configuration,
// because it may be buggy if the user is here looking at the boot page
// to figure out some problem.
const url = new URL(removeTrailingSlash(document.location.href));
url.pathname += '/probe';
fetch(url.href).then(async resp => {
const _probes = await resp.json();
this.probes.set(_probes.probes);
}).catch(e => reportError(e));
}
const rootNode = dom('div',
dom.domComputed(
use => {
return pagePanels({
leftPanel: {
panelWidth: Observable.create(this, 240),
panelOpen: Observable.create(this, false),
hideOpener: true,
header: null,
content: null,
},
headerMain: cssHeader(dom('h1', 'Grist Boot')),
contentMain: this.buildBody(use, {errMessage}),
});
}
),
);
return rootNode;
}
/**
* The body of the page is very simple right now, basically a
* placeholder. Make a section for each probe, and kick them off in
* parallel, showing results as they come in.
*/
public buildBody(use: UseCBOwner, options: {errMessage?: string}) {
if (options.errMessage) {
return cssBody(cssResult(this.buildError()));
}
return cssBody([
...use(this.probes).map(probe => {
const {id} = probe;
let result = this.results.get(id);
if (!result) {
result = Observable.create(this, {});
this.results.set(id, result);
}
let request = this.requests.get(id);
if (!request) {
request = new BootProbe(id, this);
this.requests.set(id, request);
}
request.start();
return cssResult(
this.buildResult(probe, use(result), probeDetails[id]));
}),
]);
}
/**
* This is used when there is an attempt to access the boot page
* but something isn't right - either the page isn't enabled, or
* the key in the URL is wrong. Give the user some information about
* how to set things up.
*/
public buildError() {
return dom(
'div',
dom('p',
'A diagnostics page can be made available at:',
dom('blockquote', '/boot/GRIST_BOOT_KEY'),
'GRIST_BOOT_KEY is an environment variable ',
' set before Grist starts. It should only',
' contain characters that are valid in a URL.',
' It should be a secret, since no authentication is needed',
' to visit the diagnostics page.'),
dom('p',
'You are seeing this page because either the key is not set,',
' or it is not in the URL.'),
);
}
/**
* An ugly rendering of information returned by the probe.
*/
public buildResult(info: BootProbeInfo, result: BootProbeResult,
details: ProbeDetails|undefined) {
const out: (HTMLElement|string|null)[] = [];
out.push(dom('h2', info.name));
if (details) {
out.push(dom('p', '> ', details.info));
}
if (result.verdict) {
out.push(dom('pre', result.verdict));
}
if (result.success !== undefined) {
out.push(result.success ? '✅' : '❌');
}
if (result.done === true) {
out.push(dom('p', 'no fault detected'));
}
if (result.details) {
for (const [key, val] of Object.entries(result.details)) {
out.push(dom(
'div',
key,
dom('input', dom.prop('value', JSON.stringify(val)))));
}
}
return out;
}
}
/**
* Represents a single diagnostic.
*/
export class BootProbe {
constructor(public id: string, public boot: Boot) {
const url = new URL(removeTrailingSlash(document.location.href));
url.pathname = url.pathname + '/probe/' + id;
fetch(url.href).then(async resp => {
const _probes: BootProbeResult = await resp.json();
const ob = boot.results.get(id);
if (ob) {
ob.set(_probes);
}
}).catch(e => console.error(e));
}
public start() {
let result = this.boot.results.get(this.id);
if (!result) {
result = Observable.create(this.boot, {});
this.boot.results.set(this.id, result);
}
}
}
/**
* Create a stripped down page to show boot information.
* Make sure the API isn't used since it may well be unreachable
* due to a misconfiguration, especially in multi-server setups.
*/
createAppPage(appModel => {
return dom.create(Boot, appModel);
}, {
useApi: false,
});
/**
* Basic information about diagnostics is kept on the server,
* but it can be useful to show extra details and tips in the
* client.
*/
const probeDetails: Record<string, ProbeDetails> = {
'boot-page': {
info: `
This boot page should not be too easy to access. Either turn
it off when configuration is ok (by unsetting GRIST_BOOT_KEY)
or make GRIST_BOOT_KEY long and cryptographically secure.
`,
},
'health-check': {
info: `
Grist has a small built-in health check often used when running
it as a container.
`,
},
'host-header': {
info: `
Requests arriving to Grist should have an accurate Host
header. This is essential when GRIST_SERVE_SAME_ORIGIN
is set.
`,
},
'system-user': {
info: `
It is good practice not to run Grist as the root user.
`,
},
'reachable': {
info: `
The main page of Grist should be available.
`
},
};
/**
* Information about the probe.
*/
interface ProbeDetails {
info: string;
}

@ -2,7 +2,7 @@
* dispose.js provides tools for components that need to dispose of resources, such as
* destroying DOM and unsubscribing from events. The motivation with examples is presented here:
*
* https://phab.getgrist.com/w/disposal/
* /documentation/disposal/disposal.md
*/
@ -191,7 +191,7 @@ Object.assign(Disposable.prototype, {
}
// Finish by wiping out the object, since nothing should use it after dispose().
// See https://phab.getgrist.com/w/disposal/ for more motivation.
// See /documentation/disposal.md for more motivation.
wipeOutObject(this);
}
});

@ -146,6 +146,11 @@ export interface AppModel {
switchUser(user: FullUser, org?: string): Promise<void>;
}
export interface TopAppModelOptions {
/** Defaults to true. */
useApi?: boolean;
}
export class TopAppModelImpl extends Disposable implements TopAppModel {
public readonly isSingleOrg: boolean;
public readonly productFlavor: ProductFlavor;
@ -163,14 +168,16 @@ export class TopAppModelImpl extends Disposable implements TopAppModel {
// up new widgets - that seems ok.
private readonly _widgets: AsyncCreate<ICustomWidget[]>;
constructor(window: {gristConfig?: GristLoadConfig}, public readonly api: UserAPI = newUserAPIImpl()) {
constructor(window: {gristConfig?: GristLoadConfig},
public readonly api: UserAPI = newUserAPIImpl(),
public readonly options: TopAppModelOptions = {}) {
super();
setErrorNotifier(this.notifier);
this.isSingleOrg = Boolean(window.gristConfig && window.gristConfig.singleOrg);
this.productFlavor = getFlavor(window.gristConfig && window.gristConfig.org);
this._gristConfig = window.gristConfig;
this._widgets = new AsyncCreate<ICustomWidget[]>(async () => {
const widgets = await this.api.getWidgets();
const widgets = this.options.useApi === false ? [] : await this.api.getWidgets();
this.customWidgets.set(widgets);
return widgets;
});
@ -180,7 +187,9 @@ export class TopAppModelImpl extends Disposable implements TopAppModel {
this.autoDispose(subscribe(this.currentSubdomain, (use) => this.initialize()));
this.plugins = this._gristConfig?.plugins || [];
this.fetchUsersAndOrgs().catch(reportError);
if (this.options.useApi !== false) {
this.fetchUsersAndOrgs().catch(reportError);
}
}
public initialize(): void {
@ -237,6 +246,10 @@ export class TopAppModelImpl extends Disposable implements TopAppModel {
private async _doInitialize() {
this.appObs.set(null);
if (this.options.useApi === false) {
AppModelImpl.create(this.appObs, this, null, null, {error: 'no-api', status: 500});
return;
}
try {
const {user, org, orgError} = await this.api.getSessionActive();
if (this.isDisposed()) { return; }

@ -17,7 +17,7 @@ import {TableData} from 'app/client/models/TableData';
import {ColumnFilterCalendarView} from 'app/client/ui/ColumnFilterCalendarView';
import {relativeDatesControl} from 'app/client/ui/ColumnFilterMenuUtils';
import {cssInput} from 'app/client/ui/cssInput';
import {DateRangeOptions, IDateRangeOption} from 'app/client/ui/DateRangeOptions';
import {getDateRangeOptions, IDateRangeOption} from 'app/client/ui/DateRangeOptions';
import {cssPinButton} from 'app/client/ui/RightPanelStyles';
import {basicButton, primaryButton, textButton} from 'app/client/ui2018/buttons';
import {cssLabel as cssCheckboxLabel, cssCheckboxSquare,
@ -176,16 +176,16 @@ export function columnFilterMenu(owner: IDisposableOwner, opts: IFilterMenuOptio
cssLinkRow(
testId('presets-links'),
cssLink(
DateRangeOptions[0].label,
dom.on('click', () => action(DateRangeOptions[0]))
getDateRangeOptions()[0].label,
dom.on('click', () => action(getDateRangeOptions()[0]))
),
cssLink(
DateRangeOptions[1].label,
dom.on('click', () => action(DateRangeOptions[1]))
getDateRangeOptions()[1].label,
dom.on('click', () => action(getDateRangeOptions()[1]))
),
cssLink(
'More ', icon('Dropdown'),
menu(() => DateRangeOptions.map(
menu(() => getDateRangeOptions().map(
(option) => menuItem(() => action(option), option.label)
), {attach: '.' + cssMenu.className})
),

@ -1,41 +1,55 @@
import {makeT} from 'app/client/lib/localization';
import { CURRENT_DATE, IRelativeDateSpec } from "app/common/RelativeDates";
const t = makeT('DateRangeOptions');
export interface IDateRangeOption {
label: string;
min: IRelativeDateSpec;
max: IRelativeDateSpec;
}
export const DateRangeOptions: IDateRangeOption[] = [{
label: 'Today',
min: CURRENT_DATE,
max: CURRENT_DATE,
}, {
label: 'Last 7 days',
min: [{quantity: -7, unit: 'day'}],
max: [{quantity: -1, unit: 'day'}],
}, {
label: 'Next 7 days',
min: [{quantity: 1, unit: 'day'}],
max: [{quantity: 7, unit: 'day'}],
}, {
label: 'Last Week',
min: [{quantity: -1, unit: 'week'}],
max: [{quantity: -1, unit: 'week', endOf: true}],
}, {
label: 'Last 30 days',
min: [{quantity: -30, unit: 'day'}],
max: [{quantity: -1, unit: 'day'}],
}, {
label: 'This week',
min: [{quantity: 0, unit: 'week'}],
max: [{quantity: 0, unit: 'week', endOf: true}],
}, {
label: 'This month',
min: [{quantity: 0, unit: 'month'}],
max: [{quantity: 0, unit: 'month', endOf: true}],
}, {
label: 'This year',
min: [{quantity: 0, unit: 'year'}],
max: [{quantity: 0, unit: 'year', endOf: true}],
}];
export function getDateRangeOptions(): IDateRangeOption[] {
return [
{
label: t('Today'),
min: CURRENT_DATE,
max: CURRENT_DATE,
},
{
label: t('Last 7 days'),
min: [{quantity: -7, unit: 'day'}],
max: [{quantity: -1, unit: 'day'}],
},
{
label: t('Next 7 days'),
min: [{quantity: 1, unit: 'day'}],
max: [{quantity: 7, unit: 'day'}],
},
{
label: t('Last Week'),
min: [{quantity: -1, unit: 'week'}],
max: [{quantity: -1, unit: 'week', endOf: true}],
},
{
label: t('Last 30 days'),
min: [{quantity: -30, unit: 'day'}],
max: [{quantity: -1, unit: 'day'}],
},
{
label: t('This week'),
min: [{quantity: 0, unit: 'week'}],
max: [{quantity: 0, unit: 'week', endOf: true}],
},
{
label: t('This month'),
min: [{quantity: 0, unit: 'month'}],
max: [{quantity: 0, unit: 'month', endOf: true}],
},
{
label: t('This year'),
min: [{quantity: 0, unit: 'year'}],
max: [{quantity: 0, unit: 'year', endOf: true}],
},
];
}

@ -85,7 +85,7 @@ export const GristTooltips: Record<Tooltip, TooltipContentFunc> = {
t('Try out changes in a copy, then decide whether to replace the original with your edits.')
),
dom('div',
cssLink({href: commonUrls.helpTryingOutChanges, target: '_blank'}, 'Learn more.'),
cssLink({href: commonUrls.helpTryingOutChanges, target: '_blank'}, t('Learn more.')),
),
...args,
),

@ -303,7 +303,7 @@ export function downloadDocModal(doc: Document, pageModel: DocPageModel) {
const selected = Observable.create<DownloadOption>(owner, 'full');
return [
cssModalTitle(`Download document`),
cssModalTitle(t(`Download document`)),
cssRadioCheckboxOptions(
radioCheckboxOption(selected, 'full', t("Download full document and history")),
radioCheckboxOption(selected, 'nohistory', t("Remove document history (can significantly reduce file size)")),
@ -311,7 +311,7 @@ export function downloadDocModal(doc: Document, pageModel: DocPageModel) {
),
cssModalButtons(
dom.domComputed(use =>
bigPrimaryButtonLink(`Download`, hooks.maybeModifyLinkAttrs({
bigPrimaryButtonLink(t(`Download`), hooks.maybeModifyLinkAttrs({
href: pageModel.appModel.api.getDocAPI(doc.id).getDownloadUrl({
template: use(selected) === "template",
removeHistory: use(selected) === "nohistory" || use(selected) === "template",
@ -325,7 +325,7 @@ export function downloadDocModal(doc: Document, pageModel: DocPageModel) {
testId('download-button-link'),
),
),
bigBasicButton('Cancel', dom.on('click', () => {
bigBasicButton(t('Cancel'), dom.on('click', () => {
ctl.close();
}))
)

@ -8,6 +8,7 @@ import {cardPopup, cssPopupBody, cssPopupButtons, cssPopupCloseButton,
import {icon} from 'app/client/ui2018/icons';
import {getGristConfig} from 'app/common/urlUtils';
import {dom, styled} from 'grainjs';
import { commonUrls } from 'app/common/gristUrls';
const t = makeT('WelcomeCoachingCall');
@ -103,7 +104,7 @@ We can show you the Grist basics, or start working with your data right away to
logTelemetryEvent('clickedScheduleCoachingCall');
}),
{
href: getGristConfig().freeCoachingCallUrl,
href: commonUrls.freeCoachingCall,
target: '_blank',
},
testId('popup-primary-button'),

@ -1,6 +1,6 @@
import {get as getBrowserGlobals} from 'app/client/lib/browserGlobals';
import {setupLocale} from 'app/client/lib/localization';
import {AppModel, TopAppModelImpl} from 'app/client/models/AppModel';
import {AppModel, TopAppModelImpl, TopAppModelOptions} from 'app/client/models/AppModel';
import {reportError, setUpErrorHandling} from 'app/client/models/errors';
import {buildSnackbarDom} from 'app/client/ui/NotifyUI';
import {addViewportTag} from 'app/client/ui/viewport';
@ -14,10 +14,12 @@ const G = getBrowserGlobals('document', 'window');
* Sets up the application model, error handling, and global styles, and replaces
* the DOM body with the result of calling `buildAppPage`.
*/
export function createAppPage(buildAppPage: (appModel: AppModel) => DomContents) {
export function createAppPage(
buildAppPage: (appModel: AppModel) => DomContents,
modelOptions: TopAppModelOptions = {}) {
setUpErrorHandling();
const topAppModel = TopAppModelImpl.create(null, {});
const topAppModel = TopAppModelImpl.create(null, {}, undefined, modelOptions);
addViewportTag();
attachCssRootVars(topAppModel.productFlavor);

@ -7,7 +7,7 @@ import {pagePanels} from 'app/client/ui/PagePanels';
import {createTopBarHome} from 'app/client/ui/TopBar';
import {bigBasicButtonLink, bigPrimaryButtonLink} from 'app/client/ui2018/buttons';
import {theme, vars} from 'app/client/ui2018/cssVars';
import {getPageTitleSuffix} from 'app/common/gristUrls';
import {commonUrls, getPageTitleSuffix} from 'app/common/gristUrls';
import {getGristConfig} from 'app/common/urlUtils';
import {dom, DomElementArg, makeTestId, observable, styled} from 'grainjs';
@ -94,7 +94,7 @@ export function createNotFoundPage(appModel: AppModel, message?: string) {
})),
cssButtonWrap(bigPrimaryButtonLink(t("Go to main page"), testId('error-primary-btn'),
urlState().setLinkUrl({}))),
cssButtonWrap(bigBasicButtonLink(t("Contact support"), {href: 'https://getgrist.com/contact'})),
cssButtonWrap(bigBasicButtonLink(t("Contact support"), {href: commonUrls.contactSupport})),
]);
}
@ -109,7 +109,7 @@ export function createOtherErrorPage(appModel: AppModel, message?: string) {
t('There was an unknown error.')),
cssButtonWrap(bigPrimaryButtonLink(t("Go to main page"), testId('error-primary-btn'),
urlState().setLinkUrl({}))),
cssButtonWrap(bigBasicButtonLink(t("Contact support"), {href: 'https://getgrist.com/contact'})),
cssButtonWrap(bigBasicButtonLink(t("Contact support"), {href: commonUrls.contactSupport})),
]);
}

@ -0,0 +1,22 @@
export type BootProbeIds =
'boot-page' |
'health-check' |
'reachable' |
'host-header' |
'system-user'
;
export interface BootProbeResult {
verdict?: string;
success?: boolean;
done?: boolean;
severity?: 'fault' | 'warning' | 'hmm';
details?: Record<string, any>;
}
export interface BootProbeInfo {
id: BootProbeIds;
name: string;
}

@ -86,6 +86,8 @@ export const commonUrls = {
helpTelemetryLimited: "https://support.getgrist.com/telemetry-limited",
helpCalendarWidget: "https://support.getgrist.com/widget-calendar",
helpLinkKeys: "https://support.getgrist.com/examples/2021-04-link-keys",
freeCoachingCall: getFreeCoachingCallUrl(),
contactSupport: getContactSupportUrl(),
plans: "https://www.getgrist.com/pricing",
sproutsProgram: "https://www.getgrist.com/sprouts-program",
contact: "https://www.getgrist.com/contact",
@ -670,6 +672,9 @@ export interface GristLoadConfig {
// Url for free coaching call scheduling for the browser client to use.
freeCoachingCallUrl?: string;
// Url for "contact support" button on Grist's "not found" error page
contactSupportUrl?: string;
// When set, this directs the client to encode org information in path, not in domain.
pathOnly?: boolean;
@ -865,21 +870,33 @@ export function getKnownOrg(): string|null {
}
}
export function getHelpCenterUrl(): string|null {
export function getHelpCenterUrl(): string {
const defaultUrl = "https://support.getgrist.com";
if(isClient()) {
const gristConfig: GristLoadConfig = (window as any).gristConfig;
return gristConfig && gristConfig.helpCenterUrl || defaultUrl;
} else {
return process.env.GRIST_HELP_CENTER || defaultUrl;
}
}
export function getFreeCoachingCallUrl(): string {
const defaultUrl = "https://calendly.com/grist-team/grist-free-coaching-call";
if(isClient()) {
const gristConfig: GristLoadConfig = (window as any).gristConfig;
return gristConfig && gristConfig.helpCenterUrl || null;
return gristConfig && gristConfig.freeCoachingCallUrl || defaultUrl;
} else {
return process.env.GRIST_HELP_CENTER || null;
return process.env.FREE_COACHING_CALL_URL || defaultUrl;
}
}
export function getFreeCoachingCallUrl(): string|null {
export function getContactSupportUrl(): string {
const defaultUrl = "https://www.getgrist.com/contact/";
if(isClient()) {
const gristConfig: GristLoadConfig = (window as any).gristConfig;
return gristConfig && gristConfig.freeCoachingCallUrl || null;
return gristConfig && gristConfig.contactSupportUrl || defaultUrl;
} else {
return process.env.FREE_COACHING_CALL_URL || null;
return process.env.GRIST_CONTACT_SUPPORT_URL || defaultUrl;
}
}
@ -938,7 +955,7 @@ export function extractOrgParts(reqHost: string|undefined, reqPath: string): Org
orgFromHost = getOrgFromHost(reqHost);
if (orgFromHost) {
// Some subdomains are shared, and do not reflect the name of an organization.
// See https://phab.getgrist.com/w/hosting/v1/urls/ for a list.
// See /documentation/urls.md for a list.
if (/^(api|v1-.*|doc-worker-.*)$/.test(orgFromHost)) {
orgFromHost = null;
}

@ -31,7 +31,7 @@ const BLACKLISTED_SUBDOMAINS = new Set([
/**
*
* Checks whether the subdomain is on the list of forbidden subdomains.
* See https://phab.getgrist.com/w/hosting/v1/urls/#organization-subdomains
* See /documentation/urls.md#organization-subdomains
*
* Also enforces various sanity checks.
*

@ -9,7 +9,7 @@ export interface GristTable {
// This is documenting what is currently returned by the core plugins. Capitalization
// is python-style.
//
// TODO: could be worth reconciling with: https://phab.getgrist.com/w/grist_data_format/.
// TODO: could be worth reconciling with: /documentation/grist-data-format.md.
table_name: string | null; // currently allow names to be null
column_metadata: GristColumn[];
table_data: any[][];

@ -0,0 +1,185 @@
import { ApiError } from 'app/common/ApiError';
import { BootProbeIds, BootProbeResult } from 'app/common/BootProbe';
import { removeTrailingSlash } from 'app/common/gutil';
import { expressWrap, jsonErrorHandler } from 'app/server/lib/expressWrap';
import { GristServer } from 'app/server/lib/GristServer';
import * as express from 'express';
import fetch from 'node-fetch';
/**
* Self-diagnostics useful when installing Grist.
*/
export class BootProbes {
// List of probes.
public _probes = new Array<Probe>();
// Probes indexed by id.
public _probeById = new Map<string, Probe>();
public constructor(private _app: express.Application,
private _server: GristServer,
private _base: string) {
this._addProbes();
}
public addEndpoints() {
// Return a list of available probes.
this._app.use(`${this._base}/probe$`, expressWrap(async (_, res) => {
res.json({
'probes': this._probes.map(probe => {
return { id: probe.id, name: probe.name };
}),
});
}));
// Return result of running an individual probe.
this._app.use(`${this._base}/probe/:probeId`, expressWrap(async (req, res) => {
const probe = this._probeById.get(req.params.probeId);
if (!probe) {
throw new ApiError('unknown probe', 400);
}
const result = await probe.apply(this._server, req);
res.json(result);
}));
// Fall-back for errors.
this._app.use(`${this._base}/probe`, jsonErrorHandler);
}
private _addProbes() {
this._probes.push(_homeUrlReachableProbe);
this._probes.push(_statusCheckProbe);
this._probes.push(_userProbe);
this._probes.push(_bootProbe);
this._probes.push(_hostHeaderProbe);
this._probeById = new Map(this._probes.map(p => [p.id, p]));
}
}
/**
* An individual probe has an id, a name, an optional description,
* and a method that returns a probe result.
*/
export interface Probe {
id: BootProbeIds;
name: string;
description?: string;
apply: (server: GristServer, req: express.Request) => Promise<BootProbeResult>;
}
const _homeUrlReachableProbe: Probe = {
id: 'reachable',
name: 'Grist is reachable',
apply: async (server, req) => {
const url = server.getHomeUrl(req);
try {
const resp = await fetch(url);
if (resp.status !== 200) {
throw new ApiError(await resp.text(), resp.status);
}
return {
success: true,
};
} catch (e) {
return {
success: false,
details: {
error: String(e),
},
severity: 'fault',
};
}
}
};
const _statusCheckProbe: Probe = {
id: 'health-check',
name: 'Built-in Health check',
apply: async (server, req) => {
const baseUrl = server.getHomeUrl(req);
const url = new URL(baseUrl);
url.pathname = removeTrailingSlash(url.pathname) + '/status';
try {
const resp = await fetch(url);
if (resp.status !== 200) {
throw new Error(`Failed with status ${resp.status}`);
}
const txt = await resp.text();
if (!txt.includes('is alive')) {
throw new Error(`Failed, page has unexpected content`);
}
return {
success: true,
};
} catch (e) {
return {
success: false,
error: String(e),
severity: 'fault',
};
}
},
};
const _userProbe: Probe = {
id: 'system-user',
name: 'System user is sane',
apply: async () => {
if (process.getuid && process.getuid() === 0) {
return {
success: false,
verdict: 'User appears to be root (UID 0)',
severity: 'warning',
};
} else {
return {
success: true,
};
}
},
};
const _bootProbe: Probe = {
id: 'boot-page',
name: 'Boot page exposure',
apply: async (server) => {
if (!server.hasBoot) {
return { success: true };
}
const maybeSecureEnough = String(process.env.GRIST_BOOT_KEY).length > 10;
return {
success: maybeSecureEnough,
severity: 'hmm',
};
},
};
/**
* Based on:
* https://github.com/gristlabs/grist-core/issues/228#issuecomment-1803304438
*
* When GRIST_SERVE_SAME_ORIGIN is set, requests arriving to Grist need
* to have an accurate Host header.
*/
const _hostHeaderProbe: Probe = {
id: 'host-header',
name: 'Host header is sane',
apply: async (server, req) => {
const host = req.header('host');
const url = new URL(server.getHomeUrl(req));
if (url.hostname === 'localhost') {
return {
done: true,
};
}
if (String(url.hostname).toLowerCase() !== String(host).toLowerCase()) {
return {
success: false,
severity: 'hmm',
};
}
return {
done: true,
};
},
};

@ -93,6 +93,7 @@ import * as path from 'path';
import * as t from "ts-interface-checker";
import {Checker} from "ts-interface-checker";
import uuidv4 from "uuid/v4";
import { Document } from "app/gen-server/entity/Document";
// Cap on the number of requests that can be outstanding on a single document via the
// rest doc api. When this limit is exceeded, incoming requests receive an immediate
@ -646,6 +647,9 @@ export class DocWorkerApi {
// full document.
const dryRun = isAffirmative(req.query.dryrun || req.query.dryRun);
const dryRunSuccess = () => res.status(200).json({dryRun: 'allowed'});
const filename = await this._getDownloadFilename(req);
// We want to have a way to download broken docs that ActiveDoc may not be able
// to load. So, if the user owns the document, we unconditionally let them
// download.
@ -655,13 +659,13 @@ export class DocWorkerApi {
// We carefully avoid creating an ActiveDoc for the document being downloaded,
// in case it is broken in some way. It is convenient to be able to download
// broken files for diagnosis/recovery.
return await this._docWorker.downloadDoc(req, res, this._docManager.storageManager);
return await this._docWorker.downloadDoc(req, res, this._docManager.storageManager, filename);
} catch (e) {
if (e.message && e.message.match(/does not exist yet/)) {
// The document has never been seen on file system / s3. It may be new, so
// we try again after having created an ActiveDoc for the document.
await this._getActiveDoc(req);
return this._docWorker.downloadDoc(req, res, this._docManager.storageManager);
return this._docWorker.downloadDoc(req, res, this._docManager.storageManager, filename);
} else {
throw e;
}
@ -674,7 +678,7 @@ export class DocWorkerApi {
throw new ApiError('not authorized to download this document', 403);
}
if (dryRun) { dryRunSuccess(); return; }
return this._docWorker.downloadDoc(req, res, this._docManager.storageManager);
return this._docWorker.downloadDoc(req, res, this._docManager.storageManager, filename);
}
}));
@ -1222,7 +1226,7 @@ export class DocWorkerApi {
this._app.get('/api/docs/:docId/download/table-schema', canView, withDoc(async (activeDoc, req, res) => {
const doc = await this._dbManager.getDoc(req);
const options = this._getDownloadOptions(req, doc.name);
const options = await this._getDownloadOptions(req, doc);
const tableSchema = await collectTableSchemaInFrictionlessFormat(activeDoc, req, options);
const apiPath = await this._grist.getResourceUrl(doc, 'api');
const query = new URLSearchParams(req.query as {[key: string]: string});
@ -1241,18 +1245,16 @@ export class DocWorkerApi {
}));
this._app.get('/api/docs/:docId/download/csv', canView, withDoc(async (activeDoc, req, res) => {
// Query DB for doc metadata to get the doc title.
const {name: docTitle} = await this._dbManager.getDoc(req);
const options = this._getDownloadOptions(req, docTitle);
const options = await this._getDownloadOptions(req);
await downloadCSV(activeDoc, req, res, options);
}));
this._app.get('/api/docs/:docId/download/xlsx', canView, withDoc(async (activeDoc, req, res) => {
// Query DB for doc metadata to get the doc title (to use as the filename).
const {name: docTitle} = await this._dbManager.getDoc(req);
const options: DownloadOptions = !_.isEmpty(req.query) ? this._getDownloadOptions(req, docTitle) : {
filename: docTitle,
const options: DownloadOptions = (!_.isEmpty(req.query) && !_.isEqual(Object.keys(req.query), ["title"]))
? await this._getDownloadOptions(req)
: {
filename: await this._getDownloadFilename(req),
tableId: '',
viewSectionId: undefined,
filters: [],
@ -1734,11 +1736,23 @@ export class DocWorkerApi {
return docAuth.docId!;
}
private _getDownloadOptions(req: Request, name: string): DownloadOptions {
private async _getDownloadFilename(req: Request, tableId?: string, optDoc?: Document): Promise<string> {
let filename = optStringParam(req.query.title, 'title');
if (!filename) {
// Query DB for doc metadata to get the doc data.
const doc = optDoc || await this._dbManager.getDoc(req);
const docTitle = doc.name;
const suffix = tableId ? (tableId === docTitle ? '' : `-${tableId}`) : '';
filename = docTitle + suffix || 'document';
}
return filename;
}
private async _getDownloadOptions(req: Request, doc?: Document): Promise<DownloadOptions> {
const params = parseExportParameters(req);
return {
...params,
filename: name + (params.tableId === name ? '' : '-' + params.tableId),
filename: await this._getDownloadFilename(req, params.tableId, doc),
};
}

@ -68,14 +68,10 @@ export class DocWorker {
}
public async downloadDoc(req: express.Request, res: express.Response,
storageManager: IDocStorageManager): Promise<void> {
storageManager: IDocStorageManager, filename: string): Promise<void> {
const mreq = req as RequestWithLogin;
const docId = getDocId(mreq);
// Query DB for doc metadata to get the doc title.
const doc = await this._dbManager.getDoc(req);
const docTitle = doc.name;
// Get a copy of document for downloading.
const tmpPath = await storageManager.getCopy(docId);
if (isAffirmative(req.query.template)) {
@ -90,7 +86,7 @@ export class DocWorker {
return res.type('application/x-sqlite3')
.download(
tmpPath,
(optStringParam(req.query.title, 'title') || docTitle || 'document') + ".grist",
filename + ".grist",
async (err: any) => {
if (err) {
if (err.message && /Request aborted/.test(err.message)) {

@ -1,6 +1,8 @@
import {ObjMetadata, ObjSnapshot, ObjSnapshotWithMetadata} from 'app/common/DocSnapshot';
import {isAffirmative} from 'app/common/gutil';
import log from 'app/server/lib/log';
import {createTmpDir} from 'app/server/lib/uploads';
import {delay} from 'bluebird';
import * as fse from 'fs-extra';
import * as path from 'path';
@ -226,13 +228,27 @@ export class ChecksummedExternalStorage implements ExternalStorage {
const expectedChecksum = await this._options.sharedHash.load(fromKey);
// Let null docMD5s pass. Otherwise we get stuck if redis is cleared.
// Otherwise, make sure what we've got matches what we expect to get.
// S3 is eventually consistent - if you overwrite an object in it, and then read from it,
// you may get an old version for some time.
// AWS S3 was eventually consistent, but now has stronger guarantees:
// https://aws.amazon.com/blogs/aws/amazon-s3-update-strong-read-after-write-consistency/
//
// Prior to this change, if you overwrote an object in it and then read from it,
// you might have gotten an old version for some time.
// We are confident this should not be the case anymore, though this has to be studied carefully.
// If a snapshotId was specified, we can skip this check.
if (expectedChecksum && expectedChecksum !== checksum) {
log.error("ext %s download: data for %s has wrong checksum: %s (expected %s)",
this.label, fromKey, checksum, expectedChecksum);
return undefined;
const message = `ext ${this.label} download: data for ${fromKey} has wrong checksum:` +
` ${checksum} (expected ${expectedChecksum})`;
// If GRIST_SKIP_REDIS_CHECKSUM_MISMATCH is set, issue a warning only and continue,
// rather than issuing an error and failing.
// This flag is experimental and should be removed once we are
// confident that the checksums verification is useless.
if (isAffirmative(process.env.GRIST_SKIP_REDIS_CHECKSUM_MISMATCH)) {
log.warn(message);
} else {
log.error(message);
return undefined;
}
}
}

@ -28,6 +28,7 @@ import {appSettings} from 'app/server/lib/AppSettings';
import {addRequestUser, getTransitiveHeaders, getUser, getUserId, isAnonymousUser,
isSingleUserMode, redirectToLoginUnconditionally} from 'app/server/lib/Authorizer';
import {redirectToLogin, RequestWithLogin, signInStatusMiddleware} from 'app/server/lib/Authorizer';
import {BootProbes} from 'app/server/lib/BootProbes';
import {forceSessionChange} from 'app/server/lib/BrowserSession';
import {Comm} from 'app/server/lib/Comm';
import {create} from 'app/server/lib/create';
@ -175,6 +176,7 @@ export class FlexServer implements GristServer {
private _getLoginSystem?: () => Promise<GristLoginSystem>;
// Set once ready() is called
private _isReady: boolean = false;
private _probes: BootProbes;
constructor(public port: number, public name: string = 'flexServer',
public readonly options: FlexServerOptions = {}) {
@ -481,6 +483,57 @@ export class FlexServer implements GristServer {
});
}
/**
*
* Adds a /boot/$GRIST_BOOT_KEY page that shows diagnostics.
* Accepts any /boot/... URL in order to let the front end
* give some guidance if the user is stumbling around trying
* to find the boot page, but won't actually provide diagnostics
* unless GRIST_BOOT_KEY is set in the environment, and is present
* in the URL.
*
* We take some steps to make the boot page available even when
* things are going wrong, and should take more in future.
*
* When rendering the page, a hardcoded 'boot' tag is used to ensure
* that static assets are served locally and we aren't relying on
* APP_STATIC_URL being set correctly.
*
* We use a boot key so that it is more acceptable to have this
* boot page living outside of the authentication system, which
* could be broken.
*
* TODO: there are some configuration problems that currently
* result in Grist not running at all. Ideally they would result in
* Grist running in a limited mode that is enough to bring up the boot
* page.
*
*/
public addBootPage() {
if (this._check('boot')) { return; }
const bootKey = appSettings.section('boot').flag('key').readString({
envVar: 'GRIST_BOOT_KEY'
});
const base = `/boot/${bootKey}`;
this._probes = new BootProbes(this.app, this, base);
// Respond to /boot, /boot/, /boot/KEY, /boot/KEY/ to give
// a helpful message even if user gets KEY wrong or omits it.
this.app.get('/boot(/(:bootKey/?)?)?$', async (req, res) => {
const goodKey = bootKey && req.params.bootKey === bootKey;
return this._sendAppPage(req, res, {
path: 'boot.html', status: 200, config: goodKey ? {
} : {
errMessage: 'not-the-key',
}, tag: 'boot',
});
});
this._probes.addEndpoints();
}
public hasBoot(): boolean {
return Boolean(this._probes);
}
public denyRequestsIfNotReady() {
this.app.use((_req, res, next) => {
if (!this._isReady) {

@ -60,6 +60,7 @@ export interface GristServer {
getPlugins(): LocalPlugin[];
servesPlugins(): boolean;
getBundledWidgets(): ICustomWidget[];
hasBoot(): boolean;
}
export interface GristLoginSystem {
@ -147,6 +148,7 @@ export function createDummyGristServer(): GristServer {
servesPlugins() { return false; },
getPlugins() { return []; },
getBundledWidgets() { return []; },
hasBoot() { return false; },
};
}

@ -1,4 +1,12 @@
import {Features, getPageTitleSuffix, GristLoadConfig, IFeature} from 'app/common/gristUrls';
import {
Features,
getContactSupportUrl,
getFreeCoachingCallUrl,
getHelpCenterUrl,
getPageTitleSuffix,
GristLoadConfig,
IFeature
} from 'app/common/gristUrls';
import {isAffirmative} from 'app/common/gutil';
import {getTagManagerSnippet} from 'app/common/tagManager';
import {Document} from 'app/common/UserAPI';
@ -53,8 +61,9 @@ export function makeGristConfig(options: MakeGristConfigOptions): GristLoadConfi
org: process.env.GRIST_SINGLE_ORG || (mreq && mreq.org),
baseDomain,
singleOrg: process.env.GRIST_SINGLE_ORG,
helpCenterUrl: process.env.GRIST_HELP_CENTER || "https://support.getgrist.com",
freeCoachingCallUrl: process.env.FREE_COACHING_CALL_URL || "https://calendly.com/grist-team/grist-free-coaching-call",
helpCenterUrl: getHelpCenterUrl(),
freeCoachingCallUrl: getFreeCoachingCallUrl(),
contactSupportUrl: getContactSupportUrl(),
pathOnly,
supportAnon: shouldSupportAnon(),
enableAnonPlayground: isAffirmative(process.env.GRIST_ANON_PLAYGROUND ?? true),
@ -139,8 +148,11 @@ export function makeSendAppPage(opts: {
const needTagManager = (options.googleTagManager === 'anon' && isAnonymousUser(req)) ||
options.googleTagManager === true;
const tagManagerSnippet = needTagManager ? getTagManagerSnippet(process.env.GOOGLE_TAG_MANAGER_ID) : '';
const staticOrigin = process.env.APP_STATIC_URL || "";
const staticBaseUrl = `${staticOrigin}/v/${options.tag || tag}/`;
const staticTag = options.tag || tag;
// If boot tag is used, serve assets locally, otherwise respect
// APP_STATIC_URL.
const staticOrigin = staticTag === 'boot' ? '' : (process.env.APP_STATIC_URL || '');
const staticBaseUrl = `${staticOrigin}/v/${staticTag}/`;
const customHeadHtmlSnippet = server.create.getExtraHeadHtml?.() ?? "";
const warning = testLogin ? "<div class=\"dev_warning\">Authentication is not enforced</div>" : "";
// Preload all languages that will be used or are requested by client.

@ -104,6 +104,9 @@ export async function main(port: number, serverTypes: ServerType[],
}
server.addHealthCheck();
if (includeHome || includeApp) {
server.addBootPage();
}
server.denyRequestsIfNotReady();
if (includeHome || includeStatic || includeApp) {

@ -14,6 +14,7 @@ module.exports = {
main: "app/client/app",
errorPages: "app/client/errorMain",
apiconsole: "app/client/apiconsole",
boot: "app/client/boot",
billing: "app/client/billingMain",
form: "app/client/formMain",
// Include client test harness if it is present (it won't be in

@ -0,0 +1,148 @@
# Disposal and Cleanup
Garbage-collected languages make you think that you don't need to worry about cleanup for your objects. In reality, there are still often cases when you do. This page gives some examples, and describes a library to simplify it.
## What's the problem
In the examples, we care about a situation where you have a JS object that is responsible for certain UI, i.e. DOM: it listens to DOM changes to update state elsewhere, and listens to outside changes to update the DOM.
### DOM Elements
So this JS object knows how to create the DOM. Removing the DOM, when the component is to be removed, is usually easy: `parentNode.removeChild(child)`. Since it's a manual operation, you may define some method to do this, named perhaps "destroy" or "dispose" or "cleanup".
If there is logic tied to your DOM either via JQuery events, or KnockoutJS bindings, you'll want to clean up the node specially: for JQuery, use `.remove()` or `.empty()` methods; for KnockoutJS, use `ko.removeNode()` or `ko.cleanNode()`. KnockoutJS's methods automatically call JQuery-related cleanup functions if JQuery is loaded in the page.
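For instance, a minimal sketch of that cleanup, assuming `ko` (KnockoutJS) and JQuery are loaded and `node` is an element the component created:

    // Knockout-aware cleanup: disposes bindings on the node and its descendants
    // (including JQuery data/events if JQuery is loaded), then removes the node.
    ko.removeNode(node);

    // Pure JQuery alternative, when Knockout is not involved:
    $(node).remove();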
### Subscriptions and Computed Observables
But there is more. Consider this knockout code, adapted from their simplest example of a computed observable:

    function FullNameWidget(firstName, lastName) {
      this.fullName = ko.computed(function() {
        return firstName() + " " + lastName();
      });
      ...
    }
Here we have a constructor for a component which takes two observables as constructor parameters, and creates a new observable which depends on the two inputs. Whenever `firstName` or `lastName` changes, `this.fullName` gets recomputed. This makes it easy to create knockout-based bindings, e.g. to have a DOM element reflect the full name when either first or last name changes.
Now, what happens when this component is destroyed? It removes its associated DOM. Now when `firstName` or `lastName` change, there are no visible changes. But the function to recompute `this.fullName` still gets called, and still retains a reference to `this`, preventing the object from being garbage-collected.
The issue is that `this.fullName` is subscribed to `firstName` and `lastName` observables. It needs to be unsubscribed when the component is destroyed.
KnockoutJS recognizes this, and makes it easy: just call `this.fullName.dispose()`. We just have to remember to do it when we destroy the component.
This situation would exist without knockout too: the issue is that the component is listening to external changes to update the DOM that it is responsible for. When the component is gone, it should stop listening.
### Tying life of subscriptions to DOM
Since the situation above is so common in KnockoutJS, it offers some assistance. Specifically, when a computed observable is created using knockout's own binding syntax (by specifying a JS expression in an HTML attribute), knockout will clean it up automatically when the DOM node is removed using `ko.removeNode()` or `ko.cleanNode()`.
Knockout also allows tying other cleanup to DOM node removal, documented on the [Custom disposal logic](http://knockoutjs.com/documentation/custom-bindings-disposal.html) page.
In the example above, you could use `ko.utils.domNodeDisposal.addDisposeCallback(node, function() { self.fullName.dispose(); })`, and when you destroy the component and remove the `node` via `ko.removeNode()` or `ko.cleanNode()`, the `fullName` observable will be properly disposed.
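Spelled out, that might look like this (a sketch reusing `node` and `fullName` from the example above):

    var self = this;
    // Dispose the computed observable automatically when its DOM node is cleaned up.
    ko.utils.domNodeDisposal.addDisposeCallback(node, function() {
      self.fullName.dispose();
    });

    // Later, when the component is destroyed:
    ko.removeNode(node);   // triggers the dispose callback above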
### Other knockout subscriptions
There are other situations with subscriptions. For example, we may want to subscribe to a `viewId` observable, and when it changes, replace the currently-rendered View component. This might look like so:

    function GristDoc() {
      this.viewId = ko.observable();
      this.viewId.subscribe(function(viewId) {
        this.loadView(viewId);
      }, this);
    }
Once GristDoc is destroyed, the subscription to `this.viewId` still exists, so `this.viewId` retains a reference to `this` (for calling the callback). Technically, there is no problem: as long as there are no references to `this.viewId` from outside this object, the whole cycle should be garbage-collected.
But it's very risky: if anything else has a reference to `this.viewId` (e.g. if `this.viewId` is itself subscribed to, say, `window.history` changes), then the entire `GristDoc` is unavailable to garbage-collection, including all the DOM to which it probably retains references even after that DOM is detached from the page.
Besides the memory leak, it means that when `this.viewId` changes, it will continue calling `this.loadView()`, continuing to update DOM that no one will ever see. Over time, that would of course slow down the browser, but would be hard to detect and debug.
Again, KnockoutJS offers a way to unsubscribe: `.subscribe()` returns a `ko.subscription` object, which in turn has a `dispose()` method. We just need to call it, and the callback will be unsubscribed.
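A sketch of what that looks like for the `GristDoc` example above (the `_viewIdSub` member is made up for illustration):

    function GristDoc() {
      this.viewId = ko.observable();
      // Keep the ko.subscription object so it can be disposed later.
      this._viewIdSub = this.viewId.subscribe(function(viewId) {
        this.loadView(viewId);
      }, this);
    }

    GristDoc.prototype.dispose = function() {
      this._viewIdSub.dispose();   // stop calling loadView() on viewId changes
    };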
### Backbone Events
To be clear, the problem isn't with Knockout, it's with the idea of subscribing to outside events. Backbone allows listening to events, which creates the same problem, and Backbone offers a similar solution.
For example, let's say you have a component that listens to an outside event and does stuff. With a made-up example, you might have a constructor like:

    function Game(basket) {
      basket.on('points:scored', function(team, points) {
        // Update UI to show updated points for the team.
      });
    }
Let's say that a `Game` object is destroyed, and a new one created, but the `basket` persists across Games. As the user continues to score points on the basket, the old (supposedly destroyed) Game object continues to have that inline callback called. It may not be showing anything, but only because the DOM it's updating is no longer attached to the page. It's still taking resources, and may even continue to send stuff to the server.
We need to clean up when we destroy the Game object. In this example, it's pretty annoying. We'd have to save the `basket` object and callback in member variables (like `this.basket`, `this.callback`), so that in the cleanup method, we could call `this.basket.off('points:scored', this.callback)`.
Many people have gotten bitten by that in Backbone (see this [stackoverflow post](http://stackoverflow.com/questions/14041042/backbone-0-9-9-difference-between-listento-and-on), with a bunch of links to blog posts about it).
Backbone's solution is the `listenTo()` method. You'd use it like so:

    function Game(basket) {
      this.listenTo(basket, 'points:scored', function(team, points) {
        // Update UI to show updated points for the team.
      });
    }
Then when you destroy the Game object, you only have to call `this.stopListening()`. It keeps track of what you listened to, and unsubscribes. You just have to remember to call it. (Certain objects in Backbone will call `stopListening()` automatically when they are being cleaned up.)
### Internal events
If a component listens to an event on a DOM element it itself owns, and if it's using JQuery, then we don't need to do anything special. If on destruction of the component, we clean up the DOM element using `ko.removeNode()`, the JQuery event bindings should automatically be removed. (This hasn't been rigorously verified, but if correct, is a reason to use JQuery for browser events rather than native `addEventListener`.)
## How to do cleanup uniformly
Since we need to destroy the components' DOM explicitly, the components should provide a method to call for that. By analogy with KnockoutJS, let's call it `dispose()`.
- We know that it needs to remove the DOM that the component is responsible for, probably using `ko.removeNode`.
- If the component used Backbone's `listenTo()`, it should call `stopListening()` to unsubscribe from Backbone events.
- If the component maintains any knockout subscriptions or computed observables, it should call `.dispose()` on them.
- If the component owns other components, then those should be cleaned up recursively, by calling `.dispose()` on those.
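Put together, a hand-written `dispose()` might look like this (a sketch; the member names are made up):

    MyComponent.prototype.dispose = function() {
      ko.removeNode(this.rootNode);      // remove the DOM we are responsible for
      this.stopListening();              // drop subscriptions made with Backbone's listenTo()
      this.fullName.dispose();           // dispose computed observables and ko subscriptions
      this.childComponent.dispose();     // recursively dispose owned components
    };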
The trick is how to make it easy to remember to do all necessary cleanup. I propose keeping track of each such object when it first enters the picture.
## 'Disposable' class
The idea is to have a class that can be mixed into (or inherited by) any object, and whose purpose is to keep track of things this object "owns", that it should be responsible for cleaning up. To combine the examples above:

    function Component(firstName, lastName, basket) {
      this.fullName = this.autoDispose(ko.computed(function() {
        return firstName() + " " + lastName();
      }));
      this.viewId = ko.observable();
      this.autoDispose(this.viewId.subscribe(function(viewId) {
        this.loadView(viewId);
      }, this));
      this.ourDom = this.autoDispose(somewhere.appendChild(some_dom_we_create));
      this.listenTo(basket, 'points:scored', function(team, points) {
        // Update UI to show updated points for the team.
      });
    }
Note the `this.autoDispose()` calls. They mark the argument as being owned by `this`. When `this.dispose()` is called, those values get disposed of as well.
The disposal itself is fairly straightforward: if the object has a `dispose` method, we'll call that. If it's a DOM node, we'll call `ko.removeNode` on it. The `dispose()` method of Disposable objects will always call `this.stopListening()` if such a method exists, so that subscriptions using Backbone's `listenTo` are cleaned up automatically.
To do additional cleanup when `dispose()` is called, the derived class can override `dispose()`, do its other cleanup, then call `Disposable.prototype.dispose.call(this)`.
For convenience, the Disposable class provides a few other methods:
- `disposeRelease(part)`: releases an owned object, so that it doesn't get auto-disposed.
- `disposeDiscard(part)`: disposes of an owned object early (rather than wait for `this.dispose`).
- `isDisposed()`: returns whether `this.dispose()` has already been called.
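A small sketch of how these might be used (the `Importer` class and its members are hypothetical, and it is assumed to mix in Disposable):

    function Importer(tempFile) {
      // Own the temp file: it will be disposed together with the Importer.
      this.tempFile = this.autoDispose(tempFile);
    }

    Importer.prototype.handOff = function() {
      if (this.isDisposed()) { return null; }
      this.disposeRelease(this.tempFile);   // stop owning it; this.dispose() won't touch it
      return this.tempFile;
    };

    Importer.prototype.abort = function() {
      this.disposeDiscard(this.tempFile);   // get rid of it early, without waiting for dispose()
    };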
### Destroying destroyed objects
There is one more thing that the Disposable class's `dispose()` method will do: destroy the object, as in ruin, wreck, wipe out. Specifically, it will go through all properties of `this`, and set each to a junk value. This achieves two goals:
1. In any of the examples above, if you forgot to mark anything with `this.autoDispose()`, and some callback continues to be called after the object has been destroyed, you'll get errors. Not just silent waste of resources that slow down the site and are hard to detect.
2. It removes references, potentially breaking reference cycles. Imagine that something wrongly retains a reference to a destroyed object (which logically nothing should, but something might by mistake). If it tries to use the object, it will fail (see point 1). But even if it doesn't access the object, it's preventing the garbage collector from collecting any part of the object. If we break references, then in this situation the GC can still collect all the properties of the destroyed object.
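A sketch of the idea (the real logic lives in `dispose.js`; this is only to illustrate):

    function wipeOutObject(obj) {
      // Replace every property with a junk value, so that accidental use of the
      // destroyed object fails loudly instead of silently doing work, and so that
      // whatever the properties referred to becomes collectable.
      Object.keys(obj).forEach(function(key) {
        obj[key] = null;
      });
    }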
## Conclusion
All JS client-side components that need cleanup (e.g. maintain DOM, observables, listen to events, or subscribe to anything), should inherit from `Disposable`. To destroy them, call their `.dispose()` method. Whenever they take responsibility for any piece that requires cleanup, they should wrap that piece in `this.autoDispose()`.
This should go a long way towards avoiding leaks and slowdowns.

@ -0,0 +1,218 @@
# Grist Data Format
Grist Data Format is used to send and receive data from a Grist document. For example, an implementer of an import module would need to translate data to Grist Data Format. A user of Grist Basket APIs would fetch and upload data in Grist Data Format.
The format is optimized for tabular data. A table consists of rows and columns, containing a single value for each row for each column. Various types are supported for the values.
Each column has a name and a type. The type is not strict: a column may contain values of other types. However, the type is the intended type of the value for that column, and allows those values to be represented more efficiently.
Grist Data Format is readily serialized to JSON. Other serializations are possible; for example, see below for a .proto file that allows serializing Grist Data Format as a protocol buffer.
## Format Specification
### Document
At the top, Grist Data Format is a Document object with a single key “tables” mapping to an array of Tables:
```javascript
{
tables: [Tables…]
}
```
### Table
```javascript
{
name: "TableName",
colinfo: [ColInfo…],
columns: ColData
}
```
The `name` is the name of the table. The `colinfo` array has an item to describe each column, and `columns` is the actual table data in column-oriented layout.
### ColInfo
```javascript
{
name: "ColName",
type: "ColType",
options: <arbitrary options>
}
```
The `name` is the name of the column, and `type` is its type. The field `options` optionally specifies type-specific options that affect the column (e.g. the number of decimal places to display for a floating-point number).
### ColData
```javascript
{
<colName1>: ColValues,
<colName2>: ColValues,
...
}
```
The data in the table is represented as an object mapping a column name to an array of values for the column. This column-oriented representation allows the data to be represented more concisely.
### ColValues
```javascript
[CellValue, CellValue, ...]
```
ColValues is an array of all values for the column. We'll refer to the type of each value as `CellValue`. ColValues has an entry for each row in the table. In particular, each ColValues array in a ColData object has the same number of entries.
### CellValue
CellValue represents the value in one cell. We support various types of values, documented below. When represented as JSON, CellValue is one of the following JSON types:
- string
- number
- bool
- null
- array of the form `[typeCode, args...]`
The interpretation of CellValue is affected by the column's type, and is described in more detail below.
## JSON Schema
The description above can be summarized by this JSON Schema:
```json
{
"definitions": {
"Table": {
"type": "object",
"properties": {
"name": { "type": "string" },
"colinfo": { "type": "array", "items": { "$ref": "#/definitions/ColInfo" } }
"columns": { "$ref": "#/definitions/ColData" }
}
},
"ColInfo": {
"type": "object",
"properties": {
"name": { "type": "string" },
"type": { "type": "string" },
"options": { "type": "object" }
}
},
"ColData": {
"type": "object",
"additionalProperties": { "$ref": "#/definitions/ColValues" }
},
"ColValues": {
"type": "array",
"items": { "type": "CellValue" }
}
},
"type": "object",
"properties": {
"tables": { "type": "array", "items": { "$ref": "#/definitions/Table" } }
}
}
```
## Record identifiers
Each table should have a column named `id`, whose values should be unique across the table. It is used to identify records in queries and actions. Its details, including its type, are left for now outside the scope of this specification, because the format isn't affected by them.
## Naming
Names for tables and columns must consist of alphanumeric ASCII characters or underscore (i.e. `[0-9a-zA-Z_]`). They may not start with an underscore or a digit. Different tables and different columns within a table must have unique names case-insensitively (i.e. they cannot differ in case only).
Certain names (`id` being one of them) may be reserved, e.g. by Grist, for internal purposes, and would not be usable for user data. Such restrictions are outside the scope of this specification.
Note that this combination of rules allows tables and column names to be valid identifiers in pretty much every programming language (including Python and Javascript), as well as valid names of columns in databases.
## Value Types
The format supports a number of data types. Some types have a short representation (e.g. `Numeric` as a JSON `number`, and `Text` as a JSON `string`), but all types have an explicit representation as well.
The explicit representation of a value is an array `[typeCode, args...]`. The first member of the array is a string code that defines the type of the value. The rest of the elements are arguments used to construct the actual value.
The following table lists currently supported types and their short and explicit representations.
| **Type Name** | **Short Repr** | **[Type Code, Args...]** | **Description** |
|---------------|----------------|--------------------------|-----------------|
| `Numeric` | `number`* | `['n',number]` | double-precision floating point number |
| `Text` | `string`* | `['s',string]` | Unicode string |
| `Bool` | `bool`* | `['b',bool]` | Boolean value (true or false) |
| `Null` | `null`* | `null` | Null value (no special explicit representation) |
| `Int` | `number` | `['i',number]` | 32-bit integer |
| `Date` | `number` | `['d',number]` | Calendar date, represented as seconds since Epoch to 00:00 UTC on that date. |
| `DateTime` | `number` | `['D',number]` | Instant in time, represented as seconds since Epoch |
| `Reference` | `number` | `['R',number]` | Identifier of a record in a table. |
| `ReferenceList` | | `['L',number,...]` | List of record identifiers |
| `Choice` | `string` | `['C',string]` | Unicode string selected from a list of choices. |
| `PositionNumber` | `number` | `['P',number]` | a double used to order records relative to each other. |
| `Image` | | `['I',string]` | Binary data representing an image, encoded as base64 |
| `List` | | `['l',values,...]` | List of values of any type. |
| `JSON` | | `['J',object]` | JSON-serializable object |
| `Error` | | `['E',string,string?,value?]` | Exception, with the first argument giving the exception type, the second an optional message, and the optional third containing additional info. |
An important goal is to represent data efficiently in the common case. When a value matches the column's type, the short representation is used. For example, in a Numeric column, a Numeric value is represented as a `number`, and in a Date column, a Date value is represented as a `number`.
If a value does not match the column's type, the short representation is still used when the value's type is one of the starred types in the table AND its short type is different from the column's own short type.
For example:
- In a Numeric column, Numeric is `number`, Text is `string` (being a starred type), but a Date is `['d',number]`.
- In a Date column, Date is `number`, and a Numeric value is `['n',number]`, because even though it's starred, it conflicts with Date's own short type.
- In a Text column, Text is `string`, Numeric is `number` (starred), and Date is `['d',number]` (not starred).
Note how for the common case of a value matching the column's type, we can always use the short representation. But the format still allows values to have an explicit type that's different from the specified one.
Note also that columns of any of the starred types use the same interpretation for contained values.
The primary use case is to allow, for example, storing a value like "N/A" or "TBD" or "Ask Bob" in a column of type Numeric or Date. Another important case is to store errors produced by a computation.
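For instance, the ColValues of a single hypothetical column of type `Numeric` could combine all of these cases:
```javascript
[
  17.5,                 // a Numeric value: short representation
  "N/A",                // a Text value: short representation (starred, and its short type differs from Numeric's)
  ["d", 1500076800],    // a Date value: explicit representation (Date is not a starred type)
  ["E", "ZeroDivisionError", "float division by zero"]   // an Error produced by a computation
]
```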
Other complex types may be added in the future.
## Column Types
Any of the types listed in the table above may be specified as a column type.
In addition, a column may specify the type `Any`. For the purpose of value interpretation, it works the same as any of the starred types, but it does not convey anything about the expected type of values in the column.
## Other serializations
Grist Data Format is naturally serialized to JSON, which is fast and convenient to use in Javascript code. It is also possible to serialize it in other ways, e.g. as a Google protobuf.
Here is a `.proto` definition file that allows for efficient protobuf representation of data in Grist Data Format.
```proto
message Document {
repeated Table tables = 1;
}
message Table {
string name = 1;
repeated ColInfo colinfo = 2;
repeated ColData columns = 3;
}
message ColInfo {
string name = 1;
string type = 2;
string options = 3;
}
message ColData {
repeated Value value = 1;
}
message Value {
oneof value {
double vNumeric = 1;
string vText = 2;
bool vBool = 3;
// Absence of a set field represents a null
int32 vInt = 5;
double vDate = 6;
double vDateTime = 7;
int32 vReference = 8;
List vReferenceList = 9;
string vChoice = 10;
double vPositionNumber = 11;
bytes vImage = 12;
List vList = 13;
string vJSON = 14;
List vError = 15;
}
}
message List {
repeated Value value = 1;
}
```

@ -0,0 +1,42 @@
# Migrations
If you change the Grist schema, i.e. the schema of the Grist metadata tables (in `sandbox/grist/schema.py`), you'll have to increment `SCHEMA_VERSION` (at the top of that file) and create a migration. A migration is a set of actions that gets applied to a document at the previous version to make it satisfy the new schema.
To add a migration, add a function to `sandbox/grist/migrations.py`, of this form (using the new version number):
```lang=python
@migration(schema_version=11)
def migration11(tdset):
return tdset.apply_doc_actions([
add_column('_grist_Views_section', 'embedId', 'Text'),
])
```
Some migrations need to actually add or modify the data in a document. You can look at other migrations in that file for examples.
If you are doing anything other than adding a column or a table, you must read this document to the end.
## Philosophy of migrations
Migrations are tricky. Normally, we think about the software we are writing, but migrations work with documents that were created by an older version of the software, which may not have the logic the current version has, and MAY have logic that the current version knows nothing about.
This is why migration code uses its own "dumb" implementation for loading and examining data (see `sandbox/grist/table_data_set.py`): trying to load an older document using our primary code base will usually fail, since the document will not satisfy our current assumptions.
## Restrictions
The rules below should make it at least barely possible for people who are not all on the same Grist version to share documents (even so, it will require more work). They should also make it somewhat safe to upgrade and then open the document with a previous version.
WARNING: Do not remove, modify, or rename metadata tables or columns.
Mark old columns and tables as deprecated using a comment. We may want to add a feature to mark them in code, to prevent their use in new versions. For now, it's enough to add a comment and remove references to the deprecated entities throughout the code. An important goal is to prevent adding same-named entities in the future, or reusing the same column with a different meaning. So please add a comment of the form:
```lang=python
# <columnName> is deprecated as of version XX. Do not remove or reuse.
```
To justify keeping old columns around, consider what would happen if A (at version 10) communicates with B (at version 11). If column "foo" exists in v10, and is deleted in v11, then A may send actions that refer to "foo", and B would consider them invalid, since B's code has no idea what "foo" is. The solution is that B needs to still know about "foo", hence we don't remove old columns.
Similar justification applies to renaming columns, or modifying them (e.g. changing a type).
WARNING: If you change the meaning or type of a column, you have to create a new column with a new name.
You'll also need to write a migration to fill it from the old column, and mark the old column as deprecated.

@ -0,0 +1,53 @@
Document URLs
-----------------
Status: WIP
Options
* An id (e.g. google)
* Several ids (e.g. airtable)
* A text name
* Several text names (e.g. github)
* An id and friendly name (e.g. dropbox)
Leaning towards an id and friendly name. Only the id is interpreted by the router. The name is checked only to make sure it matches the current name of the document; if not, we redirect to the revised URL before proceeding.
Length of ids depends on whether we'll be using them for obscurity to enable anyone-who-has-link-can-view style security.
Possible URLs
---------------
* docs.getgrist.com/viwpHfmtMHmKBUSyh/Document+Name
* orgname.getgrist.com/viwpHfmtMHmKBUSyh/Document+Name
* getgrist.com/d/viwpHfmtMHmKBUSyh/Document+Name
* getgrist.com/d/tblWVZDtvlsIFsuOR/viwpHfmtMHmKBUSyh/Document+Name
* getgrist.com/d/dd5bf494e709246c7601e27722e3aee656b900082c3f5f1598ae1475c35c2c4b/Document+Name
* getgrist.com/doc/fTSIMrZT3fDTvW7XDBq1b7nhWa24Zl55EVpsaO3TBBE/Document%20Name
Organization subdomains
------------------------------
Organizations get to choose a subdomain, and will access their workspaces and documents at `orgname.getgrist.com`. In addition, personal workspaces need to be uniquely determined by a URL, using `docs-` followed by the numeric id of the "personal organization":
* docs-1234.getgrist.com/
* docs.getgrist.com/o/docs-1234/
Since subdomains need to play along with all the other subdomains we use for getgrist.com, the following is a list of names that may NOT be used by any organization:
* `docs-\d+` to identify personal workspaces
* Anything that starts with underscore (`_`) (this includes special subdomains like `_domainkey`)
* Subdomains used by us for various purposes. As of 2018-10-09, these include:
* aws
* gristlogin
* issues
* metrics
* phab
* releases
* test
* vpn
* www
Some more reserved subdomains:
* doc-worker-NN
* v1-* (this could be released eventually, but currently "v1-mock", "v1-docs", "v1-static", and any other "v1-*" are special in our code and/or routing)
* docs
* api

@ -1,6 +1,6 @@
{
"name": "grist-core",
"version": "1.1.11",
"version": "1.1.12",
"license": "Apache-2.0",
"description": "Grist is the evolution of spreadsheets",
"homepage": "https://github.com/gristlabs/grist-core",

@ -4,7 +4,7 @@ create tables, add and remove columns, etc, Grist stores various document metada
users' tables, views, etc.) also in tables.
Before changing this file, please review:
https://phab.getgrist.com/w/migrations/
/documentation/migrations.md
"""

@ -0,0 +1,15 @@
<!doctype html>
<html>
<head>
<meta charset="utf8">
<!-- INSERT BASE -->
<link rel="icon" type="image/x-icon" href="icons/favicon.png" />
<link rel="stylesheet" href="icons/icons.css">
<!-- INSERT LOCALE -->
<!-- INSERT CONFIG -->
<title>Loading...<!-- INSERT TITLE SUFFIX --></title>
</head>
<body>
<script crossorigin="anonymous" src="boot.bundle.js"></script>
</body>
</html>

@ -547,7 +547,9 @@
"You do not have write access to this site": "You do not have write access to this site",
"Download full document and history": "Download full document and history",
"Remove all data but keep the structure to use as a template": "Remove all data but keep the structure to use as a template",
"Remove document history (can significantly reduce file size)": "Remove document history (can significantly reduce file size)"
"Remove document history (can significantly reduce file size)": "Remove document history (can significantly reduce file size)",
"Download": "Download",
"Download document": "Download document"
},
"NotifyUI": {
"Ask for help": "Ask for help",
@ -1375,5 +1377,15 @@
"FormSuccessPage": {
"Form Submitted": "Form Submitted",
"Thank you! Your response has been recorded.": "Thank you! Your response has been recorded."
},
"DateRangeOptions": {
"Last 30 days": "Last 30 days",
"Last 7 days": "Last 7 days",
"Last Week": "Last Week",
"Next 7 days": "Next 7 days",
"This month": "This month",
"This week": "This week",
"This year": "This year",
"Today": "Today"
}
}

@ -39,10 +39,10 @@
"View As": "Voir en tant que",
"Remove column {{- colId }} from {{- tableId }} rules": "Supprimer la colonne {{-colId}} des règles de la table {{-tableId}}",
"Seed rules": "Règles par défaut",
"When adding table rules, automatically add a rule to grant OWNER full access.": "Ajouter automatiquement une règle donnant tous les droits au groupe OWNER.",
"When adding table rules, automatically add a rule to grant OWNER full access.": "Pour chaque ajout de règle pour une table, ajouter automatiquement une règle donnant tous les droits au groupe OWNER.",
"Permission to edit document structure": "Droits d'édition de la structure",
"This default should be changed if editors' access is to be limited. ": "Cette valeur par défaut doit être modifiée si l'on souhaite limiter l'accès des éditeurs. ",
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "Autorise les éditeurs à éditer la structure (modifier/supprimer des tables, colonnes, mises en page...) et à écrire des formules, ce qui donne accès à l'ensemble des données sans prendre en compte d'éventuelles restrictions de droits de lecture.",
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "Autorise les éditeurs à éditer la structure (modifier/supprimer des tables, colonnes, mises en page) et à écrire des formules, ce qui donne accès à l'ensemble des données sans prendre en compte d'éventuelles restrictions de droits de lecture.",
"Add Table-wide Rule": "Ajouter une règle pour l'ensemble du tableau"
},
"AccountPage": {
@ -52,15 +52,15 @@
"Email": "E-mail",
"Name": "Nom",
"Save": "Enregistrer",
"Password & Security": "Mot de passe & Sécurité",
"Password & Security": "Mot de passe et sécurité",
"Login Method": "Mode de connexion",
"Change Password": "Modifier le mot de passe",
"Allow signing in to this account with Google": "Autoriser la connexion à ce compte avec Google",
"Two-factor authentication": "Authentification à deux facteurs",
"Two-factor authentication is an extra layer of security for your Grist account designed to ensure that you're the only person who can access your account, even if someone knows your password.": "L'authentification à double facteur est une couche additionnelle de sécurité pour votre compte Grist qui permet de s'assurer que vous êtes la seule personne qui peut accéder à votre compte, même si quelqu'un d'autre connaît votre mot de passe.",
"Two-factor authentication is an extra layer of security for your Grist account designed to ensure that you're the only person who can access your account, even if someone knows your password.": "L'authentification à double facteur est une couche supplémentaire de sécurité pour votre compte Grist qui permet de s'assurer que vous êtes la seule personne qui peut accéder à votre compte, même si quelqu'un d'autre connaît votre mot de passe.",
"Theme": "Thème",
"API Key": "Clé dAPI",
"Names only allow letters, numbers and certain special characters": "Les noms d'utilisateurs ne doivent contenir que des lettres, des chiffres, et certains caractères spéciaux",
"Names only allow letters, numbers and certain special characters": "Les noms d'utilisateurs ne doivent contenir que des lettres, des chiffres et certains caractères spéciaux",
"Language": "Langue"
},
"AccountWidget": {
@ -415,7 +415,7 @@
"Hidden Columns": "Colonnes cachées",
"Lookups": "Champ rapporté",
"No reference columns.": "Pas de colonne de référence.",
"Apply on record changes": "Appliquer lors de modification de la ligne",
"Apply on record changes": "Appliquer lors des modifications de l'enregistrement",
"Duplicate in {{- label}}": "Duplica dans {{-label}}",
"Created By": "Créé(e) par",
"Last Updated At": "Dernière mise à jour le",
@ -544,7 +544,9 @@
"You do not have write access to the selected workspace": "Vous navez pas accès en écriture à ce dossier",
"Remove all data but keep the structure to use as a template": "Supprimer toutes les données mais garder la structure comme modèle",
"Remove document history (can significantly reduce file size)": "Supprimer l'historique du document (peut réduire sensiblement la taille du fichier)",
"Download full document and history": "Télécharger le document complet et l'historique"
"Download full document and history": "Télécharger le document complet et l'historique",
"Download": "Télécharger",
"Download document": "Télécharger le document"
},
"NotifyUI": {
"Upgrade Plan": "Améliorer votre abonnement",
@ -629,7 +631,7 @@
"Detach": "Détacher",
"SELECT BY": "SÉLECTIONNER PAR",
"Select Widget": "Choisir la vue",
"SELECTOR FOR": "SÉLECTEUR",
"SELECTOR FOR": "SÉLECTEUR POUR",
"Save": "Enregistrer",
"You do not have edit access to this document": "Vous navez pas accès en écriture à ce document",
"Add referenced columns": "Ajouter une colonne référencée",
@ -644,11 +646,11 @@
"Hidden field": "Champ caché",
"Layout": "Mise en page",
"Submission": "Soumission",
"Submit button label": "Libellé du bouton de soumission",
"Submit button label": "Libellé du bouton de validation",
"Success text": "Message de succès",
"Table column name": "Nom de la colonne",
"Enter redirect URL": "Saisir l'URL de redirection",
"Reset form": "Restaurer le formulaire",
"Reset form": "Réinitialiser le formulaire",
"Submit another response": "Soumettre une autre réponse",
"Required field": "Champ obligatoire"
},
@ -894,7 +896,7 @@
"Save": "Enregistrer",
"Cancel": "Annuler",
"Ok": "OK",
"Don't show tips": "Ne pas montrer les astuces",
"Don't show tips": "Masquer les astuces",
"Undo to restore": "Annuler et rétablir",
"Don't show again": "Ne plus montrer",
"Delete": "Supprimer",
@ -1314,7 +1316,7 @@
"Maybe Later": "Peut-être plus tard",
"free coaching call": "appel d'assistance gratuit",
"Schedule Call": "Planifier l'appel",
"On the call, we'll take the time to understand your needs and tailor the call to you. We can show you the Grist basics, or start working with your data right away to build the dashboards you need.": "Lors de l'appel, nous prendrons le temps de comprendre vos besoins et d'adapter l'appel à vos besoins. Nous pouvons vous montrer les bases de Grist, ou commencer à travailler avec vos données immédiatement pour construire les tableaux de bord dont vous avez besoin.",
"On the call, we'll take the time to understand your needs and tailor the call to you. We can show you the Grist basics, or start working with your data right away to build the dashboards you need.": "Lors de l'appel, nous prendrons le temps de comprendre vos besoins et d'adapter l'appel à ces derniers. Nous pouvons vous montrer les bases de Grist, ou commencer tout de suite à travailler avec vos données pour construire les tableaux de bord dont vous avez besoin.",
"Schedule your {{freeCoachingCall}} with a member of our team.": "Planifiez votre {{freeCoachingCall}} avec un membre de notre équipe."
},
"FormView": {
@ -1340,10 +1342,10 @@
"Header": "Titre"
},
"UnmappedFieldsConfig": {
"Mapped": "Utilisés",
"Mapped": "Utilisé",
"Select All": "Tout sélectionner",
"Unmap fields": "Champs non utilisés",
"Unmapped": "Non utilisés",
"Unmapped": "Non utilisé",
"Clear": "Effacer",
"Map fields": "Champs utilisés"
},
@ -1355,8 +1357,8 @@
"Delete": "Supprimer"
},
"CustomView": {
"Some required columns aren't mapped": "Certaines colonnes obligatoires ne sont pas mappées",
"To use this widget, please map all non-optional columns from the creator panel on the right.": "Pour utiliser cette vue, mappez toutes les colonnes non optionnelles à partir du panneau du créateur sur la droite."
"Some required columns aren't mapped": "Certaines colonnes obligatoires ne sont pas utilisées",
"To use this widget, please map all non-optional columns from the creator panel on the right.": "Pour utiliser cette vue, utilisez toutes les colonnes obligatoires à partir du panneau du créateur sur la droite."
},
"FormContainer": {
"Build your own form": "Créez votre propre formulaire",

@ -103,16 +103,39 @@
"Reset {{count}} entire columns_one": "列全体をリセット",
"Convert formula to data": "数式をデータに変換する",
"Freeze {{count}} more columns_other": "さらに {{count}} 列固定する",
"Hide {{count}} columns_one": "列非表示",
"Hide {{count}} columns_one": "列非表示",
"Insert column to the left": "左側に列を挿入",
"Sorted (#{{count}})_other": "ソート (#{{count}})"
"Sorted (#{{count}})_other": "ソート (#{{count}})",
"Attachment": "添付ファイル",
"Add column": "列を追加",
"Adding UUID column": "UUID列を追加",
"Detect Duplicates in...": "重複を検出...",
"UUID": "UUID",
"Add column with type": "型を指定して列を追加",
"Add formula column": "数式列を追加",
"Apply to new records": "新しいレコードに適用",
"Apply on record changes": "レコードの変更に適用",
"Authorship": "作成者名",
"Timestamp": "タイムスタンプ",
"Detect duplicates in...": "重複を検出...",
"Numeric": "数値",
"Text": "テキスト",
"Integer": "整数",
"Toggle": "トグル",
"Date": "日付",
"DateTime": "日時",
"Choice": "選択",
"Choice List": "複数選択",
"Lookups": "Lookups",
"Shortcuts": "ショートカット",
"Show hidden columns": "非表示列を再表示"
},
"DocMenu": {
"This service is not available right now": "このサービスは現在ご利用いただけません",
"Workspace not found": "ワークスペースが見つかりません",
"Discover More Templates": "その他のテンプレート",
"Current workspace": "現在のワークスペース",
"Edited {{at}}": "{{at}}による編集",
"Edited {{at}}": "{{at}}に更新",
"Pin Document": "ドキュメントをピン留めする",
"Remove": "削除",
"By Date Modified": "変更日",
@ -175,7 +198,14 @@
"Add referenced columns": "参照列の追加",
"TRANSFORM": "変換",
"Sort & Filter": "ソート&フィルター",
"Widget": "ウィジェット"
"Widget": "ウィジェット",
"Submit button label": "送信ボタンのラベル",
"Submit another response": "別の回答を送信",
"Display button": "ボタンを表示",
"Redirect automatically after submission": "送信後に自動的にリダイレクトする",
"Redirection": "リダイレクト",
"Required field": "必須フィールド",
"Enter redirect URL": "リダイレクト先URL"
},
"FloatingPopup": {
"Maximize": "最大化",
@ -374,9 +404,10 @@
"Rules for table ": "テーブルのルール ",
"Checking...": "チェック中…",
"Special Rules": "特別ルール",
"View As": "として表示",
"View As": "役割で表示",
"Seed rules": "シード・ルール",
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "編集者による構造の編集(例:テーブル、列、レイアウトの変更や削除)、および数式の書き込みを許可し、読み取り制限に関係なくすべてのデータにアクセスできるようにする。"
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "編集者による構造の編集(例:テーブル、列、レイアウトの変更や削除)、および数式の書き込みを許可し、読み取り制限に関係なくすべてのデータにアクセスできるようにする。",
"Add Table-wide Rule": "テーブル全体のルールを追加"
},
"FieldEditor": {
"It should be impossible to save a plain data value into a formula column": "単純なデータ値を数式列に保存することは不可能なはずです",
@ -523,7 +554,7 @@
"Code View": "コードビュー",
"Return to viewing as yourself": "自分自身のビューに戻る",
"Raw Data": "生データ",
"Document History": "ドキュメント履歴"
"Document History": "ドキュメント履歴"
},
"menus": {
"Reference List": "参照リスト",
@ -548,7 +579,7 @@
"You do not have edit access to this document": "このドキュメントの編集権限がありません",
"Add Widget to Page": "ページにウィジェットを追加する",
"Add Page": "ページを追加",
"Document owners can attempt to recover the document. [{{error}}]": "ドキュメントの所有者は、ドキュメントの回復を試みることができます。[{{error}}]",
"Document owners can attempt to recover the document. [{{error}}]": "ドキュメントのオーナーは、ドキュメントの回復を試みることができます。[{{error}}]",
"Reload": "再読み込み",
"Error accessing document": "ドキュメントへのアクセスエラー",
"Enter recovery mode": "回復モードに入る"
@ -722,13 +753,13 @@
"Save Copy": "コピーを保存"
},
"UserManagerModel": {
"View & Edit": "表示と編集",
"View & Edit": "閲覧と編集",
"Owner": "オーナー",
"None": "なし",
"View Only": "閲覧のみ",
"No Default Access": "デフォルトアクセスなし",
"Viewer": "ビューア",
"Editor": "エディター"
"Viewer": "閲覧者",
"Editor": "編集者"
},
"DocumentUsage": {
"Data Size": "データサイズ",
@ -899,7 +930,7 @@
"Close": "閉じる",
"Cancel": "キャンセル",
"Apply on changes to:": "変更を適用する:",
"Apply to new records": "新しいレコードに適用する",
"Apply to new records": "新しいレコードに適用",
"OK": "OK",
"Current field ": "現在のフィールド ",
"Any field": "任意のフィールド",
@ -913,7 +944,13 @@
"Click the Add New button to create new documents or workspaces, or import data.": "「新規追加」ボタンをクリックして、新しいドキュメントまたはワークスペースを作成するか、データをインポートします。",
"Apply conditional formatting to rows based on formulas.": "数式に基づいて条件付き書式を行に適用します。",
"Click on “Open row styles” to apply conditional formatting to rows.": "行に条件付き書式を適用するには、「行書式を開く」をクリックする。",
"Cells in a reference column always identify an {{entire}} record in that table, but you may select which column from that record to show.": "参照列のセルは常にそのテーブル内の {{entire}} レコードを識別しますが、そのレコードからどの列を表示するかを選択することもできます。"
"Cells in a reference column always identify an {{entire}} record in that table, but you may select which column from that record to show.": "参照列のセルは常にそのテーブル内の {{entire}} レコードを識別しますが、そのレコードからどの列を表示するかを選択することもできます。",
"Formulas support many Excel functions, full Python syntax, and include a helpful AI Assistant.": "数式は多くの Excel 関数、完全な Python 構文をサポートし、便利な AI アシスタントが含まれています。",
"A UUID is a randomly-generated string that is useful for unique identifiers and link keys.": "UUIDはランダムに生成される文字列で、一意の識別子やキーとして役立ちます。",
"The total size of all data in this document, excluding attachments.": "添付ファイルを除く、このドキュメント内のすべてのデータの合計サイズ。",
"Lookups return data from related tables.": "Lookupは関連テーブルからデータを返します。",
"Reference columns are the key to {{relational}} data in Grist.": "参照列は、Grist の {{relational}} データへのキーです。",
"These rules are applied after all column rules have been processed, if applicable.": "これらのルールは、列ルールの処理が終わった後に適用されます。"
},
"AppHeader": {
"Personal Site": "個人サイト",
@ -928,7 +965,13 @@
"You do not have edit access to this document": "このドキュメントへのアクセス権がありません",
"Delete {{formattedTableName}} data, and remove it from all pages?": "{{formattedTableName}} データを削除し、すべてのページから削除しますか?",
"Click to copy": "クリックしてコピー",
"Table ID copied to clipboard": "クリップボードにテーブルIDをコピーしました"
"Table ID copied to clipboard": "クリップボードにテーブルIDをコピーしました",
"Edit Record Card": "レコードカードを編集",
"Record Card": "レコードカード",
"Record Card Disabled": "レコードカードを無効化",
"Remove Table": "テーブルを削除",
"Rename Table": "テーブル名を編集",
"{{action}} Record Card": "レコードカードを{{action}}"
},
"NTextBox": {
"false": "false",
@ -1078,5 +1121,17 @@
},
"sendToDrive": {
"Sending file to Google Drive": "Googleドライブにファイルを送信する"
},
"CardContextMenu": {
"Insert card": "カードを挿入",
"Duplicate card": "カードを複製",
"Delete card": "カードを削除"
},
"FormView": {
"Publish your form?": "このフォームを公開しますか?",
"Publish": "公開"
},
"Menu": {
"Paragraph": "段落"
}
}

@ -36,7 +36,8 @@
"Permission to edit document structure": "Разрешение на редактирование структуры документа",
"When adding table rules, automatically add a rule to grant OWNER full access.": "При добавлении правил таблицы, автоматически добавить правило для предоставления ВЛАДЕЛЬЦУ полного доступа.",
"This default should be changed if editors' access is to be limited. ": "Это значение по умолчанию следует изменить, если требуется ограничить доступ редакторов. ",
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "Позволяет редакторам редактировать структуру (например, изменять и удалять таблицы, столбцы, макеты) и писать формулы, которые предоставляют доступ ко всем данным независимо от ограничений на чтение."
"Allow editors to edit structure (e.g. modify and delete tables, columns, layouts), and to write formulas, which give access to all data regardless of read restrictions.": "Позволяет редакторам редактировать структуру (например, изменять и удалять таблицы, столбцы, макеты) и писать формулы, которые предоставляют доступ ко всем данным независимо от ограничений на чтение.",
"Add Table-wide Rule": "Добавить обще-табличное правило"
},
"ACUserManager": {
"Enter email address": "Введите адрес электронной почты",
@ -1119,7 +1120,11 @@
"Lookups return data from related tables.": "Lookups возвращают данные из связанных таблиц.",
"Use reference columns to relate data in different tables.": "Используйте ссылочные столбцы для сопоставления данных в разных таблицах.",
"You can choose from widgets available to you in the dropdown, or embed your own by providing its full URL.": "Вы можете выбрать виджеты, доступные вам в раскрывающемся списке, или встроить свой собственный, указав его полный URL-адрес.",
"Formulas support many Excel functions, full Python syntax, and include a helpful AI Assistant.": "Формулы поддерживают множество функций Excel, полный синтаксис Python и включает полезного помощника AI."
"Formulas support many Excel functions, full Python syntax, and include a helpful AI Assistant.": "Формулы поддерживают множество функций Excel, полный синтаксис Python и включает полезного помощника AI.",
"Build simple forms right in Grist and share in a click with our new widget. {{learnMoreButton}}": "Создавайте простые формы прямо в Grist и делитесь ими одним щелчком мыши с помощью нашего нового виджета.. {{learnMoreButton}}",
"Forms are here!": "Формы уже здесь!",
"Learn more": "Узнать больше",
"These rules are applied after all column rules have been processed, if applicable.": "Эти правила применяются после обработки всех правил столбцов, если это применимо."
},
"DescriptionConfig": {
"DESCRIPTION": "ОПИСАНИЕ"
@ -1342,5 +1347,33 @@
},
"Editor": {
"Delete": "Удалить"
},
"FormConfig": {
"Field rules": "Правила полей",
"Required field": "Обязательное поле"
},
"CustomView": {
"Some required columns aren't mapped": "Некоторые обязательные столбцы не сопоставлены",
"To use this widget, please map all non-optional columns from the creator panel on the right.": "Чтобы использовать этот виджет, сопоставьте все необязательные столбцы с панели создателя справа."
},
"FormContainer": {
"Build your own form": "Создайте свою собственную форму",
"Powered by": "Разработано"
},
"FormModel": {
"There was a problem loading the form.": "Возникла проблема с загрузкой формы.",
"You don't have access to this form.": "У вас нет доступа к этой форме.",
"Oops! The form you're looking for doesn't exist.": "Ой! Форма, которую вы ищете, не существует.",
"Oops! This form is no longer published.": "Ой! Эта форма больше не публикуется."
},
"FormErrorPage": {
"Error": "Ошибка"
},
"FormPage": {
"There was an error submitting your form. Please try again.": "При отправке формы произошла ошибка. Пожалуйста, попробуйте еще раз."
},
"FormSuccessPage": {
"Form Submitted": "Форма отправлена",
"Thank you! Your response has been recorded.": "Спасибо! Ваш ответ учтен."
}
}

@ -644,7 +644,9 @@
"However, it appears to be already identical.": "Vendar se zdi, da je že identična.",
"Update Original": "Posodobitev izvirnika",
"You do not have write access to this site": "Nimate dovoljenja za pisanje za to spletno mesto",
"Download full document and history": "Prenesite celoten dokument in zgodovino"
"Download full document and history": "Prenesite celoten dokument in zgodovino",
"Download": "Prenesi",
"Download document": "Prenesi dokument"
},
"SortConfig": {
"Add Column": "Dodaj stolpec",

@ -0,0 +1,52 @@
import {assert, driver} from 'mocha-webdriver';
import * as gu from 'test/nbrowser/gristUtils';
import {server, setupTestSuite} from 'test/nbrowser/testUtils';
import * as testUtils from 'test/server/testUtils';
describe('Boot', function() {
this.timeout(30000);
setupTestSuite();
let oldEnv: testUtils.EnvironmentSnapshot;
afterEach(() => gu.checkForErrors());
async function hasPrompt() {
assert.include(
await driver.findContentWait('p', /diagnostics page/, 2000).getText(),
'A diagnostics page can be made available');
}
it('gives prompt about how to enable boot page', async function() {
await driver.get(`${server.getHost()}/boot`);
await hasPrompt();
});
describe('with a GRIST_BOOT_KEY', function() {
before(async function() {
oldEnv = new testUtils.EnvironmentSnapshot();
process.env.GRIST_BOOT_KEY = 'lala';
await server.restart();
});
after(async function() {
oldEnv.restore();
await server.restart();
});
it('gives prompt when key is missing', async function() {
await driver.get(`${server.getHost()}/boot`);
await hasPrompt();
});
it('gives prompt when key is wrong', async function() {
await driver.get(`${server.getHost()}/boot/bilbo`);
await hasPrompt();
});
it('gives page when key is right', async function() {
await driver.get(`${server.getHost()}/boot/lala`);
await driver.findContentWait('h2', /Grist is reachable/, 2000);
});
});
});

@ -25,7 +25,7 @@ import {createClient, RedisClient} from 'redis';
import * as sinon from 'sinon';
import {createInitialDb, removeConnection, setUpDB} from 'test/gen-server/seed';
import {createTmpDir, getGlobalPluginManager} from 'test/server/docTools';
import {setTmpLogLevel, useFixtureDoc} from 'test/server/testUtils';
import {EnvironmentSnapshot, setTmpLogLevel, useFixtureDoc} from 'test/server/testUtils';
import {waitForIt} from 'test/server/wait';
import uuidv4 from "uuid/v4";
@ -273,6 +273,17 @@ class TestStore {
private _externalStorageCreate: (purpose: 'doc'|'meta', extraPrefix: string) => ExternalStorage|undefined) {
}
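// Runs fn() between begin() and end(): begin() simulates doc worker startup, and the
// finally block guarantees that end() (shutdown) runs even if fn() throws.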
public async run<T>(fn: () => Promise<T>): Promise<T> {
await this.begin();
let result;
try {
result = await fn();
} finally {
await this.end();
}
return result;
}
// Simulates doc worker startup.
public async begin() {
await this.end();
@ -366,6 +377,7 @@ describe('HostedStorageManager', function() {
describe(storage, function() {
const sandbox = sinon.createSandbox();
let oldEnv: EnvironmentSnapshot;
const workerId = 'dw17';
let cli: RedisClient;
@ -376,6 +388,7 @@ describe('HostedStorageManager', function() {
before(async function() {
if (!process.env.TEST_REDIS_URL) { this.skip(); return; }
cli = createClient(process.env.TEST_REDIS_URL);
oldEnv = new EnvironmentSnapshot();
await cli.flushdbAsync();
workers = new DocWorkerMap([cli]);
await workers.addWorker({
@ -439,6 +452,7 @@ describe('HostedStorageManager', function() {
});
afterEach(async function() {
oldEnv.restore();
sandbox.restore();
if (store) {
await store.end();
@ -468,94 +482,105 @@ describe('HostedStorageManager', function() {
assert.equal(await getRedisChecksum(docId), 'null');
// Create an empty document when checksum in redis is 'null'.
await store.begin();
await store.docManager.fetchDoc(docSession, docId);
assert(await store.waitForUpdates());
const checksum = await getRedisChecksum(docId);
assert.notEqual(checksum, 'null');
await store.end();
// Check if we nobble the expected checksum then fetch eventually errors.
const checksum = await store.run(async () => {
await store.docManager.fetchDoc(docSession, docId);
assert(await store.waitForUpdates());
const checksum = await getRedisChecksum(docId);
assert.notEqual(checksum, 'null');
return checksum;
});
// Check what happens when we nobble the expected checksum.
await setRedisChecksum(docId, 'nobble');
await store.removeAll();
await store.begin();
await assert.isRejected(store.docManager.fetchDoc(docSession, docId),
/operation failed to become consistent/);
await store.end();
// With GRIST_SKIP_REDIS_CHECKSUM_MISMATCH set, the fetch should work
process.env.GRIST_SKIP_REDIS_CHECKSUM_MISMATCH = 'true';
await store.run(async () => {
await assert.isFulfilled(store.docManager.fetchDoc(docSession, docId));
});
// By default, the fetch should eventually error.
delete process.env.GRIST_SKIP_REDIS_CHECKSUM_MISMATCH;
await store.run(async () => {
await assert.isRejected(store.docManager.fetchDoc(docSession, docId),
/operation failed to become consistent/);
});
// Check we get the document back on fresh start if checksum is correct.
await setRedisChecksum(docId, checksum);
await store.removeAll();
await store.begin();
await store.docManager.fetchDoc(docSession, docId);
await store.end();
await store.run(async () => {
await store.docManager.fetchDoc(docSession, docId);
});
});
it('can save modifications', async function() {
await store.begin();
await store.run(async () => {
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
await workers.assignDocWorker('Hello2');
await workers.assignDocWorker('Hello2');
let doc = await store.docManager.fetchDoc(docSession, 'Hello');
let doc2 = await store.docManager.fetchDoc(docSession, 'Hello2');
await doc.docStorage.exec("update Table1 set A = 'magic_word' where id = 1");
await doc2.docStorage.exec("insert into Table1(id) values(42)");
await store.end();
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
const doc2 = await store.docManager.fetchDoc(docSession, 'Hello2');
await doc.docStorage.exec("update Table1 set A = 'magic_word' where id = 1");
await doc2.docStorage.exec("insert into Table1(id) values(42)");
return { doc, doc2 };
});
await store.removeAll();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, 'Hello');
let result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word');
doc2 = await store.docManager.fetchDoc(docSession, 'Hello2');
result = await doc2.docStorage.get("select id from Table1");
assert.equal(result!.id, 42);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
let result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word');
const doc2 = await store.docManager.fetchDoc(docSession, 'Hello2');
result = await doc2.docStorage.get("select id from Table1");
assert.equal(result!.id, 42);
});
});
it('can save modifications with interfering backup file', async function() {
await store.begin();
await store.run(async () => {
// There was a bug where if a corrupt/truncated backup file was created, all future
// backups would fail. This tickles the condition and makes sure backups now succeed.
await fse.writeFile(path.join(tmpDir, 'Hello.grist-backup'), 'not a sqlite file');
// There was a bug where if a corrupt/truncated backup file was created, all future
// backups would fail. This tickles the condition and makes sure backups now succeed.
await fse.writeFile(path.join(tmpDir, 'Hello.grist-backup'), 'not a sqlite file');
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'magic_word2' where id = 1");
});
let doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'magic_word2' where id = 1");
await store.end(); // S3 push will happen prior to this returning.
// The S3 push should have happened during store.run()
await store.removeAll();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, 'Hello');
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word2');
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word2');
});
});
it('survives if there is a doc marked dirty that turns out to be clean', async function() {
await store.begin();
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
await store.run(async () => {
await workers.assignDocWorker('Hello');
await useFixtureDoc('Hello.grist', store.storageManager);
let doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'magic_word' where id = 1");
await store.end();
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'magic_word' where id = 1");
});
await store.removeAll();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, 'Hello');
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word');
store.docManager.markAsChanged(doc);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'magic_word');
store.docManager.markAsChanged(doc);
});
// The real test is whether this test manages to complete.
});
@ -564,39 +589,39 @@ describe('HostedStorageManager', function() {
await workers.assignDocWorker('Hello');
// put a doc in s3
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager);
let doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'parallel' where id = 1");
await store.end();
await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager);
const doc = await store.docManager.fetchDoc(docSession, 'Hello');
await doc.docStorage.exec("update Table1 set A = 'parallel' where id = 1");
});
// now open it many times in parallel
await store.removeAll();
await store.begin();
const docs = Promise.all([
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
]);
await assert.isFulfilled(docs);
doc = (await docs)[0];
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'parallel');
await store.end();
await store.run(async () => {
const docs = Promise.all([
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
store.docManager.fetchDoc(docSession, 'Hello'),
]);
await assert.isFulfilled(docs);
const doc = (await docs)[0];
const result = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(result!.A, 'parallel');
});
// To be sure we are checking something, let's call prepareLocalDoc directly
// on storage manager and make sure it fails.
await store.removeAll();
await store.begin();
const preps = Promise.all([
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello')
]);
await assert.isRejected(preps, /in parallel/);
await store.end();
await store.run(async () => {
const preps = Promise.all([
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello'),
store.storageManager.prepareLocalDoc('Hello')
]);
await assert.isRejected(preps, /in parallel/);
});
});
it ('can delete a document', async function() {
@ -604,29 +629,29 @@ describe('HostedStorageManager', function() {
await workers.assignDocWorker(docId);
// Create a document
await store.begin();
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
});
const docPath = store.getDocPath(docId);
const ext = store.storageManager.testGetExternalStorage();
// Check that the document exists on filesystem and in external store.
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal(await fse.pathExists(docPath), true);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), true);
await waitForIt(async () => assert.equal(await ext.exists(docId), true), 20000);
await doc.docStorage.exec("insert into Table1(id) values(43)");
// Now delete the document, and check it no longer exists on filesystem or external store.
await store.docManager.deleteDoc(null, docId, true);
assert.equal(await fse.pathExists(docPath), false);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), false);
assert.equal(await getRedisChecksum(docId), DELETED_TOKEN);
await waitForIt(async () => assert.equal(await ext.exists(docId), false), 20000);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal(await fse.pathExists(docPath), true);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), true);
await waitForIt(async () => assert.equal(await ext.exists(docId), true), 20000);
await doc.docStorage.exec("insert into Table1(id) values(43)");
// Now delete the document, and check it no longer exists on filesystem or external store.
await store.docManager.deleteDoc(null, docId, true);
assert.equal(await fse.pathExists(docPath), false);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), false);
assert.equal(await getRedisChecksum(docId), DELETED_TOKEN);
await waitForIt(async () => assert.equal(await ext.exists(docId), false), 20000);
});
// As far as the underlying storage is concerned it should be
// possible to recreate a doc with the same id after deletion.
@ -634,55 +659,53 @@ describe('HostedStorageManager', function() {
// document it must exist in the db - however we'll need to watch
// out for caching.
// TODO: it could be worth tweaking fetchDoc so creation is explicit.
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
await store.end();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal(await fse.pathExists(docPath), true);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), true);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
});
await store.run(async () => {
await store.docManager.fetchDoc(docSession, docId);
assert.equal(await fse.pathExists(docPath), true);
assert.equal(await fse.pathExists(docPath + '-hash-doc'), true);
});
});
it('individual document close is orderly', async function() {
const docId = `create-${uuidv4()}`;
await workers.assignDocWorker(docId);
await store.begin();
let doc = await store.docManager.fetchDoc(docSession, docId);
await store.closeDoc(doc);
const checksum1 = await getRedisChecksum(docId);
assert.notEqual(checksum1, 'null');
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
// Add an attachment file with no corresponding metadata. It should be deleted when shutting down.
await doc.docStorage.exec("insert into _gristsys_Files(id, ident) values(23, 'foo')");
let files = await doc.docStorage.all("select * from _gristsys_Files");
assert.isNotEmpty(files);
await store.closeDoc(doc);
const checksum2 = await getRedisChecksum(docId);
assert.notEqual(checksum1, checksum2);
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(43)");
// Attachment file should have been deleted on previous close.
files = await doc.docStorage.all("select * from _gristsys_Files");
assert.isEmpty(files);
const asyncClose = store.closeDoc(doc); // this time, don't explicitly wait for closeDoc.
doc = await store.docManager.fetchDoc(docSession, docId);
const checksum3 = await getRedisChecksum(docId);
assert.notEqual(checksum2, checksum3);
await asyncClose;
await store.end();
await store.run(async () => {
let doc = await store.docManager.fetchDoc(docSession, docId);
await store.closeDoc(doc);
const checksum1 = await getRedisChecksum(docId);
assert.notEqual(checksum1, 'null');
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(42)");
// Add an attachment file with no corresponding metadata. It should be deleted when shutting down.
await doc.docStorage.exec("insert into _gristsys_Files(id, ident) values(23, 'foo')");
let files = await doc.docStorage.all("select * from _gristsys_Files");
assert.isNotEmpty(files);
await store.closeDoc(doc);
const checksum2 = await getRedisChecksum(docId);
assert.notEqual(checksum1, checksum2);
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("insert into Table1(id) values(43)");
// Attachment file should have been deleted on previous close.
files = await doc.docStorage.all("select * from _gristsys_Files");
assert.isEmpty(files);
const asyncClose = store.closeDoc(doc); // this time, don't explicitly wait for closeDoc.
doc = await store.docManager.fetchDoc(docSession, docId);
const checksum3 = await getRedisChecksum(docId);
assert.notEqual(checksum2, checksum3);
await asyncClose;
});
});
// Viewing a document should not mark it as changed (unless a document-level migration
@ -691,24 +714,22 @@ describe('HostedStorageManager', function() {
const docId = `create-${uuidv4()}`;
await workers.assignDocWorker(docId);
await store.begin();
const markAsChanged: {callCount: number} = store.storageManager.markAsChanged as any;
await store.run(async () => {
const markAsChanged: {callCount: number} = store.storageManager.markAsChanged as any;
const changesInitial = markAsChanged.callCount;
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
await store.closeDoc(doc);
const changesAfterCreation = markAsChanged.callCount;
assert.isAbove(changesAfterCreation, changesInitial);
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
await store.closeDoc(doc);
const changesAfterViewing = markAsChanged.callCount;
assert.equal(changesAfterViewing, changesAfterCreation);
await store.end();
const changesInitial = markAsChanged.callCount;
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
await store.closeDoc(doc);
const changesAfterCreation = markAsChanged.callCount;
assert.isAbove(changesAfterCreation, changesInitial);
doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
await store.closeDoc(doc);
const changesAfterViewing = markAsChanged.callCount;
assert.equal(changesAfterViewing, changesAfterCreation);
});
});
it('can fork documents', async function() {
@ -717,35 +738,35 @@ describe('HostedStorageManager', function() {
await workers.assignDocWorker(docId);
await workers.assignDocWorker(forkId);
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("update Table1 set A = 'trunk' where id = 1");
await store.end();
await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("update Table1 set A = 'trunk' where id = 1");
});
await store.begin();
await store.docManager.storageManager.prepareFork(docId, forkId);
doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
await doc.docStorage.exec("update Table1 set A = 'fork' where id = 1");
await store.end();
await store.run(async () => {
await store.docManager.storageManager.prepareFork(docId, forkId);
const doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
await doc.docStorage.exec("update Table1 set A = 'fork' where id = 1");
});
await store.removeAll();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('fork', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
await store.end();
await store.run(async () => {
let doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('fork', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
});
// Check that the trunk can be replaced by a fork
await store.removeAll();
await store.begin();
await store.storageManager.replace(docId, {sourceDocId: forkId});
doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal('fork', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
await store.end();
await store.run(async () => {
await store.storageManager.replace(docId, {sourceDocId: forkId});
const doc = await store.docManager.fetchDoc(docSession, docId);
assert.equal('fork', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
});
});
it('can persist a fork with no modifications', async function() {
@ -755,16 +776,16 @@ describe('HostedStorageManager', function() {
await workers.assignDocWorker(forkId);
// Create a document.
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("update Table1 set A = 'trunk' where id = 1");
await store.end();
await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.docStorage.exec("update Table1 set A = 'trunk' where id = 1");
});
// Create a fork with no modifications.
await store.begin();
await store.docManager.storageManager.prepareFork(docId, forkId);
await store.end();
await store.run(async () => {
await store.docManager.storageManager.prepareFork(docId, forkId);
});
await store.waitForUpdates();
await store.removeAll();
@ -772,10 +793,10 @@ describe('HostedStorageManager', function() {
await fse.remove(store.getDocPath(docId));
// Make sure opening the fork works as expected.
await store.begin();
doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal('trunk', (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
});
await store.removeAll();
});
@ -792,70 +813,72 @@ describe('HostedStorageManager', function() {
await workers.assignDocWorker(forkId2);
await workers.assignDocWorker(forkId3);
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
let doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
for (let i = 0; i < forks; i++) {
await doc.docStorage.exec(`update Table1 set A = 'v${i}' where id = 1`);
await doc.testKeepOpen();
await store.waitForUpdates();
}
await store.end();
const doc = await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
for (let i = 0; i < forks; i++) {
await doc.docStorage.exec(`update Table1 set A = 'v${i}' where id = 1`);
await doc.testKeepOpen();
await store.waitForUpdates();
}
return doc;
});
const {snapshots} = await store.storageManager.getSnapshots(doc.docName);
assert.isAtLeast(snapshots.length, forks + 1); // May be 1 greater depending on how long
// it takes to run initial migrations.
await store.begin();
for (let i = forks - 1; i >= 0; i--) {
const snapshot = snapshots.shift()!;
const forkId = snapshot.docId;
await workers.assignDocWorker(forkId);
doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal(`v${i}`, (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
}
await store.end();
await store.run(async () => {
for (let i = forks - 1; i >= 0; i--) {
const snapshot = snapshots.shift()!;
const forkId = snapshot.docId;
await workers.assignDocWorker(forkId);
const doc = await store.docManager.fetchDoc(docSession, forkId);
assert.equal(`v${i}`, (await doc.docStorage.get("select A from Table1 where id = 1"))!.A);
}
});
});
it('can access snapshots with old schema versions', async function() {
const snapshotId = `World~v=1`;
await workers.assignDocWorker(snapshotId);
await store.begin();
// Pretend we have a snapshot of World-v33.grist and fetch/load it.
await useFixtureDoc('World-v33.grist', store.storageManager, `${snapshotId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, snapshotId);
// Check that the snapshot isn't broken.
assert.doesNotThrow(async () => await doc.waitForInitialization());
// Check that the snapshot was migrated to the latest schema version.
assert.equal(
SCHEMA_VERSION,
(await doc.docStorage.get("select schemaVersion from _grist_DocInfo where id = 1"))!.schemaVersion
);
// Check that the document is actually a snapshot.
await assert.isRejected(doc.replace(docSession, {sourceDocId: 'docId'}),
/Snapshots cannot be replaced/);
await assert.isRejected(doc.applyUserActions(docSession, [['AddTable', 'NewTable', [{id: 'A'}]]]),
/pyCall is not available in snapshots/);
await store.end();
await store.run(async () => {
// Pretend we have a snapshot of World-v33.grist and fetch/load it.
await useFixtureDoc('World-v33.grist', store.storageManager, `${snapshotId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, snapshotId);
// Check that the snapshot isn't broken.
assert.doesNotThrow(async () => await doc.waitForInitialization());
// Check that the snapshot was migrated to the latest schema version.
assert.equal(
SCHEMA_VERSION,
(await doc.docStorage.get("select schemaVersion from _grist_DocInfo where id = 1"))!.schemaVersion
);
// Check that the document is actually a snapshot.
await assert.isRejected(doc.replace(docSession, {sourceDocId: 'docId'}),
/Snapshots cannot be replaced/);
await assert.isRejected(doc.applyUserActions(docSession, [['AddTable', 'NewTable', [{id: 'A'}]]]),
/pyCall is not available in snapshots/);
});
});
it('can prune snapshots', async function() {
const versions = 8;
const docId = `create-${uuidv4()}`;
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
for (let i = 0; i < versions; i++) {
await doc.docStorage.exec(`update Table1 set A = 'v${i}' where id = 1`);
await doc.testKeepOpen();
await store.waitForUpdates();
}
await store.storageManager.testWaitForPrunes();
await store.end();
const doc = await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
for (let i = 0; i < versions; i++) {
await doc.docStorage.exec(`update Table1 set A = 'v${i}' where id = 1`);
await doc.testKeepOpen();
await store.waitForUpdates();
}
await store.storageManager.testWaitForPrunes();
return doc;
});
await waitForIt(async () => {
const {snapshots} = await store.storageManager.getSnapshots(doc.docName);
// Should be keeping at least five, and then maybe 1 more if the hour changed
@ -878,20 +901,20 @@ describe('HostedStorageManager', function() {
// Create a series of versions of a document, and fetch them sequentially
// so that they are potentially available as stale values.
await store.begin();
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
let doc = await store.docManager.fetchDoc(docSession, docId);
await store.end();
await store.run(async () => {
await useFixtureDoc('Hello.grist', store.storageManager, `${docId}.grist`);
await store.docManager.fetchDoc(docSession, docId);
});
for (let i = 0; i < 3; i++) {
await store.removeAll();
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
if (i > 0) {
const prev = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(prev!.A, `magic_word${i - 1}`);
}
await doc.docStorage.exec(`update Table1 set A = 'magic_word${i}' where id = 1`);
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, docId);
if (i > 0) {
const prev = await doc.docStorage.get("select A from Table1 where id = 1");
assert.equal(prev!.A, `magic_word${i - 1}`);
}
await doc.docStorage.exec(`update Table1 set A = 'magic_word${i}' where id = 1`);
});
}
// Wipe all checksums and make sure (1) we don't get any errors and (2) the
@ -903,10 +926,10 @@ describe('HostedStorageManager', function() {
// Optionally wipe all local files.
await store.removeAll();
}
await store.begin();
doc = await store.docManager.fetchDoc(docSession, docId);
result = (await doc.docStorage.get("select A from Table1 where id = 1"))?.A;
await store.end();
await store.run(async () => {
const doc = await store.docManager.fetchDoc(docSession, docId);
result = (await doc.docStorage.get("select A from Table1 where id = 1"))?.A;
});
if (result !== 'magic_word2') {
throw new Error(`inconsistent result: ${result}`);
}
@ -917,16 +940,17 @@ describe('HostedStorageManager', function() {
it('can access metadata', async function() {
const docId = `create-${uuidv4()}`;
await store.begin();
// Use a doc that's up-to-date on storage migrations, but needs a python schema migration.
await useFixtureDoc('BlobMigrationV8.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
const rec = await doc.fetchTable(makeExceptionalDocSession('system'), '_grist_DocInfo');
const tz = rec.tableData[3].timezone[0];
const h = (await doc.getRecentStates(makeExceptionalDocSession('system')))[0].h;
await store.docManager.makeBackup(doc, 'hello');
await store.end();
const { tz, h, doc } = await store.run(async () => {
// Use a doc that's up-to-date on storage migrations, but needs a python schema migration.
await useFixtureDoc('BlobMigrationV8.grist', store.storageManager, `${docId}.grist`);
const doc = await store.docManager.fetchDoc(docSession, docId);
await doc.waitForInitialization();
const rec = await doc.fetchTable(makeExceptionalDocSession('system'), '_grist_DocInfo');
const tz = rec.tableData[3].timezone[0];
const h = (await doc.getRecentStates(makeExceptionalDocSession('system')))[0].h;
await store.docManager.makeBackup(doc, 'hello');
return { tz, h, doc };
});
const {snapshots} = await store.storageManager.getSnapshots(doc.docName);
assert.equal(snapshots[0]?.metadata?.label, 'hello');
// There can be extra snapshots, depending on timing.
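All of the hunks above apply the same refactor: the paired `await store.begin()` / `await store.end()` calls are replaced by a single `store.run(async () => { ... })` callback that owns the begin/end lifecycle and returns whatever value the rest of the test still needs (for example the fetched `doc`). The helper itself is defined in the test harness and is not part of this diff; as a rough, hypothetical sketch only, it presumably wraps the old pattern along these lines:

```
// Hypothetical sketch only -- the real helper lives in the test harness and
// is not shown in this diff; names and signatures here are assumptions.
async function run<T>(store: {begin(): Promise<void>, end(): Promise<void>},
                      fn: () => Promise<T>): Promise<T> {
  await store.begin();     // start the doc-store activity, as before
  try {
    return await fn();     // run the test body, forwarding its result
  } finally {
    await store.end();     // always flush and close, even if fn() throws
  }
}
```

Returning the callback's result is what lets hunks such as the snapshot-pruning test keep using `doc` after the block ends, while the `finally` ensures `end()` runs even when an assertion inside the callback fails.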
