grist-core (mirror of https://github.com/gristlabs/grist-core.git)

commit 7be0ee289d (parent bd474a382f)
support other SQLite wrappers, and various hooks needed by grist-static (#516)
app/client/DefaultHooks.ts (new file)
@@ -0,0 +1,11 @@
+import { UrlTweaks } from 'app/common/gristUrls';
+
+export interface IHooks {
+  iframeAttributes?: Record<string, any>,
+  fetch?: typeof fetch,
+  baseURI?: string,
+  urlTweaks?: UrlTweaks,
+}
+
+export const defaultHooks: IHooks = {
+};
app/client/Hooks.ts (new file)
@@ -0,0 +1,3 @@
+import {defaultHooks} from 'app/client/DefaultHooks';
+
+export const hooks = defaultHooks;
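For illustration (not part of this commit): a build like grist-static can supply its own implementation of these hooks, either by aliasing app/client/Hooks to a different module or by filling in the fields of the shared object at startup. Everything below other than IHooks is hypothetical.

```typescript
// Hypothetical override module that a static build might alias to 'app/client/Hooks'.
import { IHooks } from 'app/client/DefaultHooks';
import { hashUrlTweaks } from './hashUrlTweaks';       // hypothetical UrlTweaks implementation
import { fetchFromBundledDoc } from './staticFetch';   // hypothetical in-browser fetch

export const hooks: IHooks = {
  baseURI: 'https://example.org/grist-static/',        // serve locale files from a fixed base
  fetch: fetchFromBundledDoc as typeof fetch,          // answer API calls without a server
  iframeAttributes: { credentialless: true },          // extra attributes for custom widget iframes
  urlTweaks: hashUrlTweaks,                            // remap encoded/decoded URLs
};
```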
@@ -1,11 +1,12 @@
 import {get as getBrowserGlobals} from 'app/client/lib/browserGlobals';
 import {guessTimezone} from 'app/client/lib/guessTimezone';
 import {getSessionStorage} from 'app/client/lib/storage';
+import {newUserAPIImpl} from 'app/client/models/AppModel';
 import {getWorker} from 'app/client/models/gristConfigCache';
 import {CommResponseBase} from 'app/common/CommTypes';
 import * as gutil from 'app/common/gutil';
 import {addOrgToPath, docUrl, getGristConfig} from 'app/common/urlUtils';
-import {UserAPI, UserAPIImpl} from 'app/common/UserAPI';
+import {UserAPI} from 'app/common/UserAPI';
 import {Events as BackboneEvents} from 'backbone';
 import {Disposable} from 'grainjs';

@@ -25,7 +26,7 @@ async function getDocWorkerUrl(assignmentId: string|null): Promise<string|null>
   // never changes.
   if (assignmentId === null) { return docUrl(null); }

-  const api: UserAPI = new UserAPIImpl(getGristConfig().homeUrl!);
+  const api: UserAPI = newUserAPIImpl();
   return getWorker(api, assignmentId);
 }
@@ -1,5 +1,6 @@
 import BaseView from 'app/client/components/BaseView';
 import {GristDoc} from 'app/client/components/GristDoc';
+import {hooks} from 'app/client/Hooks';
 import {get as getBrowserGlobals} from 'app/client/lib/browserGlobals';
 import {ColumnRec, ViewSectionRec} from 'app/client/models/DocModel';
 import {AccessLevel, isSatisfied} from 'app/common/CustomWidget';
@@ -157,6 +158,7 @@ export class WidgetFrame extends DisposableWithEvents {
     return onElem(
       (this._iframe = dom('iframe', dom.cls('clipboard_focus'), dom.cls('custom_view'), {
         src: fullUrl,
+        ...hooks.iframeAttributes,
       }))
     );
   }
@@ -6,7 +6,7 @@ Object.assign(window.exposedModules, {
   grainjs: require('grainjs'),
   ko: require('knockout'),
   moment: require('moment-timezone'),
-  Comm: require('./components/Comm'),
+  Comm: require('app/client/components/Comm'),
   _loadScript: require('./lib/loadScript'),
   ConnectState: require('./models/ConnectState'),
 });
@@ -1,3 +1,4 @@
+import {hooks} from 'app/client/Hooks';
 import {getGristConfig} from 'app/common/urlUtils';
 import {DomContents} from 'grainjs';
 import i18next from 'i18next';
@@ -34,7 +35,7 @@ export async function setupLocale() {
   // Detect what is resolved languages to load.
   const languages = i18next.languages;
   // Fetch all json files (all of which should be already preloaded);
-  const loadPath = `${document.baseURI}locales/{{lng}}.{{ns}}.json`;
+  const loadPath = `${hooks.baseURI || document.baseURI}locales/{{lng}}.{{ns}}.json`;
   const pathsToLoad: Promise<any>[] = [];
   async function load(lang: string, n: string) {
     const resourceUrl = loadPath.replace('{{lng}}', lang.replace("-", "_")).replace('{{ns}}', n);
@@ -1,4 +1,5 @@
 import {BehavioralPromptsManager} from 'app/client/components/BehavioralPromptsManager';
+import {hooks} from 'app/client/Hooks';
 import {get as getBrowserGlobals} from 'app/client/lib/browserGlobals';
 import {makeT} from 'app/client/lib/localization';
 import {sessionStorageObs} from 'app/client/lib/localStorageObs';
@@ -131,7 +132,7 @@ export class TopAppModelImpl extends Disposable implements TopAppModel {

   constructor(
     window: {gristConfig?: GristLoadConfig},
-    public readonly api: UserAPI = new UserAPIImpl(getHomeUrl()),
+    public readonly api: UserAPI = newUserAPIImpl(),
   ) {
     super();
     setErrorNotifier(this.notifier);
@@ -436,6 +437,12 @@ export function getHomeUrl(): string {
   return (gristConfig && gristConfig.homeUrl) || `${protocol}//${host}`;
 }

+export function newUserAPIImpl(): UserAPIImpl {
+  return new UserAPIImpl(getHomeUrl(), {
+    fetch: hooks.fetch,
+  });
+}
+
 export function getOrgNameOrGuest(org: Organization|null, user: FullUser|null) {
   if (!org) { return ''; }
   if (user && user.anonymous && org.owner && org.owner.id === user.id) {
@@ -23,6 +23,7 @@
  * Note that the form of URLs depends on the settings in window.gristConfig object.
  */
 import {unsavedChanges} from 'app/client/components/UnsavedChanges';
+import {hooks} from 'app/client/Hooks';
 import {UrlState} from 'app/client/lib/UrlState';
 import {decodeUrl, encodeUrl, getSlugIfNeeded, GristLoadConfig, IGristUrlState,
         parseFirstUrlPart} from 'app/common/gristUrls';
@@ -134,7 +135,9 @@ export class UrlStateImpl {
    */
   public encodeUrl(state: IGristUrlState, baseLocation: Location | URL): string {
     const gristConfig = this._window.gristConfig || {};
-    return encodeUrl(gristConfig, state, baseLocation);
+    return encodeUrl(gristConfig, state, baseLocation, {
+      tweaks: hooks.urlTweaks,
+    });
   }

   /**
@@ -142,7 +145,9 @@ export class UrlStateImpl {
    */
   public decodeUrl(location: Location | URL): IGristUrlState {
     const gristConfig = this._window.gristConfig || {};
-    return decodeUrl(gristConfig, location);
+    return decodeUrl(gristConfig, location, {
+      tweaks: hooks.urlTweaks,
+    });
   }

   /**
@@ -84,7 +84,7 @@ export class App extends DisposableWithEvents {

     const isHelpPaneVisible = ko.observable(false);

-    G.document.querySelector('#grist-logo-wrapper').remove();
+    G.document.querySelector('#grist-logo-wrapper')?.remove();

     // Help pop-up pane
     const helpDiv = document.body.appendChild(
@@ -198,7 +198,8 @@ export function encodeUrl(gristConfig: Partial<GristLoadConfig>,
                           options: {
                             // make an api url - warning: just barely works, and
                             // only for documents
-                            api?: boolean
+                            api?: boolean,
+                            tweaks?: UrlTweaks,
                           } = {}): string {
   const url = new URL(baseLocation.href);
   const parts = ['/'];
@@ -269,8 +270,10 @@ export function encodeUrl(gristConfig: Partial<GristLoadConfig>,
     }
   }
   const queryStr = encodeQueryParams(queryParams);
+
   url.pathname = parts.join('');
   url.search = queryStr;
+
   if (state.hash) {
     // Project tests use hashes, so only set hash if there is an anchor.
     url.hash = hashParts.join('.');
@@ -285,13 +288,23 @@ export function encodeUrl(gristConfig: Partial<GristLoadConfig>,
   } else {
     url.hash = '';
   }
+  options.tweaks?.postEncode?.({
+    url,
+    parts,
+    state,
+    baseLocation,
+  });
   return url.href;
 }

 /**
  * Parse a URL location into an IGristUrlState object. See encodeUrl() documentation.
  */
-export function decodeUrl(gristConfig: Partial<GristLoadConfig>, location: Location | URL): IGristUrlState {
+export function decodeUrl(gristConfig: Partial<GristLoadConfig>, location: Location | URL, options?: {
+  tweaks?: UrlTweaks,
+}): IGristUrlState {
+  location = new URL(location.href); // Make sure location is a URL.
+  options?.tweaks?.preDecode?.({ url: location });
   const parts = location.pathname.slice(1).split('/');
   const map = new Map<string, string>();
   for (let i = 0; i < parts.length; i += 2) {
@@ -871,3 +884,28 @@ export function getSlugIfNeeded(doc: {id: string, urlId: string|null, name: stri
   if (!shouldIncludeSlug(doc)) { return; }
   return nameToSlug(doc.name);
 }
+
+/**
+ * It is possible we want to remap Grist URLs in some way - specifically,
+ * grist-static does this. We allow for a hook that is called after
+ * encoding state as a URL, and a hook that is called before decoding
+ * state from a URL.
+ */
+export interface UrlTweaks {
+  /**
+   * Tweak an encoded URL. Operates on the URL directly, in place.
+   */
+  postEncode?(options: {
+    url: URL,
+    parts: string[],
+    state: IGristUrlState,
+    baseLocation: Location | URL,
+  }): void;
+
+  /**
+   * Tweak a URL prior to decoding it. Operates on the URL directly, in place.
+   */
+  preDecode?(options: {
+    url: URL,
+  }): void;
+}
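For illustration (not part of this commit), a minimal UrlTweaks implementation of the kind grist-static needs: it moves Grist's path into the URL hash on encode and restores it before decode. The '/app/' page path is an assumption.

```typescript
import { UrlTweaks } from 'app/common/gristUrls';

export const hashUrlTweaks: UrlTweaks = {
  postEncode({ url }) {
    // e.g. https://example.org/o/docs/doc/abc -> https://example.org/app/#o/docs/doc/abc
    url.hash = url.pathname.replace(/^\//, '');
    url.pathname = '/app/';
  },
  preDecode({ url }) {
    // Undo the mapping so the regular decoder sees a normal Grist path.
    if (url.hash.length > 1) {
      url.pathname = '/' + url.hash.slice(1);
      url.hash = '';
    }
  },
};
```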
@@ -33,6 +33,13 @@ import * as util from 'util';
 export interface MarshalOptions {
   stringToBuffer?: boolean;
   version?: number;
+
+  // True if we want keys in dicts to be buffers.
+  // It is convenient to have some freedom here to simplify implementation
+  // of marshaling for some SQLite wrappers. This flag was initially
+  // introduced for a fork of Grist using better-sqlite3, and I don't
+  // remember exactly what the issues were.
+  keysAreBuffers?: boolean;
 }

 export interface UnmarshalOptions {
@@ -129,11 +136,13 @@ export class Marshaller {
   private _memBuf: MemBuffer;
   private readonly _floatCode: number;
   private readonly _stringCode: number;
+  private readonly _keysAreBuffers: boolean;

   constructor(options?: MarshalOptions) {
     this._memBuf = new MemBuffer(undefined);
     this._floatCode = options && options.version && options.version >= 2 ? marshalCodes.BFLOAT : marshalCodes.FLOAT;
     this._stringCode = options && options.stringToBuffer ? marshalCodes.STRING : marshalCodes.UNICODE;
+    this._keysAreBuffers = Boolean(options?.keysAreBuffers);
   }

   public dump(): Uint8Array {
@@ -261,7 +270,7 @@ export class Marshaller {
     const keys = Object.keys(obj);
     keys.sort();
     for (const key of keys) {
-      this.marshal(key);
+      this.marshal(this._keysAreBuffers ? Buffer.from(key) : key);
       this.marshal(obj[key]);
     }
     this._memBuf.writeUint8(marshalCodes.NULL);
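Usage sketch for the new flag (not from the commit): dictionary keys are converted to Buffers before marshalling, which sidesteps string-encoding differences in some SQLite wrappers.

```typescript
import * as marshal from 'app/common/marshal';

const marshaller = new marshal.Marshaller({version: 2, keysAreBuffers: true});
marshaller.marshal({id: 1, name: 'Widget'});   // keys 'id' and 'name' are marshalled as buffers
const payload: Uint8Array = marshaller.dump();
```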
@@ -13,7 +13,7 @@ export class AclRule extends BaseEntity {
   @PrimaryGeneratedColumn()
   public id: number;

-  @Column()
+  @Column({type: Number})
   public permissions: number;

   @OneToOne(type => Group, group => group.aclRule)
@@ -5,13 +5,13 @@ import {Organization} from './Organization';

 @Entity({name: 'aliases'})
 export class Alias extends BaseEntity {
-  @PrimaryColumn({name: 'org_id'})
+  @PrimaryColumn({name: 'org_id', type: Number})
   public orgId: number;

-  @PrimaryColumn({name: 'url_id'})
+  @PrimaryColumn({name: 'url_id', type: String})
   public urlId: string;

-  @Column({name: 'doc_id'})
+  @Column({name: 'doc_id', type: String})
   public docId: string;

   @ManyToOne(type => Document)
@@ -34,14 +34,14 @@ export class BillingAccount extends BaseEntity {
   @JoinColumn({name: 'product_id'})
   public product: Product;

-  @Column()
+  @Column({type: Boolean})
   public individual: boolean;

   // A flag for when all is well with the user's subscription.
   // Probably shouldn't use this to drive whether service is provided or not.
   // Strip recommends updating an end-of-service datetime every time payment
   // is received, adding on a grace period of some days.
-  @Column({name: 'in_good_standing', default: nativeValues.trueValue})
+  @Column({name: 'in_good_standing', type: Boolean, default: nativeValues.trueValue})
   public inGoodStanding: boolean;

   @Column({type: nativeValues.jsonEntityType, nullable: true})
@@ -10,14 +10,14 @@ export class BillingAccountManager extends BaseEntity {
   @PrimaryGeneratedColumn()
   public id: number;

-  @Column({name: 'billing_account_id'})
+  @Column({name: 'billing_account_id', type: Number})
   public billingAccountId: number;

   @ManyToOne(type => BillingAccount, { onDelete: 'CASCADE' })
   @JoinColumn({name: 'billing_account_id'})
   public billingAccount: BillingAccount;

-  @Column({name: 'user_id'})
+  @Column({name: 'user_id', type: Number})
   public userId: number;

   @ManyToOne(type => User, { onDelete: 'CASCADE' })
|
|||||||
@Entity({name: 'docs'})
|
@Entity({name: 'docs'})
|
||||||
export class Document extends Resource {
|
export class Document extends Resource {
|
||||||
|
|
||||||
@PrimaryColumn()
|
@PrimaryColumn({type: String})
|
||||||
public id: string;
|
public id: string;
|
||||||
|
|
||||||
@ManyToOne(type => Workspace)
|
@ManyToOne(type => Workspace)
|
||||||
@ -35,7 +35,7 @@ export class Document extends Resource {
|
|||||||
public aclRules: AclRuleDoc[];
|
public aclRules: AclRuleDoc[];
|
||||||
|
|
||||||
// Indicates whether the doc is pinned to the org it lives in.
|
// Indicates whether the doc is pinned to the org it lives in.
|
||||||
@Column({name: 'is_pinned', default: false})
|
@Column({name: 'is_pinned', type: Boolean, default: false})
|
||||||
public isPinned: boolean;
|
public isPinned: boolean;
|
||||||
|
|
||||||
// Property that may be returned when the doc is fetched to indicate the access the
|
// Property that may be returned when the doc is fetched to indicate the access the
|
||||||
|
@@ -9,7 +9,7 @@ export class Group extends BaseEntity {
   @PrimaryGeneratedColumn()
   public id: number;

-  @Column()
+  @Column({type: String})
   public name: string;

   @ManyToMany(type => User)
@@ -5,18 +5,18 @@ import {User} from "./User";
 @Entity({name: 'logins'})
 export class Login extends BaseEntity {

-  @PrimaryColumn()
+  @PrimaryColumn({type: Number})
   public id: number;

   // This is the normalized email address we use for equality and indexing.
-  @Column()
+  @Column({type: String})
   public email: string;

   // This is how the user's email address should be displayed.
-  @Column({name: 'display_email'})
+  @Column({name: 'display_email', type: String})
   public displayEmail: string;

-  @Column({name: 'user_id'})
+  @Column({name: 'user_id', type: Number})
   public userId: number;

   @ManyToOne(type => User)
|
|||||||
public id: number;
|
public id: number;
|
||||||
|
|
||||||
@Column({
|
@Column({
|
||||||
|
type: String,
|
||||||
nullable: true
|
nullable: true
|
||||||
})
|
})
|
||||||
public domain: string;
|
public domain: string;
|
||||||
@ -46,7 +47,7 @@ export class Organization extends Resource {
|
|||||||
@OneToMany(type => AclRuleOrg, aclRule => aclRule.organization)
|
@OneToMany(type => AclRuleOrg, aclRule => aclRule.organization)
|
||||||
public aclRules: AclRuleOrg[];
|
public aclRules: AclRuleOrg[];
|
||||||
|
|
||||||
@Column({name: 'billing_account_id'})
|
@Column({name: 'billing_account_id', type: Number})
|
||||||
public billingAccountId: number;
|
public billingAccountId: number;
|
||||||
|
|
||||||
@ManyToOne(type => BillingAccount)
|
@ManyToOne(type => BillingAccount)
|
||||||
|
@@ -11,10 +11,10 @@ export class Pref {
   // one, but we haven't marked them as so in the DB since the SQL standard frowns
   // on nullable primary keys (and Postgres doesn't support them). We could add
   // another primary key, but we don't actually need one.
-  @PrimaryColumn({name: 'user_id'})
+  @PrimaryColumn({name: 'user_id', type: Number})
   public userId: number|null;

-  @PrimaryColumn({name: 'org_id'})
+  @PrimaryColumn({name: 'org_id', type: Number})
   public orgId: number|null;

   @ManyToOne(type => User)
@@ -169,7 +169,7 @@ export class Product extends BaseEntity {
   @PrimaryGeneratedColumn()
   public id: number;

-  @Column()
+  @Column({type: String})
   public name: string;

   @Column({type: nativeValues.jsonEntityType})
|
|||||||
import {CommonProperties} from "app/common/UserAPI";
|
import {CommonProperties} from "app/common/UserAPI";
|
||||||
|
|
||||||
export class Resource extends BaseEntity {
|
export class Resource extends BaseEntity {
|
||||||
@Column()
|
@Column({type: String})
|
||||||
public name: string;
|
public name: string;
|
||||||
|
|
||||||
@Column({name: 'created_at', default: () => "CURRENT_TIMESTAMP"})
|
@Column({name: 'created_at', type: Date, default: () => "CURRENT_TIMESTAMP"})
|
||||||
public createdAt: Date;
|
public createdAt: Date;
|
||||||
|
|
||||||
@Column({name: 'updated_at', default: () => "CURRENT_TIMESTAMP"})
|
@Column({name: 'updated_at', type: Date, default: () => "CURRENT_TIMESTAMP"})
|
||||||
public updatedAt: Date;
|
public updatedAt: Date;
|
||||||
|
|
||||||
// a computed column which, when present, means the entity should be filtered out
|
// a computed column which, when present, means the entity should be filtered out
|
||||||
|
@ -3,10 +3,10 @@ import {Document} from "./Document";
|
|||||||
|
|
||||||
@Entity({name: 'secrets'})
|
@Entity({name: 'secrets'})
|
||||||
export class Secret extends BaseEntity {
|
export class Secret extends BaseEntity {
|
||||||
@PrimaryColumn()
|
@PrimaryColumn({type: String})
|
||||||
public id: string; // generally a UUID
|
public id: string; // generally a UUID
|
||||||
|
|
||||||
@Column({name: 'value'})
|
@Column({name: 'value', type: String})
|
||||||
public value: string;
|
public value: string;
|
||||||
|
|
||||||
@ManyToOne(_type => Document, { onDelete: 'CASCADE' })
|
@ManyToOne(_type => Document, { onDelete: 'CASCADE' })
|
||||||
|
@ -15,7 +15,7 @@ export class User extends BaseEntity {
|
|||||||
@PrimaryGeneratedColumn()
|
@PrimaryGeneratedColumn()
|
||||||
public id: number;
|
public id: number;
|
||||||
|
|
||||||
@Column()
|
@Column({type: String})
|
||||||
public name: string;
|
public name: string;
|
||||||
|
|
||||||
@Column({name: 'api_key', type: String, nullable: true})
|
@Column({name: 'api_key', type: String, nullable: true})
|
||||||
@ -46,7 +46,7 @@ export class User extends BaseEntity {
|
|||||||
})
|
})
|
||||||
public groups: Group[];
|
public groups: Group[];
|
||||||
|
|
||||||
@Column({name: 'is_first_time_user', default: false})
|
@Column({name: 'is_first_time_user', type: Boolean, default: false})
|
||||||
public isFirstTimeUser: boolean;
|
public isFirstTimeUser: boolean;
|
||||||
|
|
||||||
@Column({name: 'options', type: nativeValues.jsonEntityType, nullable: true})
|
@Column({name: 'options', type: nativeValues.jsonEntityType, nullable: true})
|
||||||
|
@ -314,7 +314,7 @@ export class ActionHistoryImpl implements ActionHistory {
|
|||||||
} finally {
|
} finally {
|
||||||
if (tip) {
|
if (tip) {
|
||||||
await this._db.run(`UPDATE _gristsys_ActionHistoryBranch SET actionRef = ?
|
await this._db.run(`UPDATE _gristsys_ActionHistoryBranch SET actionRef = ?
|
||||||
WHERE name = "local_sent"`,
|
WHERE name = 'local_sent'`,
|
||||||
tip);
|
tip);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -336,7 +336,7 @@ export class ActionHistoryImpl implements ActionHistory {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
await this._db.run(`UPDATE _gristsys_ActionHistoryBranch SET actionRef = ?
|
await this._db.run(`UPDATE _gristsys_ActionHistoryBranch SET actionRef = ?
|
||||||
WHERE name = "shared"`,
|
WHERE name = 'shared'`,
|
||||||
candidate.id);
|
candidate.id);
|
||||||
if (candidates.length === 1) {
|
if (candidates.length === 1) {
|
||||||
this._haveLocalSent = false;
|
this._haveLocalSent = false;
|
||||||
@ -405,9 +405,10 @@ export class ActionHistoryImpl implements ActionHistory {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async getActions(actionNums: number[]): Promise<Array<LocalActionBundle|undefined>> {
|
public async getActions(actionNums: number[]): Promise<Array<LocalActionBundle|undefined>> {
|
||||||
const actions = await this._db.all(`SELECT actionHash, actionNum, body FROM _gristsys_ActionHistory
|
const actions = await this._db.all(
|
||||||
|
`SELECT actionHash, actionNum, body FROM _gristsys_ActionHistory
|
||||||
where actionNum in (${actionNums.map(x => '?').join(',')})`,
|
where actionNum in (${actionNums.map(x => '?').join(',')})`,
|
||||||
actionNums);
|
...actionNums);
|
||||||
return reportTimeTaken("getActions", () => {
|
return reportTimeTaken("getActions", () => {
|
||||||
const actionsByActionNum = keyBy(actions, 'actionNum');
|
const actionsByActionNum = keyBy(actions, 'actionNum');
|
||||||
return actionNums
|
return actionNums
|
||||||
@ -516,7 +517,7 @@ export class ActionHistoryImpl implements ActionHistory {
|
|||||||
FROM _gristsys_ActionHistoryBranch as Branch
|
FROM _gristsys_ActionHistoryBranch as Branch
|
||||||
LEFT JOIN _gristsys_ActionHistory as History
|
LEFT JOIN _gristsys_ActionHistory as History
|
||||||
ON History.id = Branch.actionRef
|
ON History.id = Branch.actionRef
|
||||||
WHERE name in ("shared", "local_sent", "local_unsent")`);
|
WHERE name in ('shared', 'local_sent', 'local_unsent')`);
|
||||||
const bits = mapValues(keyBy(rows, 'name'), this._asActionIdentifiers);
|
const bits = mapValues(keyBy(rows, 'name'), this._asActionIdentifiers);
|
||||||
const missing = { actionHash: null, actionRef: null, actionNum: null } as ActionIdentifiers;
|
const missing = { actionHash: null, actionRef: null, actionNum: null } as ActionIdentifiers;
|
||||||
return {
|
return {
|
||||||
|
@@ -169,7 +169,10 @@ const UPDATE_DATA_SIZE_DELAY = {delayMs: 5 * 60 * 1000, varianceMs: 30 * 1000};
 const LOG_DOCUMENT_METRICS_DELAY = {delayMs: 60 * 60 * 1000, varianceMs: 30 * 1000};

 // A hook for dependency injection.
-export const Deps = {ACTIVEDOC_TIMEOUT};
+export const Deps = {
+  ACTIVEDOC_TIMEOUT,
+  ACTIVEDOC_TIMEOUT_ACTION: 'shutdown' as 'shutdown'|'ignore',
+};

 interface UpdateUsageOptions {
   // Whether usage should be synced to the home database. Defaults to true.
@@ -242,7 +245,7 @@ export class ActiveDoc extends EventEmitter implements AssistanceDoc {
   // Timer for shutting down the ActiveDoc a bit after all clients are gone.
   private _inactivityTimer = new InactivityTimer(() => {
     this._log.debug(null, 'inactivity timeout');
-    return this.shutdown();
+    return this._onInactive();
   }, Deps.ACTIVEDOC_TIMEOUT * 1000);
   private _recoveryMode: boolean = false;
   private _shuttingDown: boolean = false;
@@ -1509,8 +1512,7 @@ export class ActiveDoc extends EventEmitter implements AssistanceDoc {
    */
   public async getUsersForViewAs(docSession: OptDocSession): Promise<PermissionDataWithExtraUsers> {
     // Make sure we have rights to view access rules.
-    const db = this.getHomeDbManager();
-    if (!db || !await this._granularAccess.hasAccessRulesPermission(docSession)) {
+    if (!await this._granularAccess.hasAccessRulesPermission(docSession)) {
       throw new Error('Cannot list ACL users');
     }

@@ -1525,12 +1527,15 @@ export class ActiveDoc extends EventEmitter implements AssistanceDoc {
     // Collect users the document is shared with.
     const userId = getDocSessionUserId(docSession);
     if (!userId) { throw new Error('Cannot determine user'); }
+    const db = this.getHomeDbManager();
+    if (db) {
       const access = db.unwrapQueryResult(
         await db.getDocAccess({userId, urlId: this.docName}, {
           flatten: true, excludeUsersWithoutAccess: true,
         }));
       result.users = access.users;
       result.users.forEach(user => isShared.add(normalizeEmail(user.email)));
+    }

     // Collect users from user attribute tables. Omit duplicates with users the document is
     // shared with.
@@ -2048,7 +2053,7 @@ export class ActiveDoc extends EventEmitter implements AssistanceDoc {
       documentSettings.engine = (pythonVersion === '2') ? 'python2' : 'python3';
     }
     await this.docStorage.run('UPDATE _grist_DocInfo SET timezone = ?, documentSettings = ?',
-      [timezone, JSON.stringify(documentSettings)]);
+      timezone, JSON.stringify(documentSettings));
   }

   private _makeInfo(docSession: OptDocSession, options: ApplyUAOptions = {}) {
@@ -2657,6 +2662,12 @@ export class ActiveDoc extends EventEmitter implements AssistanceDoc {
     }
     return this._attachmentColumns;
   }
+
+  private async _onInactive() {
+    if (Deps.ACTIVEDOC_TIMEOUT_ACTION === 'shutdown') {
+      await this.shutdown();
+    }
+  }
 }

 // Helper to initialize a sandbox action bundle with no values.
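Since Deps is the documented dependency-injection hook, an embedder or a test can change the new setting before documents are opened; a sketch (module path assumed to be the usual ActiveDoc location):

```typescript
import { Deps } from 'app/server/lib/ActiveDoc';

// Keep inactive documents alive instead of shutting them down, e.g. in a
// single-document, in-browser setup where there is nothing to shut down to.
Deps.ACTIVEDOC_TIMEOUT_ACTION = 'ignore';
```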
@@ -7,7 +7,6 @@
  */


-import * as sqlite3 from '@gristlabs/sqlite3';
 import {LocalActionBundle} from 'app/common/ActionBundle';
 import {BulkColValues, DocAction, TableColValues, TableDataAction, toTableDataAction} from 'app/common/DocActions';
 import * as gristTypes from 'app/common/gristTypes';
@@ -23,12 +22,12 @@ import log from 'app/server/lib/log';
 import assert from 'assert';
 import * as bluebird from 'bluebird';
 import * as fse from 'fs-extra';
-import {RunResult} from 'sqlite3';
 import * as _ from 'underscore';
 import * as util from 'util';
 import uuidv4 from "uuid/v4";
 import {OnDemandStorage} from './OnDemandActions';
-import {ISQLiteDB, MigrationHooks, OpenMode, quoteIdent, ResultRow, SchemaInfo, SQLiteDB} from './SQLiteDB';
+import {ISQLiteDB, MigrationHooks, OpenMode, PreparedStatement, quoteIdent,
+        ResultRow, RunResult, SchemaInfo, SQLiteDB} from 'app/server/lib/SQLiteDB';
 import chunk = require('lodash/chunk');
 import cloneDeep = require('lodash/cloneDeep');
 import groupBy = require('lodash/groupBy');
@@ -447,7 +446,7 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
   * Converts an array of columns to an array of rows (suitable to use as sqlParams), encoding all
   * values as needed, according to an array of Grist type strings (must be parallel to columns).
   */
-  private static _encodeColumnsToRows(types: string[], valueColumns: any[]): any[] {
+  private static _encodeColumnsToRows(types: string[], valueColumns: any[]): any[][] {
     const marshaller = new marshal.Marshaller({version: 2});
     const rows = _.unzip(valueColumns);
     for (const row of rows) {
|
|||||||
})
|
})
|
||||||
.catch(err => {
|
.catch(err => {
|
||||||
// This replicates previous logic for _updateMetadata.
|
// This replicates previous logic for _updateMetadata.
|
||||||
if (err.message.startsWith('SQLITE_ERROR: no such table')) {
|
// It matches errors from node-sqlite3 and better-sqlite3
|
||||||
|
if (err.message.startsWith('SQLITE_ERROR: no such table') ||
|
||||||
|
err.message.startsWith('no such table:')) {
|
||||||
err.message = `NO_METADATA_ERROR: ${this.docName} has no metadata`;
|
err.message = `NO_METADATA_ERROR: ${this.docName} has no metadata`;
|
||||||
|
if (!err.cause) { err.cause = {}; }
|
||||||
err.cause.code = 'NO_METADATA_ERROR';
|
err.cause.code = 'NO_METADATA_ERROR';
|
||||||
}
|
}
|
||||||
throw err;
|
throw err;
|
||||||
@ -781,7 +783,7 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
|
|||||||
.then(() => true)
|
.then(() => true)
|
||||||
// If UNIQUE constraint failed, this ident must already exists, so return false.
|
// If UNIQUE constraint failed, this ident must already exists, so return false.
|
||||||
.catch(err => {
|
.catch(err => {
|
||||||
if (/^SQLITE_CONSTRAINT: UNIQUE constraint failed/.test(err.message)) {
|
if (/^(SQLITE_CONSTRAINT: )?UNIQUE constraint failed/.test(err.message)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
throw err;
|
throw err;
|
||||||
@ -879,7 +881,7 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
|
|||||||
}
|
}
|
||||||
whereParts = whereParts.concat(query.wheres ?? []);
|
whereParts = whereParts.concat(query.wheres ?? []);
|
||||||
const sql = this._getSqlForQuery(query, whereParts);
|
const sql = this._getSqlForQuery(query, whereParts);
|
||||||
return this._getDB().allMarshal(sql, params);
|
return this._getDB().allMarshal(sql, ...params);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -1125,16 +1127,12 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
|
|||||||
if (numChunks > 0) {
|
if (numChunks > 0) {
|
||||||
debuglog("DocStorage.BulkRemoveRecord: splitting " + rowIds.length +
|
debuglog("DocStorage.BulkRemoveRecord: splitting " + rowIds.length +
|
||||||
" deletes into chunks of size " + chunkSize);
|
" deletes into chunks of size " + chunkSize);
|
||||||
await this.prepare(preSql + chunkParams + postSql)
|
const stmt = await this.prepare(preSql + chunkParams + postSql);
|
||||||
.then(function(stmt) {
|
for (const index of _.range(0, numChunks * chunkSize, chunkSize)) {
|
||||||
return bluebird.Promise.each(_.range(0, numChunks * chunkSize, chunkSize), function(index: number) {
|
|
||||||
debuglog("DocStorage.BulkRemoveRecord: chunk delete " + index + "-" + (index + chunkSize - 1));
|
debuglog("DocStorage.BulkRemoveRecord: chunk delete " + index + "-" + (index + chunkSize - 1));
|
||||||
return bluebird.Promise.fromCallback((cb: any) => stmt.run(rowIds.slice(index, index + chunkSize), cb));
|
await stmt.run(rowIds.slice(index, index + chunkSize));
|
||||||
})
|
}
|
||||||
.then(function() {
|
await stmt.finalize();
|
||||||
return bluebird.Promise.fromCallback((cb: any) => stmt.finalize(cb));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (numLeftovers > 0) {
|
if (numLeftovers > 0) {
|
||||||
@ -1433,8 +1431,8 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
|
|||||||
return this._markAsChanged(this._getDB().exec(sql));
|
return this._markAsChanged(this._getDB().exec(sql));
|
||||||
}
|
}
|
||||||
|
|
||||||
public prepare(sql: string, ...args: any[]): Promise<sqlite3.Statement> {
|
public prepare(sql: string): Promise<PreparedStatement> {
|
||||||
return this._getDB().prepare(sql, ...args);
|
return this._getDB().prepare(sql);
|
||||||
}
|
}
|
||||||
|
|
||||||
public get(sql: string, ...args: any[]): Promise<ResultRow|undefined> {
|
public get(sql: string, ...args: any[]): Promise<ResultRow|undefined> {
|
||||||
@ -1545,7 +1543,16 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
|
|||||||
name LIKE 'sqlite_%' OR
|
name LIKE 'sqlite_%' OR
|
||||||
name LIKE '_gristsys_%'
|
name LIKE '_gristsys_%'
|
||||||
);
|
);
|
||||||
`);
|
`).catch(e => {
|
||||||
|
if (String(e).match(/no such table: dbstat/)) {
|
||||||
|
// We are using a version of SQLite that doesn't have
|
||||||
|
// dbstat compiled in. But it would be sad to disable
|
||||||
|
// Grist entirely just because we can't track byte-count.
|
||||||
|
// So return NaN in this case.
|
||||||
|
return {totalSize: NaN};
|
||||||
|
}
|
||||||
|
throw e;
|
||||||
|
});
|
||||||
return result!.totalSize;
|
return result!.totalSize;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1576,19 +1583,15 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
   /**
    * Internal helper for applying Bulk Update or Add Record sql
    */
-  private async _applyMaybeBulkUpdateOrAddSql(sql: string, sqlParams: any[]): Promise<void> {
+  private async _applyMaybeBulkUpdateOrAddSql(sql: string, sqlParams: any[][]): Promise<void> {
     if (sqlParams.length === 1) {
-      await this.run(sql, sqlParams[0]);
+      await this.run(sql, ...sqlParams[0]);
     } else {
-      return this.prepare(sql)
-        .then(function(stmt) {
-          return bluebird.Promise.each(sqlParams, function(param: string) {
-            return bluebird.Promise.fromCallback((cb: any) => stmt.run(param, cb));
-          })
-          .then(function() {
-            return bluebird.Promise.fromCallback((cb: any) => stmt.finalize(cb));
-          });
-        });
+      const stmt = await this.prepare(sql);
+      for (const param of sqlParams) {
+        await stmt.run(...param);
+      }
+      await stmt.finalize();
     }
   }

@@ -1613,9 +1616,9 @@ export class DocStorage implements ISQLiteDB, OnDemandStorage {
     }
     const oldGristType = this._getGristType(tableId, colId);
     const oldSqlType = colInfo.type || 'BLOB';
-    const oldDefault = colInfo.dflt_value;
+    const oldDefault = fixDefault(colInfo.dflt_value);
     const newSqlType = newColType ? DocStorage._getSqlType(newColType) : oldSqlType;
-    const newDefault = newColType ? DocStorage._formattedDefault(newColType) : oldDefault;
+    const newDefault = fixDefault(newColType ? DocStorage._formattedDefault(newColType) : oldDefault);
     const newInfo = {name: newColId, type: newSqlType, dflt_value: newDefault};
     // Check if anything actually changed, and only rebuild the table then.
     if (Object.keys(newInfo).every(p => ((newInfo as any)[p] === colInfo[p]))) {
|
|||||||
export async function createAttachmentsIndex(db: ISQLiteDB) {
|
export async function createAttachmentsIndex(db: ISQLiteDB) {
|
||||||
await db.exec(`CREATE INDEX _grist_Attachments_fileIdent ON _grist_Attachments(fileIdent)`);
|
await db.exec(`CREATE INDEX _grist_Attachments_fileIdent ON _grist_Attachments(fileIdent)`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Old docs may have incorrect quotes in their schema for default values
|
||||||
|
// that node-sqlite3 may tolerate but not other wrappers. Patch such
|
||||||
|
// material as we run into it.
|
||||||
|
function fixDefault(def: string) {
|
||||||
|
return (def === '""') ? "''" : def;
|
||||||
|
}
|
||||||
|
@ -1879,11 +1879,10 @@ export class GranularAccess implements GranularAccessForBundle {
|
|||||||
* tables or examples.
|
* tables or examples.
|
||||||
*/
|
*/
|
||||||
private async _getViewAsUser(linkParameters: Record<string, string>): Promise<UserOverride> {
|
private async _getViewAsUser(linkParameters: Record<string, string>): Promise<UserOverride> {
|
||||||
// Look up user information in database.
|
// Look up user information in database, if available
|
||||||
if (!this._homeDbManager) { throw new Error('database required'); }
|
|
||||||
const dbUser = linkParameters.aclAsUserId ?
|
const dbUser = linkParameters.aclAsUserId ?
|
||||||
(await this._homeDbManager.getUser(integerParam(linkParameters.aclAsUserId, 'aclAsUserId'))) :
|
(await this._homeDbManager?.getUser(integerParam(linkParameters.aclAsUserId, 'aclAsUserId'))) :
|
||||||
(await this._homeDbManager.getExistingUserByLogin(linkParameters.aclAsUser));
|
(await this._homeDbManager?.getExistingUserByLogin(linkParameters.aclAsUser));
|
||||||
// If this is one of example users we will pretend that it doesn't exist, otherwise we would
|
// If this is one of example users we will pretend that it doesn't exist, otherwise we would
|
||||||
// end up using permissions of the real user.
|
// end up using permissions of the real user.
|
||||||
const isExampleUser = this.getExampleViewAsUsers().some(e => e.email === dbUser?.loginEmail);
|
const isExampleUser = this.getExampleViewAsUsers().some(e => e.email === dbUser?.loginEmail);
|
||||||
@ -1905,13 +1904,13 @@ export class GranularAccess implements GranularAccessForBundle {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const docAuth = userExists ? await this._homeDbManager.getDocAuthCached({
|
const docAuth = userExists ? await this._homeDbManager?.getDocAuthCached({
|
||||||
urlId: this._docId,
|
urlId: this._docId,
|
||||||
userId: dbUser.id
|
userId: dbUser.id
|
||||||
}) : null;
|
}) : null;
|
||||||
const access = docAuth?.access || null;
|
const access = docAuth?.access || null;
|
||||||
const user = userExists ? this._homeDbManager.makeFullUser(dbUser) : null;
|
const user = userExists ? this._homeDbManager?.makeFullUser(dbUser) : null;
|
||||||
return { access, user };
|
return { access, user: user || null };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@@ -7,7 +7,8 @@ import {IBilling} from 'app/server/lib/IBilling';
 import {INotifier} from 'app/server/lib/INotifier';
 import {ISandbox, ISandboxCreationOptions} from 'app/server/lib/ISandbox';
 import {IShell} from 'app/server/lib/IShell';
-import {createSandbox} from 'app/server/lib/NSandbox';
+import {createSandbox, SpawnFn} from 'app/server/lib/NSandbox';
+import {SqliteVariant} from 'app/server/lib/SqliteCommon';

 export interface ICreate {

@@ -31,6 +32,8 @@ export interface ICreate {
   // static page.
   getExtraHeadHtml?(): string;
   getStorageOptions?(name: string): ICreateStorageOptions|undefined;
+  getSqliteVariant?(): SqliteVariant;
+  getSandboxVariants?(): Record<string, SpawnFn>;
 }

 export interface ICreateActiveDocOptions {
@@ -62,6 +65,8 @@ export function makeSimpleCreator(opts: {
   sandboxFlavor?: string,
   shell?: IShell,
   getExtraHeadHtml?: () => string,
+  getSqliteVariant?: () => SqliteVariant,
+  getSandboxVariants?: () => Record<string, SpawnFn>,
 }): ICreate {
   const {sessionSecret, storage, notifier, billing} = opts;
   return {
@@ -121,6 +126,8 @@ export function makeSimpleCreator(opts: {
     },
     getStorageOptions(name: string) {
       return storage?.find(s => s.name === name);
-    }
+    },
+    getSqliteVariant: opts.getSqliteVariant,
+    getSandboxVariants: opts.getSandboxVariants,
   };
 }
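A sketch of how a variant build might use the new creator options (baseOpts, myVariant and spawnWorkerSandbox are hypothetical; only the new fields are shown):

```typescript
import { makeSimpleCreator } from 'app/server/lib/ICreate';

export const create = makeSimpleCreator({
  ...baseOpts,                                         // whatever options the build already passes
  getSqliteVariant: () => myVariant,                   // e.g. a variant backed by another SQLite wrapper
  getSandboxVariants: () => ({worker: spawnWorkerSandbox}),
});
```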
@@ -3,6 +3,7 @@
  */
 import {arrayToString} from 'app/common/arrayToString';
 import * as marshal from 'app/common/marshal';
+import {create} from 'app/server/lib/create';
 import {ISandbox, ISandboxCreationOptions, ISandboxCreator} from 'app/server/lib/ISandbox';
 import log from 'app/server/lib/log';
 import {getAppRoot, getAppRootFor, getUnpackedAppRoot} from 'app/server/lib/places';
@@ -69,12 +70,18 @@ export interface ISandboxOptions {
  * We interact with sandboxes as a separate child process. Data engine work is done
  * across standard input and output streams from and to this process. We also monitor
  * and control resource utilization via a distinct control interface.
+ *
+ * More recently, a sandbox may not be a separate OS process, but (for
+ * example) a web worker. In this case, a pair of callbacks (getData and
+ * sendData) replace pipes.
  */
-interface SandboxProcess {
-  child: ChildProcess;
+export interface SandboxProcess {
+  child?: ChildProcess;
   control: ISandboxControl;
   dataToSandboxDescriptor?: number; // override sandbox's 'stdin' for data
   dataFromSandboxDescriptor?: number; // override sandbox's 'stdout' for data
+  getData?: (cb: (data: any) => void) => void; // use a callback instead of a pipe to get data
+  sendData?: (data: any) => void; // use a callback instead of a pipe to send data
 }

 type ResolveRejectPair = [(value?: any) => void, (reason?: unknown) => void];
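For illustration (not part of this commit), a pipe-less SandboxProcess backed by a web worker, as the new getData/sendData callbacks allow; the worker script and the supplied ISandboxControl are assumptions:

```typescript
// Hypothetical factory for a sandbox that lives in a web worker rather than a child process.
function createWorkerSandboxProcess(control: ISandboxControl): SandboxProcess {
  const worker = new Worker('data-engine-worker.js');   // hypothetical bundled data engine
  return {
    control,
    // No `child`: NSandbox will use these callbacks instead of stdio pipes.
    getData: (cb) => { worker.onmessage = (ev) => cb(ev.data); },
    sendData: (data) => worker.postMessage(data),
  };
}
```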
@@ -88,7 +95,7 @@ const recordBuffersRoot = process.env.RECORD_SANDBOX_BUFFERS_DIR;

 export class NSandbox implements ISandbox {

-  public readonly childProc: ChildProcess;
+  public readonly childProc?: ChildProcess;
   private _control: ISandboxControl;
   private _logTimes: boolean;
   private _exportedFunctions: {[name: string]: SandboxMethod};
@@ -101,8 +108,9 @@ export class NSandbox implements ISandbox {
   private _isWriteClosed = false;

   private _logMeta: log.ILogMeta;
-  private _streamToSandbox: Writable;
+  private _streamToSandbox?: Writable;
   private _streamFromSandbox: Stream;
+  private _dataToSandbox?: (data: any) => void;
   private _lastStderr: Uint8Array; // Record last error line seen.

   // Create a unique subdirectory for each sandbox process so they can be replayed separately
@@ -129,52 +137,26 @@ export class NSandbox implements ISandbox {
     this._control = sandboxProcess.control;
     this.childProc = sandboxProcess.child;

-    this._logMeta = {sandboxPid: this.childProc.pid, ...options.logMeta};
+    this._logMeta = {sandboxPid: this.childProc?.pid, ...options.logMeta};

-    if (options.minimalPipeMode) {
-      log.rawDebug("3-pipe Sandbox started", this._logMeta);
-      if (sandboxProcess.dataToSandboxDescriptor) {
-        this._streamToSandbox =
-          (this.childProc.stdio as Stream[])[sandboxProcess.dataToSandboxDescriptor] as Writable;
-      } else {
-        this._streamToSandbox = this.childProc.stdin!;
-      }
-      if (sandboxProcess.dataFromSandboxDescriptor) {
-        this._streamFromSandbox =
-          (this.childProc.stdio as Stream[])[sandboxProcess.dataFromSandboxDescriptor];
-      } else {
-        this._streamFromSandbox = this.childProc.stdout!;
-      }
-    } else {
-      log.rawDebug("5-pipe Sandbox started", this._logMeta);
-      if (sandboxProcess.dataFromSandboxDescriptor || sandboxProcess.dataToSandboxDescriptor) {
-        throw new Error('cannot override file descriptors in 5 pipe mode');
-      }
-      this._streamToSandbox = (this.childProc.stdio as Stream[])[3] as Writable;
-      this._streamFromSandbox = (this.childProc.stdio as Stream[])[4];
-      this.childProc.stdout!.on('data', sandboxUtil.makeLinePrefixer('Sandbox stdout: ', this._logMeta));
-    }
-    const sandboxStderrLogger = sandboxUtil.makeLinePrefixer('Sandbox stderr: ', this._logMeta);
-    this.childProc.stderr!.on('data', data => {
-      this._lastStderr = data;
-      sandboxStderrLogger(data);
-    });
-
-    this.childProc.on('close', this._onExit.bind(this));
-    this.childProc.on('error', this._onError.bind(this));
-
-    this._streamFromSandbox.on('data', (data) => this._onSandboxData(data));
-    this._streamFromSandbox.on('end', () => this._onSandboxClose());
-    this._streamFromSandbox.on('error', (err) => {
-      log.rawError(`Sandbox error reading: ${err}`, this._logMeta);
-      this._onSandboxClose();
-    });
-
-    this._streamToSandbox.on('error', (err) => {
-      if (!this._isWriteClosed) {
-        log.rawError(`Sandbox error writing: ${err}`, this._logMeta);
-      }
-    });
+    if (this.childProc) {
+      if (options.minimalPipeMode) {
+        this._initializeMinimalPipeMode(sandboxProcess);
+      } else {
+        this._initializeFivePipeMode(sandboxProcess);
+      }
+    } else {
+      // No child process. In this case, there should be a callback for
+      // receiving and sending data.
+      if (!sandboxProcess.getData) {
+        throw new Error('no way to get data from sandbox');
+      }
+      if (!sandboxProcess.sendData) {
+        throw new Error('no way to send data to sandbox');
+      }
+      sandboxProcess.getData((data) => this._onSandboxData(data));
+      this._dataToSandbox = sandboxProcess.sendData;
+    }

     // On shutdown, shutdown the child process cleanly, and wait for it to exit.
     shutdown.addCleanupHandler(this, this.shutdown);
@ -203,9 +185,9 @@ export class NSandbox implements ISandbox {
|
|||||||
|
|
||||||
const result = await new Promise<void>((resolve, reject) => {
|
const result = await new Promise<void>((resolve, reject) => {
|
||||||
if (this._isWriteClosed) { resolve(); }
|
if (this._isWriteClosed) { resolve(); }
|
||||||
this.childProc.on('error', reject);
|
this.childProc?.on('error', reject);
|
||||||
this.childProc.on('close', resolve);
|
this.childProc?.on('close', resolve);
|
||||||
this.childProc.on('exit', resolve);
|
this.childProc?.on('exit', resolve);
|
||||||
this._close();
|
this._close();
|
||||||
}).finally(() => this._control.close());
|
}).finally(() => this._control.close());
|
||||||
|
|
||||||
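The new else-branch above is the hook grist-static relies on: when a SandboxProcess carries no child process, NSandbox talks to the engine purely through the getData/sendData callbacks. A minimal sketch of such a process object, with an invented in-memory transport and a two-method control stub (both are assumptions for illustration, not part of this commit):

// Illustrative sketch only. The object shape mirrors how the constructor above
// uses SandboxProcess (control, optional child, getData, sendData); the
// in-memory transport and the minimal control object are made up for the example.
function inMemorySandboxProcess(handleData: (buf: Buffer) => Buffer) {
  let deliver: ((data: Buffer) => void) | undefined;
  return {
    control: {
      prepareToClose() { /* nothing to flush in this toy transport */ },
      async close() { deliver = undefined; },
    },
    // No `child`: NSandbox will take the callback-based branch shown above.
    getData(cb: (data: Buffer) => void) { deliver = cb; },
    sendData(data: Buffer) { deliver?.(handleData(data)); },
  };
}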
@ -244,6 +226,82 @@ export class NSandbox implements ISandbox {
     log.rawDebug('Sandbox memory', {memory, ...this._logMeta});
   }

+  /**
+   * Get ready to communicate with a sandbox process using stdin,
+   * stdout, and stderr.
+   */
+  private _initializeMinimalPipeMode(sandboxProcess: SandboxProcess) {
+    log.rawDebug("3-pipe Sandbox started", this._logMeta);
+    if (!this.childProc) {
+      throw new Error('child process required');
+    }
+    if (sandboxProcess.dataToSandboxDescriptor) {
+      this._streamToSandbox =
+        (this.childProc.stdio as Stream[])[sandboxProcess.dataToSandboxDescriptor] as Writable;
+    } else {
+      this._streamToSandbox = this.childProc.stdin!;
+    }
+    if (sandboxProcess.dataFromSandboxDescriptor) {
+      this._streamFromSandbox =
+        (this.childProc.stdio as Stream[])[sandboxProcess.dataFromSandboxDescriptor];
+    } else {
+      this._streamFromSandbox = this.childProc.stdout!;
+    }
+    this._initializeStreamEvents();
+  }
+
+  /**
+   * Get ready to communicate with a sandbox process using stdin,
+   * stdout, and stderr, and two extra FDs. This was a nice way
+   * to have a clean, separate data channel, when supported.
+   */
+  private _initializeFivePipeMode(sandboxProcess: SandboxProcess) {
+    log.rawDebug("5-pipe Sandbox started", this._logMeta);
+    if (!this.childProc) {
+      throw new Error('child process required');
+    }
+    if (sandboxProcess.dataFromSandboxDescriptor || sandboxProcess.dataToSandboxDescriptor) {
+      throw new Error('cannot override file descriptors in 5 pipe mode');
+    }
+    this._streamToSandbox = (this.childProc.stdio as Stream[])[3] as Writable;
+    this._streamFromSandbox = (this.childProc.stdio as Stream[])[4];
+    this.childProc.stdout!.on('data', sandboxUtil.makeLinePrefixer('Sandbox stdout: ', this._logMeta));
+    this._initializeStreamEvents();
+  }
+
+  /**
+   * Set up logging and events on streams to/from a sandbox.
+   */
+  private _initializeStreamEvents() {
+    if (!this.childProc) {
+      throw new Error('child process required');
+    }
+    if (!this._streamToSandbox) {
+      throw new Error('expected streamToSandbox to be configured');
+    }
+    const sandboxStderrLogger = sandboxUtil.makeLinePrefixer('Sandbox stderr: ', this._logMeta);
+    this.childProc.stderr!.on('data', data => {
+      this._lastStderr = data;
+      sandboxStderrLogger(data);
+    });
+
+    this.childProc.on('close', this._onExit.bind(this));
+    this.childProc.on('error', this._onError.bind(this));
+
+    this._streamFromSandbox.on('data', (data) => this._onSandboxData(data));
+    this._streamFromSandbox.on('end', () => this._onSandboxClose());
+    this._streamFromSandbox.on('error', (err) => {
+      log.rawError(`Sandbox error reading: ${err}`, this._logMeta);
+      this._onSandboxClose();
+    });
+
+    this._streamToSandbox.on('error', (err) => {
+      if (!this._isWriteClosed) {
+        log.rawError(`Sandbox error writing: ${err}`, this._logMeta);
+      }
+    });
+  }
+
   private async _pyCallWait(funcName: string, startTime: number): Promise<any> {
     try {
       return await new Promise((resolve, reject) => {

@ -263,7 +321,7 @@ export class NSandbox implements ISandbox {
     this._control.prepareToClose();
     if (!this._isWriteClosed) {
       // Close the pipe to the sandbox, which should cause the sandbox to exit cleanly.
-      this._streamToSandbox.end();
+      this._streamToSandbox?.end();
       this._isWriteClosed = true;
     }
   }

@ -298,9 +356,16 @@ export class NSandbox implements ISandbox {
     if (this._recordBuffersDir) {
       fs.appendFileSync(path.resolve(this._recordBuffersDir, "input"), buf);
     }
-    return this._streamToSandbox.write(buf);
+    if (this._streamToSandbox) {
+      return this._streamToSandbox.write(buf);
+    } else {
+      if (!this._dataToSandbox) {
+        throw new Error('no way to send data to sandbox');
+      }
+      this._dataToSandbox(buf);
+      return true;
+    }
   }

   /**
    * Process a buffer of data received from the sandbox process.
@ -422,18 +487,26 @@ function isFlavor(flavor: string): flavor is keyof typeof spawners {
  * It is ignored by other flavors.
  */
 export class NSandboxCreator implements ISandboxCreator {
-  private _flavor: keyof typeof spawners;
+  private _flavor: string;
+  private _spawner: SpawnFn;
   private _command?: string;
   private _preferredPythonVersion?: string;

   public constructor(options: {
-    defaultFlavor: keyof typeof spawners,
+    defaultFlavor: string,
     command?: string,
     preferredPythonVersion?: string,
   }) {
     const flavor = options.defaultFlavor;
     if (!isFlavor(flavor)) {
+      const variants = create.getSandboxVariants?.();
+      if (!variants?.[flavor]) {
         throw new Error(`Unrecognized sandbox flavor: ${flavor}`);
+      } else {
+        this._spawner = variants[flavor];
+      }
+    } else {
+      this._spawner = spawners[flavor];
     }
     this._flavor = flavor;
     this._command = options.command;

@ -463,12 +536,12 @@ export class NSandboxCreator implements ISandboxCreator {
       importDir: options.importMount,
       ...options.sandboxOptions,
     };
-    return new NSandbox(translatedOptions, spawners[this._flavor]);
+    return new NSandbox(translatedOptions, this._spawner);
   }
 }

 // A function that takes sandbox options and starts a sandbox process.
-type SpawnFn = (options: ISandboxOptions) => SandboxProcess;
+export type SpawnFn = (options: ISandboxOptions) => SandboxProcess;

 /**
  * Helper function to run a nacl sandbox. It takes care of most arguments, similarly to

@ -750,7 +823,7 @@ function macSandboxExec(options: ISandboxOptions): SandboxProcess {
     ...getWrappingEnv(options),
   };
   const command = findPython(options.command, options.preferredPythonVersion);
-  const realPath = fs.realpathSync(command);
+  const realPath = realpathSync(command);
   log.rawDebug("macSandboxExec found a python", {...options.logMeta, command: realPath});

   // Prepare sandbox profile

@ -868,11 +941,11 @@ function getAbsolutePaths(options: ISandboxOptions) {
   // Get path to sandbox directory - this is a little idiosyncratic to work well
   // in grist-core. It is important to use real paths since we may be viewing
   // the file system through a narrow window in a container.
-  const sandboxDir = path.join(fs.realpathSync(path.join(process.cwd(), 'sandbox', 'grist')),
+  const sandboxDir = path.join(realpathSync(path.join(process.cwd(), 'sandbox', 'grist')),
                                '..');
   // Copy plugin options, and then make them absolute.
   if (options.importDir) {
-    options.importDir = fs.realpathSync(options.importDir);
+    options.importDir = realpathSync(options.importDir);
   }
   return {
     sandboxDir,

@ -976,9 +1049,6 @@ export function createSandbox(defaultFlavorSpec: string, options: ISandboxCreati
     const flavor = parts[parts.length - 1];
     const version = parts.length === 2 ? parts[0] : '*';
     if (preferredPythonVersion === version || version === '*' || !preferredPythonVersion) {
-      if (!isFlavor(flavor)) {
-        throw new Error(`Unrecognized sandbox flavor: ${flavor}`);
-      }
       const creator = new NSandboxCreator({
         defaultFlavor: flavor,
         command: process.env['GRIST_SANDBOX' + (preferredPythonVersion||'')] ||
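Unrecognized flavor names are now resolved through create.getSandboxVariants?.(), which NSandboxCreator treats as a map from flavor name to a SpawnFn. A hedged sketch of what such a hook could return; the "inline" flavor and the spawner body are invented for illustration and are not part of this commit:

// Hypothetical sketch: a getSandboxVariants() hook supplying one extra flavor.
export function getSandboxVariants() {
  return {
    inline: (_options: object) => ({
      control: {prepareToClose() { /* noop */ }, async close() { /* noop */ }},
      // Omitting `child` makes NSandbox use the callback-based data path.
      getData(_cb: (data: Buffer) => void) { /* connect a transport and call _cb(data) */ },
      sendData(_data: Buffer) { /* deliver _data to the in-process engine */ },
    }),
  };
}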
@ -990,3 +1060,16 @@ export function createSandbox(defaultFlavorSpec: string, options: ISandboxCreati
     }
   }
   throw new Error('Failed to create a sandbox');
 }
+
+/**
+ * The realpath function may not be available, just return the
+ * path unchanged if it is not. Specifically, this happens when
+ * compiled for use in a browser environment.
+ */
+function realpathSync(src: string) {
+  try {
+    return fs.realpathSync(src);
+  } catch (e) {
+    return src;
+  }
+}
@ -69,23 +69,24 @@
 import {ErrorWithCode} from 'app/common/ErrorWithCode';
 import {timeFormat} from 'app/common/timeFormat';
+import {create} from 'app/server/lib/create';
 import * as docUtils from 'app/server/lib/docUtils';
 import log from 'app/server/lib/log';
-import {fromCallback} from 'app/server/lib/serverUtils';
-import * as sqlite3 from '@gristlabs/sqlite3';
+import {MinDB, MinRunResult, PreparedStatement, ResultRow,
+        SqliteVariant, Statement} from 'app/server/lib/SqliteCommon';
+import {NodeSqliteVariant} from 'app/server/lib/SqliteNode';
 import assert from 'assert';
-import {each} from 'bluebird';
 import * as fse from 'fs-extra';
-import {RunResult} from 'sqlite3';
 import fromPairs = require('lodash/fromPairs');
 import isEqual = require('lodash/isEqual');
 import noop = require('lodash/noop');
 import range = require('lodash/range');

-// Describes the result of get() and all() database methods.
-export interface ResultRow {
-  [column: string]: any;
+export type {PreparedStatement, ResultRow, Statement};
+export type RunResult = MinRunResult;
+
+function getVariant(): SqliteVariant {
+  return create.getSqliteVariant?.() || new NodeSqliteVariant();
 }

 // Describes how to create a new DB or migrate an old one. Any changes to the DB must be reflected
@ -136,7 +137,7 @@ export interface ISQLiteDB {
   run(sql: string, ...params: any[]): Promise<RunResult>;
   get(sql: string, ...params: any[]): Promise<ResultRow|undefined>;
   all(sql: string, ...params: any[]): Promise<ResultRow[]>;
-  prepare(sql: string, ...params: any[]): Promise<sqlite3.Statement>;
+  prepare(sql: string, ...params: any[]): Promise<PreparedStatement>;
   execTransaction<T>(callback: () => Promise<T>): Promise<T>;
   runAndGetId(sql: string, ...params: any[]): Promise<number>;
   requestVacuum(): Promise<boolean>;

@ -196,18 +197,11 @@ export class SQLiteDB implements ISQLiteDB {
    */
   public static async openDBRaw(dbPath: string,
                                 mode: OpenMode = OpenMode.OPEN_CREATE): Promise<SQLiteDB> {
-    const sqliteMode: number =
-      // tslint:disable-next-line:no-bitwise
-      (mode === OpenMode.OPEN_READONLY ? sqlite3.OPEN_READONLY : sqlite3.OPEN_READWRITE) |
-      (mode === OpenMode.OPEN_CREATE || mode === OpenMode.CREATE_EXCL ? sqlite3.OPEN_CREATE : 0);
-
-    let _db: sqlite3.Database;
-    await fromCallback(cb => { _db = new sqlite3.Database(dbPath, sqliteMode, cb); });
-    limitAttach(_db!, 0);  // Outside of VACUUM, we don't allow ATTACH.
+    const minDb: MinDB = await getVariant().opener(dbPath, mode);
     if (SQLiteDB._addOpens(dbPath, 1) > 1) {
       log.warn("SQLiteDB[%s] avoid opening same DB more than once", dbPath);
     }
-    return new SQLiteDB(_db!, dbPath);
+    return new SQLiteDB(minDb, dbPath);
   }

   /**

@ -261,12 +255,29 @@ export class SQLiteDB implements ISQLiteDB {
   private _migrationError: Error|null = null;
   private _needVacuum: boolean = false;

-  private constructor(private _db: sqlite3.Database, private _dbPath: string) {
-    // Default database to serialized execution. See https://github.com/mapbox/node-sqlite3/wiki/Control-Flow
-    // This isn't enough for transactions, which we serialize explicitly.
-    this._db.serialize();
+  private constructor(protected _db: MinDB, private _dbPath: string) {
   }

+  public async all(sql: string, ...args: any[]): Promise<ResultRow[]> {
+    const result = await this._db.all(sql, ...args);
+    return result;
+  }
+
+  public run(sql: string, ...args: any[]): Promise<MinRunResult> {
+    return this._db.run(sql, ...args);
+  }
+
+  public exec(sql: string): Promise<void> {
+    return this._db.exec(sql);
+  }
+
+  public prepare(sql: string): Promise<PreparedStatement> {
+    return this._db.prepare(sql);
+  }
+
+  public get(sql: string, ...args: any[]): Promise<ResultRow|undefined> {
+    return this._db.get(sql, ...args);
+  }
+
   /**
    * If a DB was migrated on open, this will be set to the path of the pre-migration backup copy.
@ -285,40 +296,8 @@ export class SQLiteDB implements ISQLiteDB {
   // The following methods mirror https://github.com/mapbox/node-sqlite3/wiki/API, but return
   // Promises. We use fromCallback() rather than use promisify, to get better type-checking.

-  public exec(sql: string): Promise<void> {
-    return fromCallback(cb => this._db.exec(sql, cb));
-  }
-
-  public run(sql: string, ...params: any[]): Promise<RunResult> {
-    return new Promise((resolve, reject) => {
-      function callback(this: RunResult, err: Error | null) {
-        if (err) {
-          reject(err);
-        } else {
-          resolve(this);
-        }
-      }
-      this._db.run(sql, ...params, callback);
-    });
-  }
-
-  public get(sql: string, ...params: any[]): Promise<ResultRow|undefined> {
-    return fromCallback(cb => this._db.get(sql, ...params, cb));
-  }
-
-  public all(sql: string, ...params: any[]): Promise<ResultRow[]> {
-    return fromCallback(cb => this._db.all(sql, ...params, cb));
-  }
-
-  public allMarshal(sql: string, ...params: any[]): Promise<Buffer> {
-    // allMarshal isn't in the typings, because it is our addition to our fork of sqlite3 JS lib.
-    return fromCallback(cb => (this._db as any).allMarshal(sql, ...params, cb));
-  }
-
-  public prepare(sql: string, ...params: any[]): Promise<sqlite3.Statement> {
-    let stmt: sqlite3.Statement;
-    // The original interface is a little strange; we resolve to Statement if prepare() succeeded.
-    return fromCallback(cb => { stmt = this._db.prepare(sql, ...params, cb); }).then(() => stmt);
+  public async allMarshal(sql: string, ...params: any[]): Promise<Buffer> {
+    return this._db.allMarshal(sql, ...params);
   }

   /**

@ -336,11 +315,11 @@ export class SQLiteDB implements ISQLiteDB {
   }

   public async vacuum(): Promise<void> {
-    limitAttach(this._db, 1);  // VACUUM implementation uses ATTACH.
+    await this._db.limitAttach(1);  // VACUUM implementation uses ATTACH.
     try {
       await this.exec("VACUUM");
     } finally {
-      limitAttach(this._db, 0);  // Outside of VACUUM, we don't allow ATTACH.
+      await this._db.limitAttach(0);  // Outside of VACUUM, we don't allow ATTACH.
     }
   }

@ -348,25 +327,24 @@ export class SQLiteDB implements ISQLiteDB {
    * Run each of the statements in turn. Each statement is either a string, or an array of arguments
    * to db.run, e.g. [sqlString, [params...]].
    */
-  public runEach(...statements: Array<string | [string, any[]]>): Promise<void> {
-    return each(statements,
-      async (stmt: any) => {
-        try {
-          return await (Array.isArray(stmt) ?
-            this.run(stmt[0], ...stmt[1]) :
-            this.exec(stmt)
-          );
-        } catch (err) {
-          log.warn(`SQLiteDB: Failed to run ${stmt}`);
-          throw err;
-        }
-      }
-    );
+  public async runEach(...statements: Array<string | [string, any[]]>): Promise<void> {
+    for (const stmt of statements) {
+      try {
+        if (Array.isArray(stmt)) {
+          await this.run(stmt[0], ...stmt[1]);
+        } else {
+          await this.exec(stmt);
+        }
+      } catch (err) {
+        log.warn(`SQLiteDB: Failed to run ${stmt}`);
+        throw err;
+      }
+    }
   }

-  public close(): Promise<void> {
-    return fromCallback(cb => this._db.close(cb))
-      .then(() => { SQLiteDB._addOpens(this._dbPath, -1); });
+  public async close(): Promise<void> {
+    await this._db.close();
+    SQLiteDB._addOpens(this._dbPath, -1);
   }

   /**

@ -375,8 +353,7 @@ export class SQLiteDB implements ISQLiteDB {
    * is only useful if the sql is actually an INSERT operation, but we don't check this.
    */
   public async runAndGetId(sql: string, ...params: any[]): Promise<number> {
-    const result = await this.run(sql, ...params);
-    return result.lastID;
+    return this._db.runAndGetId(sql, ...params);
   }

   /**

@ -567,12 +544,3 @@ export function quoteIdent(ident: string): string {
   assert(/^[\w.]+$/.test(ident), `SQL identifier is not valid: ${ident}`);
   return `"${ident}"`;
 }
-
-/**
- * Limit the number of ATTACHed databases permitted.
- */
-export function limitAttach(db: sqlite3.Database, maxAttach: number) {
-  // Pardon the casts, types are out of date.
-  const SQLITE_LIMIT_ATTACHED = (sqlite3 as any).LIMIT_ATTACHED;
-  (db as any).configure('limit', SQLITE_LIMIT_ATTACHED, maxAttach);
-}
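After this refactor, callers of SQLiteDB interact only with the wrapper-agnostic surface shown above (run/get/all/runEach, and prepare resolving to a PreparedStatement). A usage sketch written against those signatures; the table and data are invented for illustration:

// Illustrative usage of the wrapper-agnostic API; schema and values are invented.
import {OpenMode, SQLiteDB} from 'app/server/lib/SQLiteDB';

async function demo(dbPath: string) {
  const db = await SQLiteDB.openDBRaw(dbPath, OpenMode.OPEN_CREATE);
  try {
    await db.runEach(
      "CREATE TABLE IF NOT EXISTS pets (id INTEGER PRIMARY KEY, name TEXT)",
      ["INSERT INTO pets (name) VALUES (?)", ["Rex"]],
    );
    const stmt = await db.prepare("INSERT INTO pets (name) VALUES (?)");
    await stmt.run("Mittens");
    await stmt.finalize();
    const rows = await db.all("SELECT id, name FROM pets");
    console.log(rows);
  } finally {
    await db.close();
  }
}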
126 app/server/lib/SqliteCommon.ts Normal file
@ -0,0 +1,126 @@
+import { Marshaller } from 'app/common/marshal';
+import { OpenMode, quoteIdent } from 'app/server/lib/SQLiteDB';
+
+/**
+ * Code common to SQLite wrappers.
+ */
+
+/**
+ * It is important that Statement exists - but we don't expect
+ * anything of it.
+ */
+// eslint-disable-next-line @typescript-eslint/no-empty-interface
+export interface Statement {}
+
+export interface MinDB {
+  exec(sql: string): Promise<void>;
+  run(sql: string, ...params: any[]): Promise<MinRunResult>;
+  get(sql: string, ...params: any[]): Promise<ResultRow|undefined>;
+  all(sql: string, ...params: any[]): Promise<ResultRow[]>;
+  prepare(sql: string, ...params: any[]): Promise<PreparedStatement>;
+  runAndGetId(sql: string, ...params: any[]): Promise<number>;
+  close(): Promise<void>;
+  allMarshal(sql: string, ...params: any[]): Promise<Buffer>;
+
+  /**
+   * Limit the number of ATTACHed databases permitted.
+   */
+  limitAttach(maxAttach: number): Promise<void>;
+}
+
+export interface MinRunResult {
+  changes: number;
+}
+
+// Describes the result of get() and all() database methods.
+export interface ResultRow {
+  [column: string]: any;
+}
+
+export interface PreparedStatement {
+  run(...params: any[]): Promise<MinRunResult>;
+  finalize(): Promise<void>;
+  columns(): string[];
+}
+
+export interface SqliteVariant {
+  opener(dbPath: string, mode: OpenMode): Promise<MinDB>;
+}
+
+/**
+ * A crude implementation of Grist marshalling.
+ * There is a fork of node-sqlite3 that has Grist
+ * marshalling built in, at:
+ *   https://github.com/gristlabs/node-sqlite3
+ * If using a version of SQLite without this built
+ * in, another option is to add custom functions
+ * to do it. This object has the initialize, step,
+ * and finalize callbacks typically needed to add
+ * a custom aggregration function.
+ */
+export const gristMarshal = {
+  initialize(): GristMarshalIntermediateValue {
+    return {};
+  },
+  step(accum: GristMarshalIntermediateValue, ...row: any[]) {
+    if (!accum.names || !accum.values) {
+      accum.names = row.map(value => String(value));
+      accum.values = row.map(() => []);
+    } else {
+      for (const [i, v] of row.entries()) {
+        accum.values[i].push(v);
+      }
+    }
+    return accum;
+  },
+  finalize(accum: GristMarshalIntermediateValue) {
+    const marshaller = new Marshaller({version: 2, keysAreBuffers: true});
+    const result: Record<string, Array<any>> = {};
+    if (accum.names && accum.values) {
+      for (const [i, name] of accum.names.entries()) {
+        result[name] = accum.values[i];
+      }
+    }
+    marshaller.marshal(result);
+    return marshaller.dumpAsBuffer();
+  }
+};
+
+/**
+ * An intermediate value used during an aggregation.
+ */
+interface GristMarshalIntermediateValue {
+  // The names of the columns, once known.
+  names?: string[];
+  // Values stored in the columns.
+  // There is one element in the outermost array per column.
+  // That element contains a list of values stored in that column.
+  values?: Array<Array<any>>;
+}
+
+/**
+ * Run Grist marshalling as a SQLite query, assuming
+ * a custom aggregation has been added as "grist_marshal".
+ * The marshalled result needs to contain the column
+ * identifiers embedded in it. This is a little awkward
+ * to organize - hence the hacky UNION here. This is
+ * for compatibility with the existing marshalling method,
+ * which could be replaced instead.
+ */
+export async function allMarshalQuery(db: MinDB, sql: string, ...params: any[]): Promise<Buffer> {
+  const statement = await db.prepare(sql);
+  const columns = statement.columns();
+  const quotedColumnList = columns.map(quoteIdent).join(',');
+  const query = await db.all(`select grist_marshal(${quotedColumnList}) as buf FROM ` +
+    `(select ${quotedColumnList} UNION ALL select * from (` + sql + '))', ..._fixParameters(params));
+  return query[0].buf;
+}
+
+/**
+ * Booleans need to be cast to 1 or 0 for SQLite.
+ * The node-sqlite3 wrapper does this automatically, but other
+ * wrappers do not.
+ */
+function _fixParameters(params: any[]) {
+  return params.map(p => p === true ? 1 : (p === false ? 0 : p));
+}
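gristMarshal and allMarshalQuery exist for wrappers that lack the fork's built-in marshalling: the aggregate has to end up callable as "grist_marshal" in SQL before allMarshalQuery can use it. A hedged sketch of that wiring; registerAggregate and its callback shape are assumptions about such a wrapper, not an API defined in this commit:

// Hedged sketch: registering gristMarshal on a wrapper that supports custom aggregates.
import {allMarshalQuery, gristMarshal, MinDB} from 'app/server/lib/SqliteCommon';

interface AggregateCapableDB extends MinDB {
  // Assumed extension point on the hypothetical wrapper.
  registerAggregate(name: string, callbacks: {
    initialize(): unknown,
    step(accum: any, ...args: any[]): unknown,
    finalize(accum: any): unknown,
  }): void;
}

async function fetchMarshalled(db: AggregateCapableDB, sql: string, ...params: any[]): Promise<Buffer> {
  db.registerAggregate('grist_marshal', gristMarshal);
  return allMarshalQuery(db, sql, ...params);
}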
104 app/server/lib/SqliteNode.ts Normal file
@ -0,0 +1,104 @@
+import * as sqlite3 from '@gristlabs/sqlite3';
+import { fromCallback } from 'app/server/lib/serverUtils';
+import { MinDB, PreparedStatement, ResultRow, SqliteVariant } from 'app/server/lib/SqliteCommon';
+import { OpenMode, RunResult } from 'app/server/lib/SQLiteDB';
+
+export class NodeSqliteVariant implements SqliteVariant {
+  public opener(dbPath: string, mode: OpenMode): Promise<MinDB> {
+    return NodeSqlite3DatabaseAdapter.opener(dbPath, mode);
+  }
+}
+
+export class NodeSqlite3PreparedStatement implements PreparedStatement {
+  public constructor(private _statement: sqlite3.Statement) {
+  }
+
+  public async run(...params: any[]): Promise<RunResult> {
+    return fromCallback(cb => this._statement.run(...params, cb));
+  }
+
+  public async finalize() {
+    await fromCallback(cb => this._statement.finalize(cb));
+  }
+
+  public columns(): string[] {
+    // This method is only needed if marshalling is not built in -
+    // and node-sqlite3 has marshalling built in.
+    throw new Error('not available (but should not be needed)');
+  }
+}
+
+export class NodeSqlite3DatabaseAdapter implements MinDB {
+  public static async opener(dbPath: string, mode: OpenMode): Promise<any> {
+    const sqliteMode: number =
+      // tslint:disable-next-line:no-bitwise
+      (mode === OpenMode.OPEN_READONLY ? sqlite3.OPEN_READONLY : sqlite3.OPEN_READWRITE) |
+      (mode === OpenMode.OPEN_CREATE || mode === OpenMode.CREATE_EXCL ? sqlite3.OPEN_CREATE : 0);
+    let _db: sqlite3.Database;
+    await fromCallback(cb => { _db = new sqlite3.Database(dbPath, sqliteMode, cb); });
+    const result = new NodeSqlite3DatabaseAdapter(_db!);
+    await result.limitAttach(0);  // Outside of VACUUM, we don't allow ATTACH.
+    return result;
+  }
+
+  public constructor(protected _db: sqlite3.Database) {
+    // Default database to serialized execution. See https://github.com/mapbox/node-sqlite3/wiki/Control-Flow
+    // This isn't enough for transactions, which we serialize explicitly.
+    this._db.serialize();
+  }
+
+  public async exec(sql: string): Promise<void> {
+    return fromCallback(cb => this._db.exec(sql, cb));
+  }
+
+  public async run(sql: string, ...params: any[]): Promise<RunResult> {
+    return new Promise((resolve, reject) => {
+      function callback(this: RunResult, err: Error | null) {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(this);
+        }
+      }
+      this._db.run(sql, ...params, callback);
+    });
+  }
+
+  public async get(sql: string, ...params: any[]): Promise<ResultRow|undefined> {
+    return fromCallback(cb => this._db.get(sql, ...params, cb));
+  }
+
+  public async all(sql: string, ...params: any[]): Promise<ResultRow[]> {
+    return fromCallback(cb => this._db.all(sql, params, cb));
+  }
+
+  public async prepare(sql: string): Promise<PreparedStatement> {
+    let stmt: sqlite3.Statement|undefined;
+    // The original interface is a little strange; we resolve to Statement if prepare() succeeded.
+    await fromCallback(cb => { stmt = this._db.prepare(sql, cb); }).then(() => stmt);
+    if (!stmt) { throw new Error('could not prepare statement'); }
+    return new NodeSqlite3PreparedStatement(stmt);
+  }
+
+  public async close() {
+    this._db.close();
+  }
+
+  public async allMarshal(sql: string, ...params: any[]): Promise<Buffer> {
+    // allMarshal isn't in the typings, because it is our addition to our fork of sqlite3 JS lib.
+    return fromCallback(cb => (this._db as any).allMarshal(sql, ...params, cb));
+  }
+
+  public async runAndGetId(sql: string, ...params: any[]): Promise<number> {
+    const result = await this.run(sql, ...params);
+    return (result as any).lastID;
+  }
+
+  public async limitAttach(maxAttach: number) {
+    const SQLITE_LIMIT_ATTACHED = (sqlite3 as any).LIMIT_ATTACHED;
+    // Cast because types out of date.
+    (this._db as any).configure('limit', SQLITE_LIMIT_ATTACHED, maxAttach);
+  }
+}
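NodeSqlite3DatabaseAdapter is the reference MinDB implementation; a build like grist-static can supply a different one through create.getSqliteVariant(). A hedged sketch of the shape such a variant takes, written against an imaginary synchronous driver; only the MinDB, SqliteVariant and PreparedStatement surfaces are taken from SqliteCommon above, everything named "Imaginary" or "SyncDriver" is invented:

// Hedged sketch only: SyncDriver stands in for a hypothetical wasm-style wrapper.
import {allMarshalQuery, MinDB, MinRunResult, PreparedStatement,
        ResultRow, SqliteVariant} from 'app/server/lib/SqliteCommon';
import {OpenMode} from 'app/server/lib/SQLiteDB';

interface SyncDriver {
  run(sql: string, params: any[]): {changes: number, lastInsertRowid: number};
  all(sql: string, params: any[]): ResultRow[];
  columns(sql: string): string[];
  setLimit(name: string, value: number): void;
  close(): void;
}

export class ImaginarySqliteVariant implements SqliteVariant {
  public constructor(private _open: (path: string, mode: OpenMode) => SyncDriver) {}
  public async opener(dbPath: string, mode: OpenMode): Promise<MinDB> {
    return new ImaginaryMinDB(this._open(dbPath, mode));
  }
}

class ImaginaryMinDB implements MinDB {
  public constructor(private _driver: SyncDriver) {}
  public async exec(sql: string) { this._driver.run(sql, []); }
  public async run(sql: string, ...params: any[]): Promise<MinRunResult> {
    return {changes: this._driver.run(sql, params).changes};
  }
  public async get(sql: string, ...params: any[]) { return this._driver.all(sql, params)[0]; }
  public async all(sql: string, ...params: any[]) { return this._driver.all(sql, params); }
  public async prepare(sql: string): Promise<PreparedStatement> {
    const driver = this._driver;
    return {
      async run(...params: any[]) { return {changes: driver.run(sql, params).changes}; },
      async finalize() { /* nothing to free in this imaginary driver */ },
      columns() { return driver.columns(sql); },
    };
  }
  public async runAndGetId(sql: string, ...params: any[]) {
    return this._driver.run(sql, params).lastInsertRowid;
  }
  public async close() { this._driver.close(); }
  public async allMarshal(sql: string, ...params: any[]) {
    // No marshalling built in, so lean on the grist_marshal aggregate helper.
    return allMarshalQuery(this, sql, ...params);
  }
  public async limitAttach(maxAttach: number) { this._driver.setLimit('attach', maxAttach); }
}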
@ -60,7 +60,12 @@ export class WorkCoordinator {

   private _maybeSchedule() {
     if (this._isStepScheduled && !this._isStepRunning) {
+      try {
         setImmediate(this._tryNextStepCB);
+      } catch (e) {
+        // setImmediate may not be available outside node.
+        setTimeout(this._tryNextStepCB, 0);
+      }
     }
   }
 }
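The try/catch above keeps WorkCoordinator working in environments without setImmediate (for instance a browser build). The same idea expressed as a small standalone helper, included only as an illustration and not part of this commit:

// Illustrative helper: prefer setImmediate when the runtime provides it,
// otherwise fall back to setTimeout(..., 0).
function defer(fn: () => void): void {
  if (typeof setImmediate === 'function') {
    setImmediate(fn);
  } else {
    setTimeout(fn, 0);
  }
}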
@ -8,7 +8,6 @@ import {RequestWithGrist} from 'app/server/lib/GristServer';
 import log from 'app/server/lib/log';
 import {Permit} from 'app/server/lib/Permit';
 import {Request, Response} from 'express';
-import {URL} from 'url';
 import _ from 'lodash';

 // log api details outside of dev environment (when GRIST_HOSTED_VERSION is set)
@ -3,8 +3,8 @@
  */

+var log = require('app/server/lib/log');
 var Promise = require('bluebird');
-var log = require('./log');

 var cleanupHandlers = [];
@ -7,11 +7,17 @@ export GRIST_EXT=stubs
 if [[ -e ext/app ]]; then
   PROJECT="tsconfig-ext.json"
 fi
+WEBPACK_CONFIG=buildtools/webpack.config.js
+if [[ -e ext/buildtools/webpack.config.js ]]; then
+  # Allow webpack config file to be replaced (useful
+  # for grist-static)
+  WEBPACK_CONFIG=ext/buildtools/webpack.config.js
+fi

 set -x
 tsc --build $PROJECT
 buildtools/update_type_info.sh app
-webpack --config buildtools/webpack.config.js --mode production
+webpack --config $WEBPACK_CONFIG --mode production
 webpack --config buildtools/webpack.check.js --mode production
 webpack --config buildtools/webpack.api.config.js --mode production
 cat app/client/*.css app/client/*/*.css > static/bundle.css

3 sandbox/MANIFEST.in Normal file
@ -0,0 +1,3 @@
+# see bundle_as_wheel.sh
+
+include grist/tzdata.data

17 sandbox/bundle_as_wheel.sh Executable file
@ -0,0 +1,17 @@
+#!/bin/bash
+
+# Package up Grist code as a stand-alone wheel.
+# This is useful for grist-static.
+# It is the reason why MANIFEST.in and setup.py are present.
+
+set -e
+
+# Clean up any previous packaging.
+rm -rf dist foo.egg-info grist.egg-info build
+
+# Go ahead and run packaging again.
+python setup.py bdist_wheel
+
+echo ""
+echo "Result is in the dist directory:"
+ls dist

15 sandbox/setup.py Normal file
@ -0,0 +1,15 @@
+# see bundle_as_wheel.sh
+
+from distutils.core import setup
+import glob
+
+files = glob.glob('grist/*.py') + glob.glob('grist/**/*.py')
+names = [f.split('.py')[0] for f in files]
+
+setup(name='grist',
+      version='1.0',
+      include_package_data=True,
+      packages=['grist', 'grist/functions', 'grist/imports'],
+      package_data={
+        'grist': ['grist/tzdata.data'],
+      })

@ -7,13 +7,17 @@ export GRIST_EXT=stubs
 if [[ -e ext/app ]]; then
   PROJECT="tsconfig-ext.json"
 fi
+WEBPACK_CONFIG=buildtools/webpack.config.js
+if [[ -e ext/buildtools/webpack.config.js ]]; then
+  WEBPACK_CONFIG=ext/buildtools/webpack.config.js
+fi

 if [ ! -e _build ]; then
   buildtools/build.sh
 fi

 tsc --build -w --preserveWatchOutput $PROJECT &
-catw app/client/*.css app/client/*/*.css -o static/bundle.css -v & webpack --config buildtools/webpack.config.js --mode development --watch &
+catw app/client/*.css app/client/*/*.css -o static/bundle.css -v & webpack --config $WEBPACK_CONFIG --mode development --watch &
 NODE_PATH=_build:_build/stubs:_build/ext nodemon --delay 1 -w _build/app/server -w _build/app/common _build/stubs/app/server/server.js &

 wait

@ -15,7 +15,6 @@ import {
   HostedStorageOptions
 } from 'app/server/lib/HostedStorageManager';
 import log from 'app/server/lib/log';
-import {fromCallback} from 'app/server/lib/serverUtils';
 import {SQLiteDB} from 'app/server/lib/SQLiteDB';
 import * as bluebird from 'bluebird';
 import {assert} from 'chai';

@ -931,9 +930,9 @@ describe('backupSqliteDatabase', async function() {
       // Silly code to make a long random string to insert.
       // We can make a big db faster this way.
       const str = (new Array(100)).fill(1).map((_: any) => Math.random().toString(2)).join();
-      stmt.run(str, str, str);
+      await stmt.run(str, str, str);
     }
-    await fromCallback(cb => stmt.finalize(cb));
+    await stmt.finalize();
   });
   const stat = await fse.stat(src);
   assert(stat.size > 150 * 1000 * 1000);