mirror of
https://github.com/gristlabs/grist-core.git
synced 2024-10-27 20:44:07 +00:00
(core) disentangle some server tests, release to core, add GRIST_PROXY_AUTH_HEADER test
Summary: This shuffles some server tests to make them available in grist-core, and adds a test for the `GRIST_PROXY_AUTH_HEADER` feature added in https://github.com/gristlabs/grist-core/pull/165 It includes a fix for a header normalization issue for websocket connections. Test Plan: added test Reviewers: georgegevoian Reviewed By: georgegevoian Differential Revision: https://phab.getgrist.com/D3326
This commit is contained in:
parent
64c9717ac1
commit
de703343d0
8
app/server/declarations/tmp.d.ts
vendored
Normal file
8
app/server/declarations/tmp.d.ts
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
import {Options, SimpleOptions} from "tmp";
|
||||
|
||||
// Add declarations of the promisifies methods of tmp.
|
||||
declare module "tmp" {
|
||||
function dirAsync(config?: Options): Promise<string>;
|
||||
function fileAsync(config?: Options): Promise<string>;
|
||||
function tmpNameAsync(config?: SimpleOptions): Promise<string>;
|
||||
}
|
@ -17,6 +17,7 @@ import {IPermitStore, Permit} from 'app/server/lib/Permit';
|
||||
import {allowHost, optStringParam} from 'app/server/lib/requestUtils';
|
||||
import * as cookie from 'cookie';
|
||||
import {NextFunction, Request, RequestHandler, Response} from 'express';
|
||||
import {IncomingMessage} from 'http';
|
||||
import * as onHeaders from 'on-headers';
|
||||
|
||||
export interface RequestWithLogin extends Request {
|
||||
@ -95,12 +96,14 @@ export function isSingleUserMode(): boolean {
|
||||
* header to specify the users' email address. The header to set comes from the
|
||||
* environment variable GRIST_PROXY_AUTH_HEADER.
|
||||
*/
|
||||
export function getRequestProfile(req: Request): UserProfile|undefined {
|
||||
export function getRequestProfile(req: Request|IncomingMessage): UserProfile|undefined {
|
||||
const header = process.env.GRIST_PROXY_AUTH_HEADER;
|
||||
let profile: UserProfile|undefined;
|
||||
|
||||
if (header && req.headers && req.headers[header]) {
|
||||
const headerContent = req.headers[header];
|
||||
if (header) {
|
||||
// Careful reading headers. If we have an IncomingMessage, there is no
|
||||
// get() function, and header names are lowercased.
|
||||
const headerContent = ('get' in req) ? req.get(header) : req.headers[header.toLowerCase()];
|
||||
if (headerContent) {
|
||||
const userEmail = headerContent.toString();
|
||||
const [userName] = userEmail.split("@", 1);
|
||||
@ -543,7 +546,7 @@ export function getTransitiveHeaders(req: Request): {[key: string]: string} {
|
||||
const XRequestedWith = req.get('X-Requested-With');
|
||||
const Origin = req.get('Origin'); // Pass along the original Origin since it may
|
||||
// play a role in granular access control.
|
||||
return {
|
||||
const result: Record<string, string> = {
|
||||
...(Authorization ? { Authorization } : undefined),
|
||||
...(Cookie ? { Cookie } : undefined),
|
||||
...(Organization ? { Organization } : undefined),
|
||||
@ -551,6 +554,12 @@ export function getTransitiveHeaders(req: Request): {[key: string]: string} {
|
||||
...(XRequestedWith ? { 'X-Requested-With': XRequestedWith } : undefined),
|
||||
...(Origin ? { Origin } : undefined),
|
||||
};
|
||||
const extraHeader = process.env.GRIST_PROXY_AUTH_HEADER;
|
||||
const extraHeaderValue = extraHeader && req.get(extraHeader);
|
||||
if (extraHeader && extraHeaderValue) {
|
||||
result[extraHeader] = extraHeaderValue;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export const signInStatusCookieName = sessionCookieName + '_status';
|
||||
|
@ -12,7 +12,8 @@
|
||||
"install:python3": "buildtools/prepare_python3.sh",
|
||||
"build:prod": "tsc --build && webpack --config buildtools/webpack.config.js --mode production && webpack --config buildtools/webpack.check.js --mode production && cat app/client/*.css app/client/*/*.css > static/bundle.css",
|
||||
"start:prod": "NODE_PATH=_build:_build/stubs node _build/stubs/app/server/server.js",
|
||||
"test": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/*.js",
|
||||
"test": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/*.js _build/test/server/**/*.js _build/test/gen-server/**/*.js",
|
||||
"test:server": "GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs mocha _build/test/server/**/*.js _build/test/gen-server/**/*.js",
|
||||
"test:smoke": "NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/Smoke.js",
|
||||
"test:docker": "./test/test_under_docker.sh"
|
||||
},
|
||||
@ -57,6 +58,7 @@
|
||||
"@types/tmp": "0.0.33",
|
||||
"@types/uuid": "3.4.4",
|
||||
"@types/which": "2.0.1",
|
||||
"app-module-path": "2.2.0",
|
||||
"catw": "1.0.1",
|
||||
"chai": "4.2.0",
|
||||
"chai-as-promised": "7.1.1",
|
||||
|
BIN
test/fixtures/docs/ApiDataRecordsTest.grist
vendored
Normal file
BIN
test/fixtures/docs/ApiDataRecordsTest.grist
vendored
Normal file
Binary file not shown.
BIN
test/fixtures/docs/Favorite_Films.grist
vendored
Normal file
BIN
test/fixtures/docs/Favorite_Films.grist
vendored
Normal file
Binary file not shown.
640
test/gen-server/seed.ts
Normal file
640
test/gen-server/seed.ts
Normal file
@ -0,0 +1,640 @@
|
||||
/**
|
||||
*
|
||||
* Can run standalone as:
|
||||
* ts-node test/gen-server/seed.ts serve
|
||||
* By default, uses a landing.db database in current directory.
|
||||
* Can prefix with database overrides, e.g.
|
||||
* TYPEORM_DATABASE=:memory:
|
||||
* TYPEORM_DATABASE=/tmp/test.db
|
||||
* To connect to a postgres database, change ormconfig.env, or add a bunch of variables:
|
||||
* export TYPEORM_CONNECTION=postgres
|
||||
* export TYPEORM_HOST=localhost
|
||||
* export TYPEORM_DATABASE=landing
|
||||
* export TYPEORM_USERNAME=development
|
||||
* export TYPEORM_PASSWORD=*****
|
||||
*
|
||||
* To just set up the database (migrate and add seed data), and then stop immediately, do:
|
||||
* ts-node test/gen-server/seed.ts init
|
||||
* To apply all migrations to the db, do:
|
||||
* ts-node test/gen-server/seed.ts migrate
|
||||
* To revert the last migration:
|
||||
* ts-node test/gen-server/seed.ts revert
|
||||
*
|
||||
*/
|
||||
|
||||
import {addPath} from 'app-module-path';
|
||||
import {IHookCallbackContext} from 'mocha';
|
||||
import * as path from 'path';
|
||||
import {Connection, createConnection, getConnectionManager, Repository} from 'typeorm';
|
||||
|
||||
if (require.main === module) {
|
||||
addPath(path.dirname(path.dirname(__dirname)));
|
||||
}
|
||||
|
||||
import {AclRuleDoc, AclRuleOrg, AclRuleWs} from "app/gen-server/entity/AclRule";
|
||||
import {BillingAccount} from "app/gen-server/entity/BillingAccount";
|
||||
import {Document} from "app/gen-server/entity/Document";
|
||||
import {Group} from "app/gen-server/entity/Group";
|
||||
import {Login} from "app/gen-server/entity/Login";
|
||||
import {Organization} from "app/gen-server/entity/Organization";
|
||||
import {Product, synchronizeProducts} from "app/gen-server/entity/Product";
|
||||
import {User} from "app/gen-server/entity/User";
|
||||
import {Workspace} from "app/gen-server/entity/Workspace";
|
||||
import {EXAMPLE_WORKSPACE_NAME} from 'app/gen-server/lib/HomeDBManager';
|
||||
import {Permissions} from 'app/gen-server/lib/Permissions';
|
||||
import {runMigrations, undoLastMigration, updateDb} from 'app/server/lib/dbUtils';
|
||||
import {FlexServer} from 'app/server/lib/FlexServer';
|
||||
import * as fse from 'fs-extra';
|
||||
|
||||
const ACCESS_GROUPS = ['owners', 'editors', 'viewers', 'guests', 'members'];
|
||||
|
||||
export const exampleOrgs = [
|
||||
{
|
||||
name: 'NASA',
|
||||
domain: 'nasa',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Horizon',
|
||||
docs: ['Jupiter', 'Pluto', 'Beyond']
|
||||
},
|
||||
{
|
||||
name: 'Rovers',
|
||||
docs: ['Curiosity', 'Apathy']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Primately',
|
||||
domain: 'pr',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Fruit',
|
||||
docs: ['Bananas', 'Apples']
|
||||
},
|
||||
{
|
||||
name: 'Trees',
|
||||
docs: ['Tall', 'Short']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Flightless',
|
||||
domain: 'fly',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Media',
|
||||
docs: ['Australia', 'Antartic']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Abyss',
|
||||
domain: 'deep',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Deep',
|
||||
docs: ['Unfathomable']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Chimpyland',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Private',
|
||||
docs: ['Timesheets', 'Appointments']
|
||||
},
|
||||
{
|
||||
name: 'Public',
|
||||
docs: []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Kiwiland',
|
||||
workspaces: []
|
||||
},
|
||||
{
|
||||
name: 'EmptyWsOrg',
|
||||
domain: 'blanky',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Vacuum',
|
||||
docs: []
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'EmptyOrg',
|
||||
domain: 'blankiest',
|
||||
workspaces: []
|
||||
},
|
||||
{
|
||||
name: 'Fish',
|
||||
domain: 'fish',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Big',
|
||||
docs: [
|
||||
'Shark'
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Small',
|
||||
docs: [
|
||||
'Anchovy',
|
||||
'Herring'
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Supportland',
|
||||
workspaces: [
|
||||
{
|
||||
name: EXAMPLE_WORKSPACE_NAME,
|
||||
docs: ['Hello World', 'Sample Example']
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'Shiny',
|
||||
domain: 'shiny',
|
||||
host: 'www.shiny-grist.io',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'Tailor Made',
|
||||
docs: ['Suits', 'Shoes']
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
name: 'FreeTeam',
|
||||
domain: 'freeteam',
|
||||
product: 'teamFree',
|
||||
workspaces: [
|
||||
{
|
||||
name: 'FreeTeamWs',
|
||||
docs: [],
|
||||
}
|
||||
]
|
||||
},
|
||||
];
|
||||
|
||||
|
||||
const exampleUsers: {[user: string]: {[org: string]: string}} = {
|
||||
Chimpy: {
|
||||
FreeTeam: 'owners',
|
||||
Chimpyland: 'owners',
|
||||
NASA: 'owners',
|
||||
Primately: 'guests',
|
||||
Fruit: 'viewers',
|
||||
Flightless: 'guests',
|
||||
Media: 'guests',
|
||||
Antartic: 'viewers',
|
||||
EmptyOrg: 'editors',
|
||||
EmptyWsOrg: 'editors',
|
||||
Fish: 'owners'
|
||||
},
|
||||
Kiwi: {
|
||||
Kiwiland: 'owners',
|
||||
Flightless: 'editors',
|
||||
Primately: 'viewers',
|
||||
Fish: 'editors'
|
||||
},
|
||||
Charon: {
|
||||
NASA: 'guests',
|
||||
Horizon: 'guests',
|
||||
Pluto: 'viewers',
|
||||
Chimpyland: 'viewers',
|
||||
Fish: 'viewers',
|
||||
Abyss: 'owners',
|
||||
},
|
||||
// User support@ owns a workspace "Examples & Templates" in its personal org. It can be shared
|
||||
// with everyone@ to let all users see it (this is not done here to avoid impacting all tests).
|
||||
Support: { Supportland: 'owners' },
|
||||
};
|
||||
|
||||
interface Groups {
|
||||
owners: Group;
|
||||
editors: Group;
|
||||
viewers: Group;
|
||||
guests: Group;
|
||||
members?: Group;
|
||||
}
|
||||
|
||||
class Seed {
|
||||
public userRepository: Repository<User>;
|
||||
public groupRepository: Repository<Group>;
|
||||
public groups: {[key: string]: Groups};
|
||||
|
||||
constructor(public connection: Connection) {
|
||||
this.userRepository = connection.getRepository(User);
|
||||
this.groupRepository = connection.getRepository(Group);
|
||||
this.groups = {};
|
||||
}
|
||||
|
||||
public async createGroups(parent?: Organization|Workspace): Promise<Groups> {
|
||||
const owners = new Group();
|
||||
owners.name = 'owners';
|
||||
const editors = new Group();
|
||||
editors.name = 'editors';
|
||||
const viewers = new Group();
|
||||
viewers.name = 'viewers';
|
||||
const guests = new Group();
|
||||
guests.name = 'guests';
|
||||
|
||||
if (parent) {
|
||||
// Nest the parent groups inside the new groups
|
||||
const parentGroups = this.groups[parent.name];
|
||||
owners.memberGroups = [parentGroups.owners];
|
||||
editors.memberGroups = [parentGroups.editors];
|
||||
viewers.memberGroups = [parentGroups.viewers];
|
||||
}
|
||||
|
||||
await this.groupRepository.save([owners, editors, viewers, guests]);
|
||||
|
||||
if (!parent) {
|
||||
// Add the members group for orgs.
|
||||
const members = new Group();
|
||||
members.name = 'members';
|
||||
await this.groupRepository.save(members);
|
||||
return {
|
||||
owners,
|
||||
editors,
|
||||
viewers,
|
||||
guests,
|
||||
members
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
owners,
|
||||
editors,
|
||||
viewers,
|
||||
guests
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public async addOrgToGroups(groups: Groups, org: Organization) {
|
||||
const acl0 = new AclRuleOrg();
|
||||
acl0.group = groups.members!;
|
||||
acl0.permissions = Permissions.VIEW;
|
||||
acl0.organization = org;
|
||||
|
||||
const acl1 = new AclRuleOrg();
|
||||
acl1.group = groups.guests;
|
||||
acl1.permissions = Permissions.VIEW;
|
||||
acl1.organization = org;
|
||||
|
||||
const acl2 = new AclRuleOrg();
|
||||
acl2.group = groups.viewers;
|
||||
acl2.permissions = Permissions.VIEW;
|
||||
acl2.organization = org;
|
||||
|
||||
const acl3 = new AclRuleOrg();
|
||||
acl3.group = groups.editors;
|
||||
acl3.permissions = Permissions.EDITOR;
|
||||
acl3.organization = org;
|
||||
|
||||
const acl4 = new AclRuleOrg();
|
||||
acl4.group = groups.owners;
|
||||
acl4.permissions = Permissions.OWNER;
|
||||
acl4.organization = org;
|
||||
|
||||
// should be able to save both together, but typeorm messes up on postgres.
|
||||
await acl0.save();
|
||||
await acl1.save();
|
||||
await acl2.save();
|
||||
await acl3.save();
|
||||
await acl4.save();
|
||||
}
|
||||
|
||||
public async addWorkspaceToGroups(groups: Groups, ws: Workspace) {
|
||||
const acl1 = new AclRuleWs();
|
||||
acl1.group = groups.guests;
|
||||
acl1.permissions = Permissions.VIEW;
|
||||
acl1.workspace = ws;
|
||||
|
||||
const acl2 = new AclRuleWs();
|
||||
acl2.group = groups.viewers;
|
||||
acl2.permissions = Permissions.VIEW;
|
||||
acl2.workspace = ws;
|
||||
|
||||
const acl3 = new AclRuleWs();
|
||||
acl3.group = groups.editors;
|
||||
acl3.permissions = Permissions.EDITOR;
|
||||
acl3.workspace = ws;
|
||||
|
||||
const acl4 = new AclRuleWs();
|
||||
acl4.group = groups.owners;
|
||||
acl4.permissions = Permissions.OWNER;
|
||||
acl4.workspace = ws;
|
||||
|
||||
// should be able to save both together, but typeorm messes up on postgres.
|
||||
await acl1.save();
|
||||
await acl2.save();
|
||||
await acl3.save();
|
||||
await acl4.save();
|
||||
}
|
||||
|
||||
public async addDocumentToGroups(groups: Groups, doc: Document) {
|
||||
const acl1 = new AclRuleDoc();
|
||||
acl1.group = groups.guests;
|
||||
acl1.permissions = Permissions.VIEW;
|
||||
acl1.document = doc;
|
||||
|
||||
const acl2 = new AclRuleDoc();
|
||||
acl2.group = groups.viewers;
|
||||
acl2.permissions = Permissions.VIEW;
|
||||
acl2.document = doc;
|
||||
|
||||
const acl3 = new AclRuleDoc();
|
||||
acl3.group = groups.editors;
|
||||
acl3.permissions = Permissions.EDITOR;
|
||||
acl3.document = doc;
|
||||
|
||||
const acl4 = new AclRuleDoc();
|
||||
acl4.group = groups.owners;
|
||||
acl4.permissions = Permissions.OWNER;
|
||||
acl4.document = doc;
|
||||
|
||||
await acl1.save();
|
||||
await acl2.save();
|
||||
await acl3.save();
|
||||
await acl4.save();
|
||||
}
|
||||
|
||||
public async addUserToGroup(user: User, group: Group) {
|
||||
await this.connection.createQueryBuilder()
|
||||
.relation(Group, "memberUsers")
|
||||
.of(group)
|
||||
.add(user);
|
||||
}
|
||||
|
||||
public async addDocs(orgs: Array<{name: string, domain?: string, host?: string, product?: string,
|
||||
workspaces: Array<{name: string, docs: string[]}>}>) {
|
||||
let docId = 1;
|
||||
for (const org of orgs) {
|
||||
const o = new Organization();
|
||||
o.name = org.name;
|
||||
const ba = new BillingAccount();
|
||||
ba.individual = false;
|
||||
const productName = org.product || 'Free';
|
||||
ba.product = (await Product.findOne({name: productName}))!;
|
||||
o.billingAccount = ba;
|
||||
if (org.domain) { o.domain = org.domain; }
|
||||
if (org.host) { o.host = org.host; }
|
||||
await ba.save();
|
||||
await o.save();
|
||||
const grps = await this.createGroups();
|
||||
this.groups[o.name] = grps;
|
||||
await this.addOrgToGroups(grps, o);
|
||||
for (const workspace of org.workspaces) {
|
||||
const w = new Workspace();
|
||||
w.name = workspace.name;
|
||||
w.org = o;
|
||||
await w.save();
|
||||
const wgrps = await this.createGroups(o);
|
||||
this.groups[w.name] = wgrps;
|
||||
await this.addWorkspaceToGroups(wgrps, w);
|
||||
for (const doc of workspace.docs) {
|
||||
const d = new Document();
|
||||
d.name = doc;
|
||||
d.workspace = w;
|
||||
d.id = `sample_${docId}`;
|
||||
docId++;
|
||||
await d.save();
|
||||
const dgrps = await this.createGroups(w);
|
||||
this.groups[d.name] = dgrps;
|
||||
await this.addDocumentToGroups(dgrps, d);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async run() {
|
||||
if (await this.userRepository.findOne()) {
|
||||
// we already have a user - skip seeding database
|
||||
return;
|
||||
}
|
||||
|
||||
await this.addDocs(exampleOrgs);
|
||||
await this._buildUsers(exampleUsers);
|
||||
}
|
||||
|
||||
// Creates benchmark data with 10 orgs, 50 workspaces per org and 20 docs per workspace.
|
||||
public async runBenchmark() {
|
||||
if (await this.userRepository.findOne()) {
|
||||
// we already have a user - skip seeding database
|
||||
return;
|
||||
}
|
||||
|
||||
await this.connection.runMigrations();
|
||||
|
||||
const benchmarkOrgs = _generateData(100, 50, 20);
|
||||
// Create an access object giving Chimpy random access to the orgs.
|
||||
const chimpyAccess: {[name: string]: string} = {};
|
||||
benchmarkOrgs.forEach((_org: any) => {
|
||||
const zeroToThree = Math.floor(Math.random() * 4);
|
||||
chimpyAccess[_org.name] = ACCESS_GROUPS[zeroToThree];
|
||||
});
|
||||
|
||||
await this.addDocs(benchmarkOrgs);
|
||||
await this._buildUsers({ Chimpy: chimpyAccess });
|
||||
}
|
||||
|
||||
private async _buildUsers(userAccessMap: {[user: string]: {[org: string]: string}}) {
|
||||
for (const name of Object.keys(userAccessMap)) {
|
||||
const user = new User();
|
||||
user.name = name;
|
||||
user.apiKey = "api_key_for_" + name.toLowerCase();
|
||||
await user.save();
|
||||
const login = new Login();
|
||||
login.displayEmail = login.email = name.toLowerCase() + "@getgrist.com";
|
||||
login.user = user;
|
||||
await login.save();
|
||||
const personal = await Organization.findOne({name: name + "land"});
|
||||
if (personal) {
|
||||
personal.owner = user;
|
||||
await personal.save();
|
||||
}
|
||||
for (const org of Object.keys(userAccessMap[name])) {
|
||||
await this.addUserToGroup(user, (this.groups[org] as any)[userAccessMap[name][org]]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// When running mocha on several test files at once, we need to reset our database connection
|
||||
// if it exists. This is a little ugly since it is stored globally.
|
||||
export async function removeConnection() {
|
||||
if (getConnectionManager().connections.length > 0) {
|
||||
if (getConnectionManager().connections.length > 1) {
|
||||
throw new Error("unexpected number of connections");
|
||||
}
|
||||
await getConnectionManager().connections[0].close();
|
||||
// There is no official way to delete connections that I've found.
|
||||
(getConnectionManager().connections as any) = [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function createInitialDb(connection?: Connection, migrateAndSeedData: boolean = true) {
|
||||
// In jenkins tests, we may want to reset the database to a clean
|
||||
// state. If so, TEST_CLEAN_DATABASE will have been set. How to
|
||||
// clean the database depends on what kind of database it is. With
|
||||
// postgres, it suffices to recreate our schema ("public", the
|
||||
// default). With sqlite, it suffices to delete the file -- but we
|
||||
// are only allowed to do this if there is no connection open to it
|
||||
// (so we fail if a connection has already been made). If the
|
||||
// sqlite db is in memory (":memory:") there's nothing to delete.
|
||||
const uncommitted = !connection; // has user already created a connection?
|
||||
// if so we won't be able to delete sqlite db
|
||||
connection = connection || await createConnection();
|
||||
const opt = connection.driver.options;
|
||||
if (process.env.TEST_CLEAN_DATABASE) {
|
||||
if (opt.type === 'sqlite') {
|
||||
const database = (opt as any).database;
|
||||
// Only dbs on disk need to be deleted
|
||||
if (database !== ':memory:') {
|
||||
// We can only delete on-file dbs if no connection is open to them
|
||||
if (!uncommitted) {
|
||||
throw Error("too late to clean sqlite db");
|
||||
}
|
||||
await removeConnection();
|
||||
if (await fse.pathExists(database)) {
|
||||
await fse.unlink(database);
|
||||
}
|
||||
connection = await createConnection();
|
||||
}
|
||||
} else if (opt.type === 'postgres') {
|
||||
// recreate schema, destroying everything that was inside it
|
||||
await connection.query("DROP SCHEMA public CASCADE;");
|
||||
await connection.query("CREATE SCHEMA public;");
|
||||
} else {
|
||||
throw new Error(`do not know how to clean a ${opt.type} db`);
|
||||
}
|
||||
}
|
||||
|
||||
// Finally - actually initialize the database.
|
||||
if (migrateAndSeedData) {
|
||||
await updateDb(connection);
|
||||
await addSeedData(connection);
|
||||
}
|
||||
}
|
||||
|
||||
// add some test data to the database.
|
||||
export async function addSeedData(connection: Connection) {
|
||||
await synchronizeProducts(connection, true);
|
||||
await connection.transaction(async tr => {
|
||||
const seed = new Seed(tr.connection);
|
||||
await seed.run();
|
||||
});
|
||||
}
|
||||
|
||||
export async function createBenchmarkDb(connection?: Connection) {
|
||||
connection = connection || await createConnection();
|
||||
await updateDb(connection);
|
||||
await connection.transaction(async tr => {
|
||||
const seed = new Seed(tr.connection);
|
||||
await seed.runBenchmark();
|
||||
});
|
||||
}
|
||||
|
||||
export async function createServer(port: number, initDb = createInitialDb): Promise<FlexServer> {
|
||||
const flexServer = new FlexServer(port);
|
||||
flexServer.addJsonSupport();
|
||||
await flexServer.start();
|
||||
await flexServer.initHomeDBManager();
|
||||
flexServer.addAccessMiddleware();
|
||||
flexServer.addApiMiddleware();
|
||||
flexServer.addHomeApi();
|
||||
flexServer.addApiErrorHandlers();
|
||||
await initDb(flexServer.getHomeDBManager().connection);
|
||||
flexServer.summary();
|
||||
return flexServer;
|
||||
}
|
||||
|
||||
export async function createBenchmarkServer(port: number): Promise<FlexServer> {
|
||||
return createServer(port, createBenchmarkDb);
|
||||
}
|
||||
|
||||
// Generates a random dataset of orgs, workspaces and docs. The number of workspaces
|
||||
// given is per org, and the number of docs given is per workspace.
|
||||
function _generateData(numOrgs: number, numWorkspaces: number, numDocs: number) {
|
||||
if (numOrgs < 1 || numWorkspaces < 1 || numDocs < 0) {
|
||||
throw new Error('_generateData error: Invalid arguments');
|
||||
}
|
||||
const example = [];
|
||||
for (let i = 0; i < numOrgs; i++) {
|
||||
const workspaces = [];
|
||||
for (let j = 0; j < numWorkspaces; j++) {
|
||||
const docs = [];
|
||||
for (let k = 0; k < numDocs; k++) {
|
||||
const docIndex = (i * numWorkspaces * numDocs) + (j * numDocs) + k;
|
||||
docs.push(`doc-${docIndex}`);
|
||||
}
|
||||
const workspaceIndex = (i * numWorkspaces) + j;
|
||||
workspaces.push({
|
||||
name: `ws-${workspaceIndex}`,
|
||||
docs
|
||||
});
|
||||
}
|
||||
example.push({
|
||||
name: `org-${i}`,
|
||||
domain: `org-${i}`,
|
||||
workspaces
|
||||
});
|
||||
}
|
||||
return example;
|
||||
}
|
||||
|
||||
/**
|
||||
* To set up TYPEORM_* environment variables for testing, call this in a before() call of a test
|
||||
* suite, using setUpDB(this);
|
||||
*/
|
||||
export function setUpDB(context?: IHookCallbackContext) {
|
||||
if (!process.env.TYPEORM_DATABASE) {
|
||||
process.env.TYPEORM_DATABASE = ":memory:";
|
||||
} else {
|
||||
if (context) { context.timeout(60000); }
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const cmd = process.argv[2];
|
||||
if (cmd === 'init') {
|
||||
const connection = await createConnection();
|
||||
await createInitialDb(connection);
|
||||
return;
|
||||
} else if (cmd === 'benchmark') {
|
||||
const connection = await createConnection();
|
||||
await createInitialDb(connection, false);
|
||||
await createBenchmarkDb(connection);
|
||||
return;
|
||||
} else if (cmd === 'migrate') {
|
||||
process.env.TYPEORM_LOGGING = 'true';
|
||||
const connection = await createConnection();
|
||||
await runMigrations(connection);
|
||||
return;
|
||||
} else if (cmd === 'revert') {
|
||||
process.env.TYPEORM_LOGGING = 'true';
|
||||
const connection = await createConnection();
|
||||
await undoLastMigration(connection);
|
||||
return;
|
||||
} else if (cmd === 'serve') {
|
||||
const home = await createServer(3000);
|
||||
// tslint:disable-next-line:no-console
|
||||
console.log(`Home API demo available at ${home.getOwnUrl()}`);
|
||||
return;
|
||||
}
|
||||
// tslint:disable-next-line:no-console
|
||||
console.log("Call with: init | migrate | revert | serve | benchmark");
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main().catch(e => {
|
||||
// tslint:disable-next-line:no-console
|
||||
console.log(e);
|
||||
});
|
||||
}
|
103
test/gen-server/testUtils.ts
Normal file
103
test/gen-server/testUtils.ts
Normal file
@ -0,0 +1,103 @@
|
||||
import {GristLoadConfig} from 'app/common/gristUrls';
|
||||
import {BillingAccount} from 'app/gen-server/entity/BillingAccount';
|
||||
import {Organization} from 'app/gen-server/entity/Organization';
|
||||
import {Product} from 'app/gen-server/entity/Product';
|
||||
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
|
||||
import {INotifier} from 'app/server/lib/INotifier';
|
||||
import {AxiosRequestConfig} from "axios";
|
||||
import {delay} from 'bluebird';
|
||||
|
||||
/**
|
||||
* Returns an AxiosRequestConfig, that identifies the user with `username` on a server running
|
||||
* against a database using `test/gen-server/seed.ts`. Also tells axios not to raise exception on
|
||||
* failed request.
|
||||
*/
|
||||
export function configForUser(username: string): AxiosRequestConfig {
|
||||
const config: AxiosRequestConfig = {
|
||||
responseType: 'json',
|
||||
validateStatus: (status: number) => true,
|
||||
headers: {
|
||||
'X-Requested-With': 'XMLHttpRequest',
|
||||
}
|
||||
};
|
||||
if (username !== 'Anonymous') {
|
||||
config.headers.Authorization = 'Bearer api_key_for_' + username.toLowerCase();
|
||||
}
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new user and return their personal org.
|
||||
*/
|
||||
export async function createUser(dbManager: HomeDBManager, name: string): Promise<Organization> {
|
||||
const username = name.toLowerCase();
|
||||
const email = `${username}@getgrist.com`;
|
||||
const user = await dbManager.getUserByLogin(email, {email, name});
|
||||
if (!user) { throw new Error('failed to create user'); }
|
||||
user.apiKey = `api_key_for_${username}`;
|
||||
await user.save();
|
||||
const userHome = (await dbManager.getOrg({userId: user.id}, null)).data;
|
||||
if (!userHome) { throw new Error('failed to create personal org'); }
|
||||
return userHome;
|
||||
}
|
||||
|
||||
/**
|
||||
* Associate a given org with a given product.
|
||||
*/
|
||||
export async function setPlan(dbManager: HomeDBManager, org: {billingAccount?: {id: number}},
|
||||
productName: string) {
|
||||
const product = await dbManager.connection.manager.findOne(Product, {where: {name: productName}});
|
||||
if (!product) { throw new Error(`cannot find product ${productName}`); }
|
||||
if (!org.billingAccount) { throw new Error('must join billingAccount'); }
|
||||
await dbManager.connection.createQueryBuilder()
|
||||
.update(BillingAccount)
|
||||
.set({product})
|
||||
.where('id = :bid', {bid: org.billingAccount.id})
|
||||
.execute();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the window.gristConfig object extracted from the raw HTML of app.html page.
|
||||
*/
|
||||
export function getGristConfig(page: string): Partial<GristLoadConfig> {
|
||||
const match = /window\.gristConfig = ([^;]*)/.exec(page);
|
||||
if (!match) { throw new Error('cannot find grist config'); }
|
||||
return JSON.parse(match[1]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for all pending (back-end) notifications to complete. Notifications are
|
||||
* started during request handling, but may not complete fully during it.
|
||||
*/
|
||||
export async function waitForAllNotifications(notifier: INotifier, maxWait: number = 1000) {
|
||||
const start = Date.now();
|
||||
while (Date.now() - start < maxWait) {
|
||||
if (!notifier.testPending) { return; }
|
||||
await delay(1);
|
||||
}
|
||||
throw new Error('waitForAllNotifications timed out');
|
||||
}
|
||||
|
||||
// count the number of rows in a table
|
||||
export async function getRowCount(dbManager: HomeDBManager, tableName: string): Promise<number> {
|
||||
const result = await dbManager.connection.query(`select count(*) as ct from ${tableName}`);
|
||||
return parseInt(result[0].ct, 10);
|
||||
}
|
||||
|
||||
// gather counts for all significant tables - handy as a sanity check on deletions
|
||||
export async function getRowCounts(dbManager: HomeDBManager) {
|
||||
return {
|
||||
aclRules: await getRowCount(dbManager, 'acl_rules'),
|
||||
docs: await getRowCount(dbManager, 'docs'),
|
||||
groupGroups: await getRowCount(dbManager, 'group_groups'),
|
||||
groupUsers: await getRowCount(dbManager, 'group_users'),
|
||||
groups: await getRowCount(dbManager, 'groups'),
|
||||
logins: await getRowCount(dbManager, 'logins'),
|
||||
orgs: await getRowCount(dbManager, 'orgs'),
|
||||
users: await getRowCount(dbManager, 'users'),
|
||||
workspaces: await getRowCount(dbManager, 'workspaces'),
|
||||
billingAccounts: await getRowCount(dbManager, 'billing_accounts'),
|
||||
billingAccountManagers: await getRowCount(dbManager, 'billing_account_managers'),
|
||||
products: await getRowCount(dbManager, 'products')
|
||||
};
|
||||
}
|
66
test/server/customUtil.ts
Normal file
66
test/server/customUtil.ts
Normal file
@ -0,0 +1,66 @@
|
||||
import {getAppRoot} from 'app/server/lib/places';
|
||||
import {fromCallback} from 'bluebird';
|
||||
import * as express from 'express';
|
||||
import * as http from 'http';
|
||||
import {AddressInfo, Socket} from 'net';
|
||||
import * as path from 'path';
|
||||
import {fixturesRoot} from 'test/server/testUtils';
|
||||
|
||||
export interface Serving {
|
||||
url: string;
|
||||
shutdown: () => void;
|
||||
}
|
||||
|
||||
|
||||
// Adds static files from a directory.
|
||||
// By default exposes /fixture/sites
|
||||
export function addStatic(app: express.Express, rootDir?: string) {
|
||||
// mix in a copy of the plugin api
|
||||
app.use(/^\/(grist-plugin-api.js)$/, (req, res) =>
|
||||
res.sendFile(req.params[0], {root:
|
||||
path.resolve(getAppRoot(), "static")}));
|
||||
app.use(express.static(rootDir || path.resolve(fixturesRoot, "sites"), {
|
||||
setHeaders: (res) => {
|
||||
res.set("Access-Control-Allow-Origin", "*");
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
// Serve from a directory.
|
||||
export async function serveStatic(rootDir: string): Promise<Serving> {
|
||||
return serveSomething(app => addStatic(app, rootDir));
|
||||
}
|
||||
|
||||
// Serve a string of html.
|
||||
export async function serveSinglePage(html: string): Promise<Serving> {
|
||||
return serveSomething(app => {
|
||||
app.get('', (req, res) => res.send(html));
|
||||
});
|
||||
}
|
||||
|
||||
export function serveCustomViews(): Promise<Serving> {
|
||||
return serveStatic(path.resolve(fixturesRoot, "sites"));
|
||||
}
|
||||
|
||||
export async function serveSomething(setup: (app: express.Express) => void, port= 0): Promise<Serving> {
|
||||
const app = express();
|
||||
const server = http.createServer(app);
|
||||
await fromCallback((cb: any) => server.listen(port, cb));
|
||||
|
||||
const connections = new Set<Socket>();
|
||||
server.on('connection', (conn) => {
|
||||
connections.add(conn);
|
||||
conn.on('close', () => connections.delete(conn));
|
||||
});
|
||||
|
||||
function shutdown() {
|
||||
server.close();
|
||||
for (const conn of connections) { conn.destroy(); }
|
||||
}
|
||||
|
||||
port = (server.address() as AddressInfo).port;
|
||||
app.set('port', port);
|
||||
setup(app);
|
||||
const url = `http://localhost:${port}`;
|
||||
return {url, shutdown};
|
||||
}
|
243
test/server/docTools.ts
Normal file
243
test/server/docTools.ts
Normal file
@ -0,0 +1,243 @@
|
||||
import {getDocWorkerMap} from 'app/gen-server/lib/DocWorkerMap';
|
||||
import {ActiveDoc} from 'app/server/lib/ActiveDoc';
|
||||
import {DummyAuthorizer} from 'app/server/lib/Authorizer';
|
||||
import {create} from 'app/server/lib/create';
|
||||
import {DocManager} from 'app/server/lib/DocManager';
|
||||
import {DocSession, makeExceptionalDocSession} from 'app/server/lib/DocSession';
|
||||
import {DocStorageManager} from 'app/server/lib/DocStorageManager';
|
||||
import {GristServer} from 'app/server/lib/GristServer';
|
||||
import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
|
||||
import {getAppRoot} from 'app/server/lib/places';
|
||||
import {PluginManager} from 'app/server/lib/PluginManager';
|
||||
import {createTmpDir as createTmpUploadDir, FileUploadInfo, globalUploadSet} from 'app/server/lib/uploads';
|
||||
import * as testUtils from 'test/server/testUtils';
|
||||
|
||||
import {assert} from 'chai';
|
||||
import * as fse from 'fs-extra';
|
||||
import {tmpdir} from 'os';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp';
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
|
||||
// it is sometimes useful in debugging to turn off automatic cleanup of docs and workspaces.
|
||||
const noCleanup = Boolean(process.env.NO_CLEANUP);
|
||||
|
||||
/**
|
||||
* Use from a test suite to get an object with convenient methods for creating ActiveDocs:
|
||||
*
|
||||
* createDoc(docName): creates a new empty document.
|
||||
* loadFixtureDoc(docName): loads a copy of a fixture document.
|
||||
* loadDoc(docName): loads a given document, e.g. previously created with createDoc().
|
||||
* createFakeSession(): creates a fake DocSession for use when applying user actions.
|
||||
*
|
||||
* Also available are accessors for the created "managers":
|
||||
* getDocManager()
|
||||
* getStorageManager()
|
||||
* getPluginManager()
|
||||
*
|
||||
* It also takes care of cleaning up any created ActiveDocs.
|
||||
* @param persistAcrossCases Don't shut down created ActiveDocs between test cases.
|
||||
* @param useFixturePlugins Use the plugins in `test/fixtures/plugins`
|
||||
*/
|
||||
export function createDocTools(options: {persistAcrossCases?: boolean,
|
||||
useFixturePlugins?: boolean,
|
||||
storageManager?: IDocStorageManager,
|
||||
server?: GristServer} = {}) {
|
||||
let tmpDir: string;
|
||||
let docManager: DocManager;
|
||||
|
||||
async function doBefore() {
|
||||
tmpDir = await createTmpDir();
|
||||
const pluginManager = options.useFixturePlugins ? await createFixturePluginManager() : undefined;
|
||||
docManager = await createDocManager({tmpDir, pluginManager, storageManager: options.storageManager,
|
||||
server: options.server});
|
||||
}
|
||||
|
||||
async function doAfter() {
|
||||
// Clean up at the end of the test suite (in addition to the optional per-test cleanup).
|
||||
await testUtils.captureLog('info', () => docManager.shutdownAll());
|
||||
assert.equal(docManager.numOpenDocs(), 0);
|
||||
await globalUploadSet.cleanupAll();
|
||||
|
||||
// Clean up the temp directory.
|
||||
if (!noCleanup) {
|
||||
await fse.remove(tmpDir);
|
||||
}
|
||||
}
|
||||
|
||||
// Allow using outside of mocha
|
||||
if (typeof before !== "undefined") {
|
||||
before(doBefore);
|
||||
after(doAfter);
|
||||
|
||||
// Check after each test case that all ActiveDocs got shut down.
|
||||
afterEach(async function() {
|
||||
if (!options.persistAcrossCases) {
|
||||
await docManager.shutdownAll();
|
||||
assert.equal(docManager.numOpenDocs(), 0);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const systemSession = makeExceptionalDocSession('system');
|
||||
return {
|
||||
/** create a fake session for use when applying user actions to a document */
|
||||
createFakeSession(): DocSession {
|
||||
return {client: null, authorizer: new DummyAuthorizer('editors', 'doc')} as any as DocSession;
|
||||
},
|
||||
|
||||
/** create a throw-away, empty document for testing purposes */
|
||||
async createDoc(docName: string): Promise<ActiveDoc> {
|
||||
return docManager.createNewEmptyDoc(systemSession, docName);
|
||||
},
|
||||
|
||||
/** load a copy of a fixture document for testing purposes */
|
||||
async loadFixtureDoc(docName: string): Promise<ActiveDoc> {
|
||||
const copiedDocName = await testUtils.useFixtureDoc(docName, docManager.storageManager);
|
||||
return this.loadDoc(copiedDocName);
|
||||
},
|
||||
|
||||
/** load a copy of a local document at an arbitrary path on disk for testing purposes */
|
||||
async loadLocalDoc(srcPath: string): Promise<ActiveDoc> {
|
||||
const copiedDocName = await testUtils.useLocalDoc(srcPath, docManager.storageManager);
|
||||
return this.loadDoc(copiedDocName);
|
||||
},
|
||||
|
||||
/** like `loadFixtureDoc`, but lets you rename the document on disk */
|
||||
async loadFixtureDocAs(docName: string, alias: string): Promise<ActiveDoc> {
|
||||
const copiedDocName = await testUtils.useFixtureDoc(docName, docManager.storageManager, alias);
|
||||
return this.loadDoc(copiedDocName);
|
||||
},
|
||||
|
||||
/** Loads a given document, e.g. previously created with createDoc() */
|
||||
async loadDoc(docName: string): Promise<ActiveDoc> {
|
||||
return docManager.fetchDoc(systemSession, docName);
|
||||
},
|
||||
|
||||
getDocManager() { return docManager; },
|
||||
getStorageManager() { return docManager.storageManager; },
|
||||
getPluginManager() { return docManager.pluginManager; },
|
||||
|
||||
/** Setup that needs to be done before using the tools, typically called by mocha */
|
||||
before() { return doBefore(); },
|
||||
|
||||
/** Teardown that needs to be done after using the tools, typically called by mocha */
|
||||
after() { return doAfter(); },
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a DocManager for tests, complete with a PluginManager and DocStorageManager.
|
||||
* @param options.pluginManager The PluginManager to use; defaults to using a real global singleton
|
||||
* that loads built-in modules.
|
||||
*/
|
||||
export async function createDocManager(
|
||||
options: {tmpDir?: string, pluginManager?: PluginManager,
|
||||
storageManager?: IDocStorageManager,
|
||||
server?: GristServer} = {}): Promise<DocManager> {
|
||||
// Set Grist home to a temporary directory, and wipe it out on exit.
|
||||
const tmpDir = options.tmpDir || await createTmpDir();
|
||||
const docStorageManager = options.storageManager || new DocStorageManager(tmpDir);
|
||||
const pluginManager = options.pluginManager || await getGlobalPluginManager();
|
||||
const store = getDocWorkerMap();
|
||||
const internalPermitStore = store.getPermitStore('1');
|
||||
const externalPermitStore = store.getPermitStore('2');
|
||||
return new DocManager(docStorageManager, pluginManager, null, options.server || {
|
||||
...createDummyGristServer(),
|
||||
getPermitStore() { return internalPermitStore; },
|
||||
getExternalPermitStore() { return externalPermitStore; },
|
||||
getStorageManager() { return docStorageManager; },
|
||||
});
|
||||
}
|
||||
|
||||
export function createDummyGristServer(): GristServer {
|
||||
return {
|
||||
create,
|
||||
getHost() { return 'localhost:4242'; },
|
||||
getHomeUrl() { return 'http://localhost:4242'; },
|
||||
getHomeUrlByDocId() { return Promise.resolve('http://localhost:4242'); },
|
||||
getMergedOrgUrl() { return 'http://localhost:4242'; },
|
||||
getOwnUrl() { return 'http://localhost:4242'; },
|
||||
getPermitStore() { throw new Error('no permit store'); },
|
||||
getExternalPermitStore() { throw new Error('no external permit store'); },
|
||||
getGristConfig() { return { homeUrl: '', timestampMs: 0 }; },
|
||||
getOrgUrl() { return Promise.resolve(''); },
|
||||
getResourceUrl() { return Promise.resolve(''); },
|
||||
getSessions() { throw new Error('no sessions'); },
|
||||
getComm() { throw new Error('no comms'); },
|
||||
getHosts() { throw new Error('no hosts'); },
|
||||
getHomeDBManager() { throw new Error('no db'); },
|
||||
getStorageManager() { throw new Error('no storage manager'); },
|
||||
getNotifier() { throw new Error('no notifier'); },
|
||||
getDocTemplate() { throw new Error('no doc template'); },
|
||||
getTag() { return 'tag'; },
|
||||
sendAppPage() { return Promise.resolve(); },
|
||||
};
|
||||
}
|
||||
|
||||
export async function createTmpDir(): Promise<string> {
|
||||
const tmpRootDir = process.env.TESTDIR || tmpdir();
|
||||
await fse.mkdirs(tmpRootDir);
|
||||
return fse.realpath(await tmp.dirAsync({
|
||||
dir: tmpRootDir,
|
||||
prefix: 'grist_test_',
|
||||
unsafeCleanup: true,
|
||||
keep: noCleanup,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file with the given name (and simple dummy content) in dirPath, and returns
|
||||
* FileUploadInfo for it.
|
||||
*/
|
||||
export async function createFile(dirPath: string, name: string): Promise<FileUploadInfo> {
|
||||
const absPath = path.join(dirPath, name);
|
||||
await fse.outputFile(absPath, `${name}:${name}\n`);
|
||||
return {
|
||||
absPath,
|
||||
origName: name,
|
||||
size: (await fse.stat(absPath)).size,
|
||||
ext: path.extname(name),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an upload with the given filenames (containg simple dummy content), in the
|
||||
* globalUploadSet, and returns its uploadId. The upload is registered with the given accessId
|
||||
* (userId), and the same id must be used to retrieve it.
|
||||
*/
|
||||
export async function createUpload(fileNames: string[], accessId: string|null): Promise<number> {
|
||||
const {tmpDir, cleanupCallback} = await createTmpUploadDir({});
|
||||
const files = await Promise.all(fileNames.map((name) => createFile(tmpDir, name)));
|
||||
return globalUploadSet.registerUpload(files, tmpDir, cleanupCallback, accessId);
|
||||
}
|
||||
|
||||
|
||||
let _globalPluginManager: PluginManager|null = null;
|
||||
|
||||
// Helper to create a singleton PluginManager. This includes loading built-in plugins. Since most
|
||||
// tests don't make any use of it, it's fine to reuse a single one. For tests that need a custom
|
||||
// one, pass one into createDocManager().
|
||||
export async function getGlobalPluginManager(): Promise<PluginManager> {
|
||||
if (!_globalPluginManager) {
|
||||
const appRoot = getAppRoot();
|
||||
_globalPluginManager = new PluginManager(appRoot);
|
||||
await _globalPluginManager.initialize();
|
||||
}
|
||||
return _globalPluginManager;
|
||||
}
|
||||
|
||||
// Path to the folder where builtIn plugins leave in test/fixtures
|
||||
export const builtInFolder = path.join(testUtils.fixturesRoot, 'plugins/builtInPlugins');
|
||||
|
||||
// Path to the folder where installed plugins leave in test/fixtures
|
||||
export const installedFolder = path.join(testUtils.fixturesRoot, 'plugins/installedPlugins');
|
||||
|
||||
// Creates a plugin manager which loads the plugins in `test/fixtures/plugins`
|
||||
async function createFixturePluginManager() {
|
||||
const p = new PluginManager(builtInFolder, installedFolder);
|
||||
p.appRoot = getAppRoot();
|
||||
await p.initialize();
|
||||
return p;
|
||||
}
|
167
test/server/gristClient.ts
Normal file
167
test/server/gristClient.ts
Normal file
@ -0,0 +1,167 @@
|
||||
import { DocAction } from 'app/common/DocActions';
|
||||
import { FlexServer } from 'app/server/lib/FlexServer';
|
||||
import axios from 'axios';
|
||||
import pick = require('lodash/pick');
|
||||
import * as WebSocket from 'ws';
|
||||
|
||||
interface GristRequest {
|
||||
reqId: number;
|
||||
method: string;
|
||||
args: any[];
|
||||
}
|
||||
|
||||
interface GristResponse {
|
||||
reqId: number;
|
||||
error?: string;
|
||||
errorCode?: string;
|
||||
data?: any;
|
||||
}
|
||||
|
||||
interface GristMessage {
|
||||
type: 'clientConnect' | 'docUserAction';
|
||||
docFD: number;
|
||||
data: any;
|
||||
}
|
||||
|
||||
export class GristClient {
|
||||
public messages: GristMessage[] = [];
|
||||
|
||||
private _requestId: number = 0;
|
||||
private _pending: Array<GristResponse|GristMessage> = [];
|
||||
private _consumer: () => void;
|
||||
private _ignoreTrivialActions: boolean = false;
|
||||
|
||||
constructor(public ws: any) {
|
||||
ws.onmessage = (data: any) => {
|
||||
const msg = pick(JSON.parse(data.data),
|
||||
['reqId', 'error', 'errorCode', 'data', 'type', 'docFD']);
|
||||
if (this._ignoreTrivialActions && msg.type === 'docUserAction' &&
|
||||
msg.data?.actionGroup?.internal === true &&
|
||||
msg.data?.docActions?.length === 0) {
|
||||
return;
|
||||
}
|
||||
this._pending.push(msg);
|
||||
if (this._consumer) { this._consumer(); }
|
||||
};
|
||||
}
|
||||
|
||||
// After a document is opened, the sandbox recomputes its formulas and sends any changes.
|
||||
// The client will receive an update even if there are no changes. This may be useful in
|
||||
// the future to know that the document is up to date. But for testing, this asynchronous
|
||||
// message can be awkward. Call this method to ignore it.
|
||||
public ignoreTrivialActions() {
|
||||
this._ignoreTrivialActions = true;
|
||||
}
|
||||
|
||||
public flush() {
|
||||
this._pending = [];
|
||||
}
|
||||
|
||||
public shift() {
|
||||
return this._pending.shift();
|
||||
}
|
||||
|
||||
public count() {
|
||||
return this._pending.length;
|
||||
}
|
||||
|
||||
public async read(): Promise<any> {
|
||||
for (;;) {
|
||||
if (this._pending.length) {
|
||||
return this._pending.shift();
|
||||
}
|
||||
await new Promise(resolve => this._consumer = resolve);
|
||||
}
|
||||
}
|
||||
|
||||
public async readMessage(): Promise<GristMessage> {
|
||||
const result = await this.read();
|
||||
if (!result.type) {
|
||||
throw new Error(`message looks wrong: ${JSON.stringify(result)}`);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public async readResponse(): Promise<GristResponse> {
|
||||
this.messages = [];
|
||||
for (;;) {
|
||||
const result = await this.read();
|
||||
if (result.reqId === undefined) {
|
||||
this.messages.push(result);
|
||||
continue;
|
||||
}
|
||||
if (result.reqId !== this._requestId) {
|
||||
throw new Error("unexpected request id");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to read the next docUserAction ignoring anything else (e.g. a duplicate clientConnect).
|
||||
public async readDocUserAction(): Promise<DocAction[]> {
|
||||
while (true) { // eslint-disable-line no-constant-condition
|
||||
const msg = await this.readMessage();
|
||||
if (msg.type === 'docUserAction') {
|
||||
return msg.data.docActions;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async send(method: string, ...args: any[]): Promise<GristResponse> {
|
||||
const p = this.readResponse();
|
||||
this._requestId++;
|
||||
const req: GristRequest = {
|
||||
reqId: this._requestId,
|
||||
method,
|
||||
args
|
||||
};
|
||||
this.ws.send(JSON.stringify(req));
|
||||
const result = await p;
|
||||
return result;
|
||||
}
|
||||
|
||||
public async close() {
|
||||
this.ws.terminate();
|
||||
this.ws.close();
|
||||
}
|
||||
|
||||
public async openDocOnConnect(docId: string) {
|
||||
const msg = await this.readMessage();
|
||||
if (msg.type !== 'clientConnect') { throw new Error('expected clientConnect'); }
|
||||
const openDoc = await this.send('openDoc', docId);
|
||||
if (openDoc.error) { throw new Error('error in openDocOnConnect'); }
|
||||
return openDoc;
|
||||
}
|
||||
}
|
||||
|
||||
export async function openClient(server: FlexServer, email: string, org: string,
|
||||
emailHeader?: string): Promise<GristClient> {
|
||||
const headers: Record<string, string> = {};
|
||||
if (!emailHeader) {
|
||||
const resp = await axios.get(`${server.getOwnUrl()}/test/session`);
|
||||
const cookie = resp.headers['set-cookie'][0];
|
||||
if (email !== 'anon@getgrist.com') {
|
||||
const cid = decodeURIComponent(cookie.split('=')[1].split(';')[0]);
|
||||
const comm = server.getComm();
|
||||
const sessionId = comm.getSessionIdFromCookie(cid);
|
||||
const scopedSession = comm.getOrCreateSession(sessionId, {org});
|
||||
const profile = { email, email_verified: true, name: "Someone" };
|
||||
await scopedSession.updateUserProfile({} as any, profile);
|
||||
}
|
||||
headers.Cookie = cookie;
|
||||
} else {
|
||||
headers[emailHeader] = email;
|
||||
}
|
||||
const ws = new WebSocket('ws://localhost:' + server.getOwnPort() + `/o/${org}`, {
|
||||
headers
|
||||
});
|
||||
await new Promise(function(resolve, reject) {
|
||||
ws.on('open', function() {
|
||||
resolve(ws);
|
||||
});
|
||||
ws.on('error', function(err: any) {
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
return new GristClient(ws);
|
||||
}
|
305
test/server/lib/Authorizer.ts
Normal file
305
test/server/lib/Authorizer.ts
Normal file
@ -0,0 +1,305 @@
|
||||
import {parseUrlId} from 'app/common/gristUrls';
|
||||
import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
|
||||
import {DocManager} from 'app/server/lib/DocManager';
|
||||
import {FlexServer} from 'app/server/lib/FlexServer';
|
||||
import axios from 'axios';
|
||||
import {assert} from 'chai';
|
||||
import {toPairs} from 'lodash';
|
||||
import {createInitialDb, removeConnection, setUpDB} from 'test/gen-server/seed';
|
||||
import {configForUser, getGristConfig} from 'test/gen-server/testUtils';
|
||||
import {createDocTools} from 'test/server/docTools';
|
||||
import {openClient} from 'test/server/gristClient';
|
||||
import * as testUtils from 'test/server/testUtils';
|
||||
import * as uuidv4 from 'uuid/v4';
|
||||
|
||||
let serverUrl: string;
|
||||
let server: FlexServer;
|
||||
let dbManager: HomeDBManager;
|
||||
|
||||
async function activateServer(home: FlexServer, docManager: DocManager) {
|
||||
await home.initHomeDBManager();
|
||||
home.addHosts();
|
||||
home.addDocWorkerMap();
|
||||
home.addAccessMiddleware();
|
||||
dbManager = home.getHomeDBManager();
|
||||
await home.loadConfig({});
|
||||
home.addSessions();
|
||||
home.addHealthCheck();
|
||||
docManager.testSetHomeDbManager(dbManager);
|
||||
home.testSetDocManager(docManager);
|
||||
await home.start();
|
||||
home.addAccessMiddleware();
|
||||
home.addApiMiddleware();
|
||||
home.addJsonSupport();
|
||||
await home.addLandingPages();
|
||||
home.addHomeApi();
|
||||
await home.addDoc();
|
||||
home.addApiErrorHandlers();
|
||||
serverUrl = home.getOwnUrl();
|
||||
}
|
||||
|
||||
const chimpy = configForUser('Chimpy');
|
||||
const charon = configForUser('Charon');
|
||||
|
||||
const fixtures: {[docName: string]: string|null} = {
|
||||
Bananas: 'Hello.grist',
|
||||
Pluto: 'Hello.grist',
|
||||
};
|
||||
|
||||
describe('Authorizer', function() {
|
||||
|
||||
testUtils.setTmpLogLevel('fatal');
|
||||
|
||||
server = new FlexServer(0, 'test docWorker');
|
||||
const docTools = createDocTools({persistAcrossCases: true, useFixturePlugins: false,
|
||||
server});
|
||||
const docs: {[name: string]: {id: string}} = {};
|
||||
|
||||
// Loads the fixtures documents so that they are available to the doc worker under the correct
|
||||
// names.
|
||||
async function loadFixtureDocs() {
|
||||
for (const [docName, fixtureDoc] of toPairs(fixtures)) {
|
||||
const docId = String(await dbManager.testGetId(docName));
|
||||
if (fixtureDoc) {
|
||||
await docTools.loadFixtureDocAs(fixtureDoc, docId);
|
||||
} else {
|
||||
await docTools.createDoc(docId);
|
||||
}
|
||||
docs[docName] = {id: docId};
|
||||
}
|
||||
}
|
||||
|
||||
let oldEnv: testUtils.EnvironmentSnapshot;
|
||||
before(async function() {
|
||||
this.timeout(5000);
|
||||
setUpDB(this);
|
||||
oldEnv = new testUtils.EnvironmentSnapshot();
|
||||
process.env.GRIST_PROXY_AUTH_HEADER = 'X-email';
|
||||
await createInitialDb();
|
||||
await activateServer(server, docTools.getDocManager());
|
||||
await loadFixtureDocs();
|
||||
});
|
||||
|
||||
after(async function() {
|
||||
const messages = await testUtils.captureLog('warn', async () => {
|
||||
await server.close();
|
||||
await removeConnection();
|
||||
});
|
||||
assert.lengthOf(messages, 0);
|
||||
oldEnv.restore();
|
||||
});
|
||||
|
||||
// TODO XXX Is it safe to remove this support now?
|
||||
// (It used to be implemented in getDocAccessInfo() in Authorizer.ts).
|
||||
it.skip("viewer gets redirect by title", async function() {
|
||||
const resp = await axios.get(`${serverUrl}/o/pr/doc/Bananas`, chimpy);
|
||||
assert.equal(resp.status, 200);
|
||||
assert.equal(getGristConfig(resp.data).assignmentId, 'sample_6');
|
||||
assert.match(resp.request.res.responseUrl, /\/doc\/sample_6$/);
|
||||
const resp2 = await axios.get(`${serverUrl}/o/nasa/doc/Pluto`, chimpy);
|
||||
assert.equal(resp2.status, 200);
|
||||
assert.equal(getGristConfig(resp2.data).assignmentId, 'sample_2');
|
||||
assert.match(resp2.request.res.responseUrl, /\/doc\/sample_2$/);
|
||||
});
|
||||
|
||||
it("stranger gets consistent refusal regardless of title", async function() {
|
||||
const resp = await axios.get(`${serverUrl}/o/pr/doc/Bananas`, charon);
|
||||
assert.equal(resp.status, 404);
|
||||
assert.notMatch(resp.data, /sample_6/);
|
||||
const resp2 = await axios.get(`${serverUrl}/o/pr/doc/Bananas2`, charon);
|
||||
assert.equal(resp2.status, 404);
|
||||
assert.notMatch(resp.data, /sample_6/);
|
||||
assert.deepEqual(resp.data, resp2.data);
|
||||
});
|
||||
|
||||
it("viewer can access title", async function() {
|
||||
const resp = await axios.get(`${serverUrl}/o/pr/doc/sample_6`, chimpy);
|
||||
assert.equal(resp.status, 200);
|
||||
const config = getGristConfig(resp.data);
|
||||
assert.equal(config.getDoc![config.assignmentId!].name, 'Bananas');
|
||||
});
|
||||
|
||||
it("stranger cannot access title", async function() {
|
||||
const resp = await axios.get(`${serverUrl}/o/pr/doc/sample_6`, charon);
|
||||
assert.equal(resp.status, 403);
|
||||
assert.notMatch(resp.data, /Bananas/);
|
||||
});
|
||||
|
||||
it("viewer cannot access document from wrong org", async function() {
|
||||
const resp = await axios.get(`${serverUrl}/o/nasa/doc/sample_6`, chimpy);
|
||||
assert.equal(resp.status, 404);
|
||||
});
|
||||
|
||||
it("websocket allows openDoc for viewer", async function() {
|
||||
const cli = await openClient(server, 'chimpy@getgrist.com', 'pr');
|
||||
cli.ignoreTrivialActions();
|
||||
assert.equal((await cli.readMessage()).type, 'clientConnect');
|
||||
const openDoc = await cli.send("openDoc", "sample_6");
|
||||
assert.equal(openDoc.error, undefined);
|
||||
assert.match(JSON.stringify(openDoc.data), /Table1/);
|
||||
await cli.close();
|
||||
});

  it("websocket forbids openDoc for stranger", async function() {
    const cli = await openClient(server, 'charon@getgrist.com', 'pr');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    const openDoc = await cli.send("openDoc", "sample_6");
    assert.match(openDoc.error!, /No view access/);
    assert.equal(openDoc.data, undefined);
    assert.match(openDoc.errorCode!, /AUTH_NO_VIEW/);
    await cli.close();
  });

  it("websocket forbids applyUserActions for viewer", async function() {
    const cli = await openClient(server, 'charon@getgrist.com', 'nasa');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    const openDoc = await cli.openDocOnConnect("sample_2");
    assert.equal(openDoc.error, undefined);
    const nonce = uuidv4();
    const applyUserActions = await cli.send("applyUserActions",
      0,
      [["UpdateRecord", "Table1", 1, {A: nonce}], {}]);
    assert.lengthOf(cli.messages, 0); // no user actions pushed to client
    assert.match(applyUserActions.error!, /No write access/);
    assert.match(applyUserActions.errorCode!, /AUTH_NO_EDIT/);
    const fetchTable = await cli.send("fetchTable", 0, "Table1");
    assert.equal(fetchTable.error, undefined);
    assert.notInclude(JSON.stringify(fetchTable.data), nonce);
    await cli.close();
  });

  it("websocket allows applyUserActions for editor", async function() {
    const cli = await openClient(server, 'chimpy@getgrist.com', 'nasa');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    const openDoc = await cli.openDocOnConnect("sample_2");
    assert.equal(openDoc.error, undefined);
    const nonce = uuidv4();
    const applyUserActions = await cli.send("applyUserActions",
      0,
      [["UpdateRecord", "Table1", 1, {A: nonce}]]);
    assert.lengthOf(cli.messages, 1); // user actions pushed to client
    assert.equal(applyUserActions.error, undefined);
    const fetchTable = await cli.send("fetchTable", 0, "Table1");
    assert.equal(fetchTable.error, undefined);
    assert.include(JSON.stringify(fetchTable.data), nonce);
    await cli.close();
  });

  it("can keep different simultaneous clients of a doc straight", async function() {
    const editor = await openClient(server, 'chimpy@getgrist.com', 'nasa');
    assert.equal((await editor.readMessage()).type, 'clientConnect');
    const viewer = await openClient(server, 'charon@getgrist.com', 'nasa');
    assert.equal((await viewer.readMessage()).type, 'clientConnect');
    const stranger = await openClient(server, 'kiwi@getgrist.com', 'nasa');
    assert.equal((await stranger.readMessage()).type, 'clientConnect');

    editor.ignoreTrivialActions();
    viewer.ignoreTrivialActions();
    stranger.ignoreTrivialActions();
    assert.equal((await editor.send("openDoc", "sample_2")).error, undefined);
    assert.equal((await viewer.send("openDoc", "sample_2")).error, undefined);
    assert.match((await stranger.send("openDoc", "sample_2")).error!, /No view access/);

    const action = [0, [["UpdateRecord", "Table1", 1, {A: "foo"}]]];
    assert.equal((await editor.send("applyUserActions", ...action)).error, undefined);
    assert.match((await viewer.send("applyUserActions", ...action)).error!, /No write access/);
    // Different message here because the stranger is sending actions without a doc being open.
    assert.match((await stranger.send("applyUserActions", ...action)).error!, /Invalid/);
  });
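
  // A minimal sketch, assuming the calling convention inferred from the tests
  // above: cli.send(method, docFD, ...args), where the leading 0 appears to be
  // the handle of the doc opened earlier on this connection, and the payload of
  // "applyUserActions" is a list of user actions such as
  // ["UpdateRecord", tableId, rowId, colValues]:
  //
  //   const action = [0, [["UpdateRecord", "Table1", 1, {A: "foo"}]]];
  //   await editor.send("applyUserActions", ...action);  // spreads to (0, [actions])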

  it("previewer has view access to docs", async function() {
    const cli = await openClient(server, 'thumbnail@getgrist.com', 'nasa');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    const openDoc = await cli.send("openDoc", "sample_2");
    assert.equal(openDoc.error, undefined);
    const nonce = uuidv4();
    const applyUserActions = await cli.send("applyUserActions",
      0,
      [["UpdateRecord", "Table1", 1, {A: nonce}], {}]);
    assert.lengthOf(cli.messages, 0); // no user actions pushed to client
    assert.match(applyUserActions.error!, /No write access/);
    assert.match(applyUserActions.errorCode!, /AUTH_NO_EDIT/);
    const fetchTable = await cli.send("fetchTable", 0, "Table1");
    assert.equal(fetchTable.error, undefined);
    assert.notInclude(JSON.stringify(fetchTable.data), nonce);
    await cli.close();
  });

  it("viewer can fork doc", async function() {
    const cli = await openClient(server, 'charon@getgrist.com', 'nasa');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    const openDoc = await cli.send("openDoc", "sample_2");
    assert.equal(openDoc.error, undefined);
    const result = await cli.send("fork", 0);
    assert.equal(result.data.docId, result.data.urlId);
    const parts = parseUrlId(result.data.docId);
    assert.equal(parts.trunkId, "sample_2");
    assert.isAbove(parts.forkId!.length, 4);
    assert.equal(parts.forkUserId, await dbManager.testGetId('Charon') as number);
  });

  it("anon can fork doc", async function() {
    // anon does not have access to doc initially
    const cli = await openClient(server, 'anon@getgrist.com', 'nasa');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    let openDoc = await cli.send("openDoc", "sample_2");
    assert.match(openDoc.error!, /No view access/);

    // grant anon access to doc and retry
    await dbManager.updateDocPermissions({
      userId: await dbManager.testGetId('Chimpy') as number,
      urlId: 'sample_2',
      org: 'nasa'
    }, {users: {"anon@getgrist.com": "viewers"}});
    dbManager.flushDocAuthCache();
    openDoc = await cli.send("openDoc", "sample_2");
    assert.equal(openDoc.error, undefined);

    // make a fork
    const result = await cli.send("fork", 0);
    assert.equal(result.data.docId, result.data.urlId);
    const parts = parseUrlId(result.data.docId);
    assert.equal(parts.trunkId, "sample_2");
    assert.isAbove(parts.forkId!.length, 4);
    assert.equal(parts.forkUserId, undefined);
  });
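
  // A hedged illustration of the parseUrlId() result shape, based only on the
  // assertions in the two fork tests above (the exact urlId encoding is not
  // shown in this excerpt):
  //
  //   const parts = parseUrlId(result.data.docId);
  //   parts.trunkId     // "sample_2" - the document that was forked
  //   parts.forkId      // generated fork identifier (length > 4 asserted above)
  //   parts.forkUserId  // numeric id of the forking user, or undefined for anon forks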

  it("can set user via GRIST_PROXY_AUTH_HEADER", async function() {
    // User can access a doc by setting the header.
    const docUrl = `${serverUrl}/o/pr/api/docs/sample_6`;
    const resp = await axios.get(docUrl, {
      headers: {'X-email': 'chimpy@getgrist.com'}
    });
    assert.equal(resp.data.name, 'Bananas');

    // Unknown user is denied.
    await assert.isRejected(axios.get(docUrl, {
      headers: {'X-email': 'notchimpy@getgrist.com'}
    }));

    // User can access a doc via websocket by setting the header.
    let cli = await openClient(server, 'chimpy@getgrist.com', 'pr', 'X-email');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    let openDoc = await cli.send("openDoc", "sample_6");
    assert.equal(openDoc.error, undefined);
    assert.match(JSON.stringify(openDoc.data), /Table1/);
    await cli.close();

    // Unknown user is denied.
    cli = await openClient(server, 'notchimpy@getgrist.com', 'pr', 'X-email');
    cli.ignoreTrivialActions();
    assert.equal((await cli.readMessage()).type, 'clientConnect');
    openDoc = await cli.send("openDoc", "sample_6");
    assert.match(openDoc.error!, /No view access/);
    assert.equal(openDoc.data, undefined);
    assert.match(openDoc.errorCode!, /AUTH_NO_VIEW/);
    await cli.close();
  });
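
  // A minimal sketch of the trusted-header contract exercised above, assuming
  // the test server was started with GRIST_PROXY_AUTH_HEADER=X-email (that
  // setup is not shown in this excerpt). A fronting proxy that has already
  // authenticated the user forwards HTTP or websocket requests with the header
  // set, e.g.:
  //
  //   await axios.get(`${serverUrl}/o/pr/api/docs/sample_6`, {
  //     headers: {'X-email': 'chimpy@getgrist.com'},  // treated as this user
  //   });
  //
  // Requests naming an unknown user are rejected on both the REST and
  // websocket paths, as the assertions above check.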
});

2589  test/server/lib/DocApi.ts  Normal file
File diff suppressed because it is too large
@ -169,7 +169,7 @@ export function assertMatchArray(stringArray: string[], regexArray: RegExp[]) {
 * @param {String} errCode - Error code to check against `err.code` from the caller.
 * @param {RegExp} errRegexp - Regular expression to check against `err.message` from the caller.
 */
export function expectRejection(promise: Promise<any>, errCode: number, errRegexp: RegExp) {
export function expectRejection(promise: Promise<any>, errCode: number|string, errRegexp: RegExp) {
  return promise
  .then(function() {
    assert(false, "Expected promise to return an error: " + errCode);
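
// Hypothetical usage illustrating why errCode was widened to number|string:
// besides numeric API error codes, callers can now assert on string codes such
// as 'AUTH_NO_VIEW'. The helper names below are illustrative, not from this diff.
//
//   await expectRejection(openDocAsStranger(), 'AUTH_NO_VIEW', /No view access/);
//   await expectRejection(fetchDocWithoutAccess(), 403, /access denied/i);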

@ -307,4 +307,11 @@ export class EnvironmentSnapshot {
  }
}

export async function getBuildFile(relativePath: string): Promise<string> {
  if (await fse.pathExists(path.join('_build', relativePath))) {
    return path.join('_build', relativePath);
  }
  return path.join('_build', 'core', relativePath);
}
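
// Hypothetical usage of the helper added above: resolve a built file whether
// the compiled output lives directly under _build/ or under _build/core/ (as
// in grist-core builds); the relative path shown is illustrative only.
//
//   const clientBundle = await getBuildFile('static/app.js');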

export { assert };

@ -9,6 +9,6 @@
  ],
  "references": [
    { "path": "../app" },
    { "path": "../stubs/app" },
    { "path": "../stubs/app" }
  ]
}

@ -741,6 +741,11 @@ anymatch@~3.1.1:
    normalize-path "^3.0.0"
    picomatch "^2.0.4"

app-module-path@2.2.0:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/app-module-path/-/app-module-path-2.2.0.tgz#641aa55dfb7d6a6f0a8141c4b9c0aa50b6c24dd5"
  integrity sha1-ZBqlXft9am8KgUHEucCqULbCTdU=

app-root-path@^2.0.1:
  version "2.2.1"
  resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.2.1.tgz#d0df4a682ee408273583d43f6f79e9892624bc9a"