diff --git a/app/server/declarations/tmp.d.ts b/app/server/declarations/tmp.d.ts
new file mode 100644
index 00000000..039ec371
--- /dev/null
+++ b/app/server/declarations/tmp.d.ts
@@ -0,0 +1,8 @@
+import {Options, SimpleOptions} from "tmp";
+
+// Add declarations of the promisified methods of tmp.
+declare module "tmp" {
+  function dirAsync(config?: Options): Promise<string>;
+  function fileAsync(config?: Options): Promise<string>;
+  function tmpNameAsync(config?: SimpleOptions): Promise<string>;
+}
diff --git a/app/server/lib/Authorizer.ts b/app/server/lib/Authorizer.ts
index 63f1a906..6c735317 100644
--- a/app/server/lib/Authorizer.ts
+++ b/app/server/lib/Authorizer.ts
@@ -17,6 +17,7 @@ import {IPermitStore, Permit} from 'app/server/lib/Permit';
 import {allowHost, optStringParam} from 'app/server/lib/requestUtils';
 import * as cookie from 'cookie';
 import {NextFunction, Request, RequestHandler, Response} from 'express';
+import {IncomingMessage} from 'http';
 import * as onHeaders from 'on-headers';
 
 export interface RequestWithLogin extends Request {
@@ -95,12 +96,14 @@ export function isSingleUserMode(): boolean {
  * header to specify the users' email address. The header to set comes from the
  * environment variable GRIST_PROXY_AUTH_HEADER.
  */
-export function getRequestProfile(req: Request): UserProfile|undefined {
+export function getRequestProfile(req: Request|IncomingMessage): UserProfile|undefined {
   const header = process.env.GRIST_PROXY_AUTH_HEADER;
   let profile: UserProfile|undefined;
-  if (header && req.headers && req.headers[header]) {
-    const headerContent = req.headers[header];
+  if (header) {
+    // Careful reading headers. If we have an IncomingMessage, there is no
+    // get() function, and header names are lowercased.
+    const headerContent = ('get' in req) ? req.get(header) : req.headers[header.toLowerCase()];
     if (headerContent) {
       const userEmail = headerContent.toString();
       const [userName] = userEmail.split("@", 1);
@@ -543,7 +546,7 @@ export function getTransitiveHeaders(req: Request): {[key: string]: string} {
   const XRequestedWith = req.get('X-Requested-With');
   const Origin = req.get('Origin');  // Pass along the original Origin since it may
                                      // play a role in granular access control.
-  return {
+  const result: Record<string, string> = {
     ...(Authorization ? { Authorization } : undefined),
     ...(Cookie ? { Cookie } : undefined),
     ...(Organization ? { Organization } : undefined),
@@ -551,6 +554,12 @@
     ...(XRequestedWith ? { 'X-Requested-With': XRequestedWith } : undefined),
     ...(Origin ? { Origin } : undefined),
   };
+  const extraHeader = process.env.GRIST_PROXY_AUTH_HEADER;
+  const extraHeaderValue = extraHeader && req.get(extraHeader);
+  if (extraHeader && extraHeaderValue) {
+    result[extraHeader] = extraHeaderValue;
+  }
+  return result;
 }
 
 export const signInStatusCookieName = sessionCookieName + '_status';
diff --git a/package.json b/package.json
index 81f7e629..5c7c8f43 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,8 @@
     "install:python3": "buildtools/prepare_python3.sh",
     "build:prod": "tsc --build && webpack --config buildtools/webpack.config.js --mode production && webpack --config buildtools/webpack.check.js --mode production && cat app/client/*.css app/client/*/*.css > static/bundle.css",
     "start:prod": "NODE_PATH=_build:_build/stubs node _build/stubs/app/server/server.js",
-    "test": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/*.js",
+    "test": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/*.js _build/test/server/**/*.js _build/test/gen-server/**/*.js",
+    "test:server": "GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs mocha _build/test/server/**/*.js _build/test/gen-server/**/*.js",
     "test:smoke": "NODE_PATH=_build:_build/stubs mocha _build/test/nbrowser/Smoke.js",
     "test:docker": "./test/test_under_docker.sh"
   },
@@ -57,6 +58,7 @@
     "@types/tmp": "0.0.33",
     "@types/uuid": "3.4.4",
     "@types/which": "2.0.1",
+    "app-module-path": "2.2.0",
     "catw": "1.0.1",
     "chai": "4.2.0",
     "chai-as-promised": "7.1.1",
diff --git a/test/fixtures/docs/ApiDataRecordsTest.grist b/test/fixtures/docs/ApiDataRecordsTest.grist
new file mode 100644
index 00000000..bf3c0f39
Binary files /dev/null and b/test/fixtures/docs/ApiDataRecordsTest.grist differ
diff --git a/test/fixtures/docs/Favorite_Films.grist b/test/fixtures/docs/Favorite_Films.grist
new file mode 100644
index 00000000..f5c4fecc
Binary files /dev/null and b/test/fixtures/docs/Favorite_Films.grist differ
diff --git a/test/gen-server/seed.ts b/test/gen-server/seed.ts
new file mode 100644
index 00000000..ef8a8b63
--- /dev/null
+++ b/test/gen-server/seed.ts
@@ -0,0 +1,640 @@
+/**
+ *
+ * Can run standalone as:
+ *   ts-node test/gen-server/seed.ts serve
+ * By default, uses a landing.db database in the current directory.
+ * Can prefix with database overrides, e.g.
+ *   TYPEORM_DATABASE=:memory:
+ *   TYPEORM_DATABASE=/tmp/test.db
+ * To connect to a postgres database, change ormconfig.env, or add a bunch of variables:
+ *   export TYPEORM_CONNECTION=postgres
+ *   export TYPEORM_HOST=localhost
+ *   export TYPEORM_DATABASE=landing
+ *   export TYPEORM_USERNAME=development
+ *   export TYPEORM_PASSWORD=*****
+ *
+ * To just set up the database (migrate and add seed data), and then stop immediately, do:
+ *   ts-node test/gen-server/seed.ts init
+ * To apply all migrations to the db, do:
+ *   ts-node test/gen-server/seed.ts migrate
+ * To revert the last migration:
+ *   ts-node test/gen-server/seed.ts revert
+ *
+ */
+
+import {addPath} from 'app-module-path';
+import {IHookCallbackContext} from 'mocha';
+import * as path from 'path';
+import {Connection, createConnection, getConnectionManager, Repository} from 'typeorm';
+
+if (require.main === module) {
+  addPath(path.dirname(path.dirname(__dirname)));
+}
+
+import {AclRuleDoc, AclRuleOrg, AclRuleWs} from "app/gen-server/entity/AclRule";
+import {BillingAccount} from "app/gen-server/entity/BillingAccount";
+import {Document} from "app/gen-server/entity/Document";
+import {Group} from "app/gen-server/entity/Group";
+import {Login} from "app/gen-server/entity/Login";
+import {Organization} from "app/gen-server/entity/Organization";
+import {Product, synchronizeProducts} from "app/gen-server/entity/Product";
+import {User} from "app/gen-server/entity/User";
+import {Workspace} from "app/gen-server/entity/Workspace";
+import {EXAMPLE_WORKSPACE_NAME} from 'app/gen-server/lib/HomeDBManager';
+import {Permissions} from 'app/gen-server/lib/Permissions';
+import {runMigrations, undoLastMigration, updateDb} from 'app/server/lib/dbUtils';
+import {FlexServer} from 'app/server/lib/FlexServer';
+import * as fse from 'fs-extra';
+
+const ACCESS_GROUPS = ['owners', 'editors', 'viewers', 'guests', 'members'];
+
+export const exampleOrgs = [
+  {
+    name: 'NASA',
+    domain: 'nasa',
+    workspaces: [
+      {
+        name: 'Horizon',
+        docs: ['Jupiter', 'Pluto', 'Beyond']
+      },
+      {
+        name: 'Rovers',
+        docs: ['Curiosity', 'Apathy']
+      }
+    ]
+  },
+  {
+    name: 'Primately',
+    domain: 'pr',
+    workspaces: [
+      {
+        name: 'Fruit',
+        docs: ['Bananas', 'Apples']
+      },
+      {
+        name: 'Trees',
+        docs: ['Tall', 'Short']
+      }
+    ]
+  },
+  {
+    name: 'Flightless',
+    domain: 'fly',
+    workspaces: [
+      {
+        name: 'Media',
+        docs: ['Australia', 'Antartic']
+      }
+    ]
+  },
+  {
+    name: 'Abyss',
+    domain: 'deep',
+    workspaces: [
+      {
+        name: 'Deep',
+        docs: ['Unfathomable']
+      }
+    ]
+  },
+  {
+    name: 'Chimpyland',
+    workspaces: [
+      {
+        name: 'Private',
+        docs: ['Timesheets', 'Appointments']
+      },
+      {
+        name: 'Public',
+        docs: []
+      }
+    ]
+  },
+  {
+    name: 'Kiwiland',
+    workspaces: []
+  },
+  {
+    name: 'EmptyWsOrg',
+    domain: 'blanky',
+    workspaces: [
+      {
+        name: 'Vacuum',
+        docs: []
+      }
+    ]
+  },
+  {
+    name: 'EmptyOrg',
+    domain: 'blankiest',
+    workspaces: []
+  },
+  {
+    name: 'Fish',
+    domain: 'fish',
+    workspaces: [
+      {
+        name: 'Big',
+        docs: [
+          'Shark'
+        ]
+      },
+      {
+        name: 'Small',
+        docs: [
+          'Anchovy',
+          'Herring'
+        ]
+      }
+    ]
+  },
+  {
+    name: 'Supportland',
+    workspaces: [
+      {
+        name: EXAMPLE_WORKSPACE_NAME,
+        docs: ['Hello World', 'Sample Example']
+      },
+    ]
+  },
+  {
+    name: 'Shiny',
+    domain: 'shiny',
+    host: 'www.shiny-grist.io',
+    workspaces: [
+      {
+        name: 'Tailor Made',
+        docs: ['Suits', 'Shoes']
+      }
+    ]
+  },
+  {
+    name: 'FreeTeam',
+    domain: 'freeteam',
+    product: 'teamFree',
+    workspaces: [
+      {
+        name: 'FreeTeamWs',
+        docs: [],
+      }
+    ]
+  },
+];
+
+
+const exampleUsers: {[user: string]: {[org: string]: string}} = {
+  Chimpy: {
+    FreeTeam: 'owners',
+    Chimpyland: 'owners',
+    NASA: 'owners',
+    Primately: 'guests',
+    Fruit: 'viewers',
+    Flightless: 'guests',
+    Media: 'guests',
+    Antartic: 'viewers',
+    EmptyOrg: 'editors',
+    EmptyWsOrg: 'editors',
+    Fish: 'owners'
+  },
+  Kiwi: {
+    Kiwiland: 'owners',
+    Flightless: 'editors',
+    Primately: 'viewers',
+    Fish: 'editors'
+  },
+  Charon: {
+    NASA: 'guests',
+    Horizon: 'guests',
+    Pluto: 'viewers',
+    Chimpyland: 'viewers',
+    Fish: 'viewers',
+    Abyss: 'owners',
+  },
+  // User support@ owns a workspace "Examples & Templates" in its personal org. It can be shared
+  // with everyone@ to let all users see it (this is not done here to avoid impacting all tests).
+  Support: { Supportland: 'owners' },
+};
+
+interface Groups {
+  owners: Group;
+  editors: Group;
+  viewers: Group;
+  guests: Group;
+  members?: Group;
+}
+
+class Seed {
+  public userRepository: Repository<User>;
+  public groupRepository: Repository<Group>;
+  public groups: {[key: string]: Groups};
+
+  constructor(public connection: Connection) {
+    this.userRepository = connection.getRepository(User);
+    this.groupRepository = connection.getRepository(Group);
+    this.groups = {};
+  }
+
+  public async createGroups(parent?: Organization|Workspace): Promise<Groups> {
+    const owners = new Group();
+    owners.name = 'owners';
+    const editors = new Group();
+    editors.name = 'editors';
+    const viewers = new Group();
+    viewers.name = 'viewers';
+    const guests = new Group();
+    guests.name = 'guests';
+
+    if (parent) {
+      // Nest the parent groups inside the new groups
+      const parentGroups = this.groups[parent.name];
+      owners.memberGroups = [parentGroups.owners];
+      editors.memberGroups = [parentGroups.editors];
+      viewers.memberGroups = [parentGroups.viewers];
+    }
+
+    await this.groupRepository.save([owners, editors, viewers, guests]);
+
+    if (!parent) {
+      // Add the members group for orgs.
+      const members = new Group();
+      members.name = 'members';
+      await this.groupRepository.save(members);
+      return {
+        owners,
+        editors,
+        viewers,
+        guests,
+        members
+      };
+    } else {
+      return {
+        owners,
+        editors,
+        viewers,
+        guests
+      };
+    }
+  }
+
+  public async addOrgToGroups(groups: Groups, org: Organization) {
+    const acl0 = new AclRuleOrg();
+    acl0.group = groups.members!;
+    acl0.permissions = Permissions.VIEW;
+    acl0.organization = org;
+
+    const acl1 = new AclRuleOrg();
+    acl1.group = groups.guests;
+    acl1.permissions = Permissions.VIEW;
+    acl1.organization = org;
+
+    const acl2 = new AclRuleOrg();
+    acl2.group = groups.viewers;
+    acl2.permissions = Permissions.VIEW;
+    acl2.organization = org;
+
+    const acl3 = new AclRuleOrg();
+    acl3.group = groups.editors;
+    acl3.permissions = Permissions.EDITOR;
+    acl3.organization = org;
+
+    const acl4 = new AclRuleOrg();
+    acl4.group = groups.owners;
+    acl4.permissions = Permissions.OWNER;
+    acl4.organization = org;
+
+    // should be able to save both together, but typeorm messes up on postgres.
+    await acl0.save();
+    await acl1.save();
+    await acl2.save();
+    await acl3.save();
+    await acl4.save();
+  }
+
+  public async addWorkspaceToGroups(groups: Groups, ws: Workspace) {
+    const acl1 = new AclRuleWs();
+    acl1.group = groups.guests;
+    acl1.permissions = Permissions.VIEW;
+    acl1.workspace = ws;
+
+    const acl2 = new AclRuleWs();
+    acl2.group = groups.viewers;
+    acl2.permissions = Permissions.VIEW;
+    acl2.workspace = ws;
+
+    const acl3 = new AclRuleWs();
+    acl3.group = groups.editors;
+    acl3.permissions = Permissions.EDITOR;
+    acl3.workspace = ws;
+
+    const acl4 = new AclRuleWs();
+    acl4.group = groups.owners;
+    acl4.permissions = Permissions.OWNER;
+    acl4.workspace = ws;
+
+    // should be able to save both together, but typeorm messes up on postgres.
+    await acl1.save();
+    await acl2.save();
+    await acl3.save();
+    await acl4.save();
+  }
+
+  public async addDocumentToGroups(groups: Groups, doc: Document) {
+    const acl1 = new AclRuleDoc();
+    acl1.group = groups.guests;
+    acl1.permissions = Permissions.VIEW;
+    acl1.document = doc;
+
+    const acl2 = new AclRuleDoc();
+    acl2.group = groups.viewers;
+    acl2.permissions = Permissions.VIEW;
+    acl2.document = doc;
+
+    const acl3 = new AclRuleDoc();
+    acl3.group = groups.editors;
+    acl3.permissions = Permissions.EDITOR;
+    acl3.document = doc;
+
+    const acl4 = new AclRuleDoc();
+    acl4.group = groups.owners;
+    acl4.permissions = Permissions.OWNER;
+    acl4.document = doc;
+
+    await acl1.save();
+    await acl2.save();
+    await acl3.save();
+    await acl4.save();
+  }
+
+  public async addUserToGroup(user: User, group: Group) {
+    await this.connection.createQueryBuilder()
+      .relation(Group, "memberUsers")
+      .of(group)
+      .add(user);
+  }
+
+  public async addDocs(orgs: Array<{name: string, domain?: string, host?: string, product?: string,
+                                    workspaces: Array<{name: string, docs: string[]}>}>) {
+    let docId = 1;
+    for (const org of orgs) {
+      const o = new Organization();
+      o.name = org.name;
+      const ba = new BillingAccount();
+      ba.individual = false;
+      const productName = org.product || 'Free';
+      ba.product = (await Product.findOne({name: productName}))!;
+      o.billingAccount = ba;
+      if (org.domain) { o.domain = org.domain; }
+      if (org.host) { o.host = org.host; }
+      await ba.save();
+      await o.save();
+      const grps = await this.createGroups();
+      this.groups[o.name] = grps;
+      await this.addOrgToGroups(grps, o);
+      for (const workspace of org.workspaces) {
+        const w = new Workspace();
+        w.name = workspace.name;
+        w.org = o;
+        await w.save();
+        const wgrps = await this.createGroups(o);
+        this.groups[w.name] = wgrps;
+        await this.addWorkspaceToGroups(wgrps, w);
+        for (const doc of workspace.docs) {
+          const d = new Document();
+          d.name = doc;
+          d.workspace = w;
+          d.id = `sample_${docId}`;
+          docId++;
+          await d.save();
+          const dgrps = await this.createGroups(w);
+          this.groups[d.name] = dgrps;
+          await this.addDocumentToGroups(dgrps, d);
+        }
+      }
+    }
+  }
+
+  public async run() {
+    if (await this.userRepository.findOne()) {
+      // we already have a user - skip seeding database
+      return;
+    }
+
+    await this.addDocs(exampleOrgs);
+    await this._buildUsers(exampleUsers);
+  }
+
+  // Creates benchmark data with 100 orgs, 50 workspaces per org and 20 docs per workspace.
+  public async runBenchmark() {
+    if (await this.userRepository.findOne()) {
+      // we already have a user - skip seeding database
+      return;
+    }
+
+    await this.connection.runMigrations();
+
+    const benchmarkOrgs = _generateData(100, 50, 20);
+    // Create an access object giving Chimpy random access to the orgs.
+    const chimpyAccess: {[name: string]: string} = {};
+    benchmarkOrgs.forEach((_org: any) => {
+      const zeroToThree = Math.floor(Math.random() * 4);
+      chimpyAccess[_org.name] = ACCESS_GROUPS[zeroToThree];
+    });
+
+    await this.addDocs(benchmarkOrgs);
+    await this._buildUsers({ Chimpy: chimpyAccess });
+  }
+
+  private async _buildUsers(userAccessMap: {[user: string]: {[org: string]: string}}) {
+    for (const name of Object.keys(userAccessMap)) {
+      const user = new User();
+      user.name = name;
+      user.apiKey = "api_key_for_" + name.toLowerCase();
+      await user.save();
+      const login = new Login();
+      login.displayEmail = login.email = name.toLowerCase() + "@getgrist.com";
+      login.user = user;
+      await login.save();
+      const personal = await Organization.findOne({name: name + "land"});
+      if (personal) {
+        personal.owner = user;
+        await personal.save();
+      }
+      for (const org of Object.keys(userAccessMap[name])) {
+        await this.addUserToGroup(user, (this.groups[org] as any)[userAccessMap[name][org]]);
+      }
+    }
+  }
+}
+
+// When running mocha on several test files at once, we need to reset our database connection
+// if it exists. This is a little ugly since it is stored globally.
+export async function removeConnection() {
+  if (getConnectionManager().connections.length > 0) {
+    if (getConnectionManager().connections.length > 1) {
+      throw new Error("unexpected number of connections");
+    }
+    await getConnectionManager().connections[0].close();
+    // There is no official way to delete connections that I've found.
+    (getConnectionManager().connections as any) = [];
+  }
+}
+
+export async function createInitialDb(connection?: Connection, migrateAndSeedData: boolean = true) {
+  // In jenkins tests, we may want to reset the database to a clean
+  // state. If so, TEST_CLEAN_DATABASE will have been set. How to
+  // clean the database depends on what kind of database it is. With
+  // postgres, it suffices to recreate our schema ("public", the
+  // default). With sqlite, it suffices to delete the file -- but we
+  // are only allowed to do this if there is no connection open to it
+  // (so we fail if a connection has already been made). If the
+  // sqlite db is in memory (":memory:") there's nothing to delete.
+  const uncommitted = !connection;  // has user already created a connection?
+                                    // if so we won't be able to delete sqlite db
+  connection = connection || await createConnection();
+  const opt = connection.driver.options;
+  if (process.env.TEST_CLEAN_DATABASE) {
+    if (opt.type === 'sqlite') {
+      const database = (opt as any).database;
+      // Only dbs on disk need to be deleted
+      if (database !== ':memory:') {
+        // We can only delete on-file dbs if no connection is open to them
+        if (!uncommitted) {
+          throw Error("too late to clean sqlite db");
+        }
+        await removeConnection();
+        if (await fse.pathExists(database)) {
+          await fse.unlink(database);
+        }
+        connection = await createConnection();
+      }
+    } else if (opt.type === 'postgres') {
+      // recreate schema, destroying everything that was inside it
+      await connection.query("DROP SCHEMA public CASCADE;");
+      await connection.query("CREATE SCHEMA public;");
+    } else {
+      throw new Error(`do not know how to clean a ${opt.type} db`);
+    }
+  }
+
+  // Finally - actually initialize the database.
+  if (migrateAndSeedData) {
+    await updateDb(connection);
+    await addSeedData(connection);
+  }
+}
+
+// add some test data to the database.
+export async function addSeedData(connection: Connection) {
+  await synchronizeProducts(connection, true);
+  await connection.transaction(async tr => {
+    const seed = new Seed(tr.connection);
+    await seed.run();
+  });
+}
+
+export async function createBenchmarkDb(connection?: Connection) {
+  connection = connection || await createConnection();
+  await updateDb(connection);
+  await connection.transaction(async tr => {
+    const seed = new Seed(tr.connection);
+    await seed.runBenchmark();
+  });
+}
+
+export async function createServer(port: number, initDb = createInitialDb): Promise<FlexServer> {
+  const flexServer = new FlexServer(port);
+  flexServer.addJsonSupport();
+  await flexServer.start();
+  await flexServer.initHomeDBManager();
+  flexServer.addAccessMiddleware();
+  flexServer.addApiMiddleware();
+  flexServer.addHomeApi();
+  flexServer.addApiErrorHandlers();
+  await initDb(flexServer.getHomeDBManager().connection);
+  flexServer.summary();
+  return flexServer;
+}
+
+export async function createBenchmarkServer(port: number): Promise<FlexServer> {
+  return createServer(port, createBenchmarkDb);
+}
+
+// Generates a random dataset of orgs, workspaces and docs. The number of workspaces
+// given is per org, and the number of docs given is per workspace.
+function _generateData(numOrgs: number, numWorkspaces: number, numDocs: number) {
+  if (numOrgs < 1 || numWorkspaces < 1 || numDocs < 0) {
+    throw new Error('_generateData error: Invalid arguments');
+  }
+  const example = [];
+  for (let i = 0; i < numOrgs; i++) {
+    const workspaces = [];
+    for (let j = 0; j < numWorkspaces; j++) {
+      const docs = [];
+      for (let k = 0; k < numDocs; k++) {
+        const docIndex = (i * numWorkspaces * numDocs) + (j * numDocs) + k;
+        docs.push(`doc-${docIndex}`);
+      }
+      const workspaceIndex = (i * numWorkspaces) + j;
+      workspaces.push({
+        name: `ws-${workspaceIndex}`,
+        docs
+      });
+    }
+    example.push({
+      name: `org-${i}`,
+      domain: `org-${i}`,
+      workspaces
+    });
+  }
+  return example;
+}
+
+/**
+ * To set up TYPEORM_* environment variables for testing, call this in a before() call of a test
+ * suite, using setUpDB(this);
+ */
+export function setUpDB(context?: IHookCallbackContext) {
+  if (!process.env.TYPEORM_DATABASE) {
+    process.env.TYPEORM_DATABASE = ":memory:";
+  } else {
+    if (context) { context.timeout(60000); }
+  }
+}
+
+async function main() {
+  const cmd = process.argv[2];
+  if (cmd === 'init') {
+    const connection = await createConnection();
+    await createInitialDb(connection);
+    return;
+  } else if (cmd === 'benchmark') {
+    const connection = await createConnection();
+    await createInitialDb(connection, false);
+    await createBenchmarkDb(connection);
+    return;
+  } else if (cmd === 'migrate') {
+    process.env.TYPEORM_LOGGING = 'true';
+    const connection = await createConnection();
+    await runMigrations(connection);
+    return;
+  } else if (cmd === 'revert') {
+    process.env.TYPEORM_LOGGING = 'true';
+    const connection = await createConnection();
+    await undoLastMigration(connection);
+    return;
+  } else if (cmd === 'serve') {
+    const home = await createServer(3000);
+    // tslint:disable-next-line:no-console
+    console.log(`Home API demo available at ${home.getOwnUrl()}`);
+    return;
+  }
+  // tslint:disable-next-line:no-console
+  console.log("Call with: init | migrate | revert | serve | benchmark");
+}
+
+if (require.main === module) {
+  main().catch(e => {
+    // tslint:disable-next-line:no-console
+    console.log(e);
+  });
+}
diff --git a/test/gen-server/testUtils.ts b/test/gen-server/testUtils.ts
new file mode 100644
index 00000000..81dc18a1
--- /dev/null
+++ b/test/gen-server/testUtils.ts
@@ -0,0 +1,103 @@
+import {GristLoadConfig} from 'app/common/gristUrls';
+import {BillingAccount} from 'app/gen-server/entity/BillingAccount';
+import {Organization} from 'app/gen-server/entity/Organization';
+import {Product} from 'app/gen-server/entity/Product';
+import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
+import {INotifier} from 'app/server/lib/INotifier';
+import {AxiosRequestConfig} from "axios";
+import {delay} from 'bluebird';
+
+/**
+ * Returns an AxiosRequestConfig that identifies the user with `username` on a server running
+ * against a database using `test/gen-server/seed.ts`. Also tells axios not to raise exceptions
+ * on failed requests.
+ */
+export function configForUser(username: string): AxiosRequestConfig {
+  const config: AxiosRequestConfig = {
+    responseType: 'json',
+    validateStatus: (status: number) => true,
+    headers: {
+      'X-Requested-With': 'XMLHttpRequest',
+    }
+  };
+  if (username !== 'Anonymous') {
+    config.headers.Authorization = 'Bearer api_key_for_' + username.toLowerCase();
+  }
+  return config;
+}
+
+/**
+ * Create a new user and return their personal org.
+ */
+export async function createUser(dbManager: HomeDBManager, name: string): Promise<Organization> {
+  const username = name.toLowerCase();
+  const email = `${username}@getgrist.com`;
+  const user = await dbManager.getUserByLogin(email, {email, name});
+  if (!user) { throw new Error('failed to create user'); }
+  user.apiKey = `api_key_for_${username}`;
+  await user.save();
+  const userHome = (await dbManager.getOrg({userId: user.id}, null)).data;
+  if (!userHome) { throw new Error('failed to create personal org'); }
+  return userHome;
+}
+
+/**
+ * Associate a given org with a given product.
+ */
+export async function setPlan(dbManager: HomeDBManager, org: {billingAccount?: {id: number}},
+                              productName: string) {
+  const product = await dbManager.connection.manager.findOne(Product, {where: {name: productName}});
+  if (!product) { throw new Error(`cannot find product ${productName}`); }
+  if (!org.billingAccount) { throw new Error('must join billingAccount'); }
+  await dbManager.connection.createQueryBuilder()
+    .update(BillingAccount)
+    .set({product})
+    .where('id = :bid', {bid: org.billingAccount.id})
+    .execute();
+}
+
+/**
+ * Returns the window.gristConfig object extracted from the raw HTML of app.html page.
+ */
+export function getGristConfig(page: string): Partial<GristLoadConfig> {
+  const match = /window\.gristConfig = ([^;]*)/.exec(page);
+  if (!match) { throw new Error('cannot find grist config'); }
+  return JSON.parse(match[1]);
+}
+
+/**
+ * Waits for all pending (back-end) notifications to complete. Notifications are
+ * started during request handling, but may not complete fully during it.
+ */
+export async function waitForAllNotifications(notifier: INotifier, maxWait: number = 1000) {
+  const start = Date.now();
+  while (Date.now() - start < maxWait) {
+    if (!notifier.testPending) { return; }
+    await delay(1);
+  }
+  throw new Error('waitForAllNotifications timed out');
+}
+
+// count the number of rows in a table
+export async function getRowCount(dbManager: HomeDBManager, tableName: string): Promise<number> {
+  const result = await dbManager.connection.query(`select count(*) as ct from ${tableName}`);
+  return parseInt(result[0].ct, 10);
+}
+
+// gather counts for all significant tables - handy as a sanity check on deletions
+export async function getRowCounts(dbManager: HomeDBManager) {
+  return {
+    aclRules: await getRowCount(dbManager, 'acl_rules'),
+    docs: await getRowCount(dbManager, 'docs'),
+    groupGroups: await getRowCount(dbManager, 'group_groups'),
+    groupUsers: await getRowCount(dbManager, 'group_users'),
+    groups: await getRowCount(dbManager, 'groups'),
+    logins: await getRowCount(dbManager, 'logins'),
+    orgs: await getRowCount(dbManager, 'orgs'),
+    users: await getRowCount(dbManager, 'users'),
+    workspaces: await getRowCount(dbManager, 'workspaces'),
+    billingAccounts: await getRowCount(dbManager, 'billing_accounts'),
+    billingAccountManagers: await getRowCount(dbManager, 'billing_account_managers'),
+    products: await getRowCount(dbManager, 'products')
+  };
+}
diff --git a/test/server/customUtil.ts b/test/server/customUtil.ts
new file mode 100644
index 00000000..91a4175a
--- /dev/null
+++ b/test/server/customUtil.ts
@@ -0,0 +1,66 @@
+import {getAppRoot} from 'app/server/lib/places';
+import {fromCallback} from 'bluebird';
+import * as express from 'express';
+import * as http from 'http';
+import {AddressInfo, Socket} from 'net';
+import * as path from 'path';
+import {fixturesRoot} from 'test/server/testUtils';
+
+export interface Serving {
+  url: string;
+  shutdown: () => void;
+}
+
+
+// Adds static files from a directory.
+// By default exposes test/fixtures/sites.
+export function addStatic(app: express.Express, rootDir?: string) {
+  // mix in a copy of the plugin api
+  app.use(/^\/(grist-plugin-api.js)$/, (req, res) =>
+    res.sendFile(req.params[0], {root: path.resolve(getAppRoot(), "static")}));
+  app.use(express.static(rootDir || path.resolve(fixturesRoot, "sites"), {
+    setHeaders: (res) => {
+      res.set("Access-Control-Allow-Origin", "*");
+    }
+  }));
+}
+
+// Serve from a directory.
+export async function serveStatic(rootDir: string): Promise<Serving> {
+  return serveSomething(app => addStatic(app, rootDir));
+}
+
+// Serve a string of html.
+export async function serveSinglePage(html: string): Promise<Serving> {
+  return serveSomething(app => {
+    app.get('', (req, res) => res.send(html));
+  });
+}
+
+export function serveCustomViews(): Promise<Serving> {
+  return serveStatic(path.resolve(fixturesRoot, "sites"));
+}
+
+export async function serveSomething(setup: (app: express.Express) => void, port = 0): Promise<Serving> {
+  const app = express();
+  const server = http.createServer(app);
+  await fromCallback((cb: any) => server.listen(port, cb));
+
+  const connections = new Set<Socket>();
+  server.on('connection', (conn) => {
+    connections.add(conn);
+    conn.on('close', () => connections.delete(conn));
+  });
+
+  function shutdown() {
+    server.close();
+    for (const conn of connections) { conn.destroy(); }
+  }
+
+  port = (server.address() as AddressInfo).port;
+  app.set('port', port);
+  setup(app);
+  const url = `http://localhost:${port}`;
+  return {url, shutdown};
+}
diff --git a/test/server/docTools.ts b/test/server/docTools.ts
new file mode 100644
index 00000000..32d7be52
--- /dev/null
+++ b/test/server/docTools.ts
@@ -0,0 +1,243 @@
+import {getDocWorkerMap} from 'app/gen-server/lib/DocWorkerMap';
+import {ActiveDoc} from 'app/server/lib/ActiveDoc';
+import {DummyAuthorizer} from 'app/server/lib/Authorizer';
+import {create} from 'app/server/lib/create';
+import {DocManager} from 'app/server/lib/DocManager';
+import {DocSession, makeExceptionalDocSession} from 'app/server/lib/DocSession';
+import {DocStorageManager} from 'app/server/lib/DocStorageManager';
+import {GristServer} from 'app/server/lib/GristServer';
+import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
+import {getAppRoot} from 'app/server/lib/places';
+import {PluginManager} from 'app/server/lib/PluginManager';
+import {createTmpDir as createTmpUploadDir, FileUploadInfo, globalUploadSet} from 'app/server/lib/uploads';
+import * as testUtils from 'test/server/testUtils';
+
+import {assert} from 'chai';
+import * as fse from 'fs-extra';
+import {tmpdir} from 'os';
+import * as path from 'path';
+import * as tmp from 'tmp';
+
+tmp.setGracefulCleanup();
+
+// it is sometimes useful in debugging to turn off automatic cleanup of docs and workspaces.
+const noCleanup = Boolean(process.env.NO_CLEANUP);
+
+/**
+ * Use from a test suite to get an object with convenient methods for creating ActiveDocs:
+ *
+ *  createDoc(docName): creates a new empty document.
+ *  loadFixtureDoc(docName): loads a copy of a fixture document.
+ *  loadDoc(docName): loads a given document, e.g. previously created with createDoc().
+ *  createFakeSession(): creates a fake DocSession for use when applying user actions.
+ *
+ * Also available are accessors for the created "managers":
+ *  getDocManager()
+ *  getStorageManager()
+ *  getPluginManager()
+ *
+ * It also takes care of cleaning up any created ActiveDocs.
+ * @param persistAcrossCases Don't shut down created ActiveDocs between test cases.
+ * @param useFixturePlugins Use the plugins in `test/fixtures/plugins`
+ */
+export function createDocTools(options: {persistAcrossCases?: boolean,
+                                         useFixturePlugins?: boolean,
+                                         storageManager?: IDocStorageManager,
+                                         server?: GristServer} = {}) {
+  let tmpDir: string;
+  let docManager: DocManager;
+
+  async function doBefore() {
+    tmpDir = await createTmpDir();
+    const pluginManager = options.useFixturePlugins ? await createFixturePluginManager() : undefined;
+    docManager = await createDocManager({tmpDir, pluginManager, storageManager: options.storageManager,
+                                         server: options.server});
+  }
+
+  async function doAfter() {
+    // Clean up at the end of the test suite (in addition to the optional per-test cleanup).
+    await testUtils.captureLog('info', () => docManager.shutdownAll());
+    assert.equal(docManager.numOpenDocs(), 0);
+    await globalUploadSet.cleanupAll();
+
+    // Clean up the temp directory.
+    if (!noCleanup) {
+      await fse.remove(tmpDir);
+    }
+  }
+
+  // Allow using outside of mocha
+  if (typeof before !== "undefined") {
+    before(doBefore);
+    after(doAfter);
+
+    // Check after each test case that all ActiveDocs got shut down.
+    afterEach(async function() {
+      if (!options.persistAcrossCases) {
+        await docManager.shutdownAll();
+        assert.equal(docManager.numOpenDocs(), 0);
+      }
+    });
+  }
+
+  const systemSession = makeExceptionalDocSession('system');
+  return {
+    /** create a fake session for use when applying user actions to a document */
+    createFakeSession(): DocSession {
+      return {client: null, authorizer: new DummyAuthorizer('editors', 'doc')} as any as DocSession;
+    },
+
+    /** create a throw-away, empty document for testing purposes */
+    async createDoc(docName: string): Promise<ActiveDoc> {
+      return docManager.createNewEmptyDoc(systemSession, docName);
+    },
+
+    /** load a copy of a fixture document for testing purposes */
+    async loadFixtureDoc(docName: string): Promise<ActiveDoc> {
+      const copiedDocName = await testUtils.useFixtureDoc(docName, docManager.storageManager);
+      return this.loadDoc(copiedDocName);
+    },
+
+    /** load a copy of a local document at an arbitrary path on disk for testing purposes */
+    async loadLocalDoc(srcPath: string): Promise<ActiveDoc> {
+      const copiedDocName = await testUtils.useLocalDoc(srcPath, docManager.storageManager);
+      return this.loadDoc(copiedDocName);
+    },
+
+    /** like `loadFixtureDoc`, but lets you rename the document on disk */
+    async loadFixtureDocAs(docName: string, alias: string): Promise<ActiveDoc> {
+      const copiedDocName = await testUtils.useFixtureDoc(docName, docManager.storageManager, alias);
+      return this.loadDoc(copiedDocName);
+    },
+
+    /** Loads a given document, e.g. previously created with createDoc() */
+    async loadDoc(docName: string): Promise<ActiveDoc> {
+      return docManager.fetchDoc(systemSession, docName);
+    },
+
+    getDocManager() { return docManager; },
+    getStorageManager() { return docManager.storageManager; },
+    getPluginManager() { return docManager.pluginManager; },
+
+    /** Setup that needs to be done before using the tools, typically called by mocha */
+    before() { return doBefore(); },
+
+    /** Teardown that needs to be done after using the tools, typically called by mocha */
+    after() { return doAfter(); },
+  };
+}
+
+/**
+ * Returns a DocManager for tests, complete with a PluginManager and DocStorageManager.
+ * @param options.pluginManager The PluginManager to use; defaults to using a real global singleton
+ * that loads built-in modules.
+ */
+export async function createDocManager(
+  options: {tmpDir?: string, pluginManager?: PluginManager,
+            storageManager?: IDocStorageManager,
+            server?: GristServer} = {}): Promise<DocManager> {
+  // Set Grist home to a temporary directory, and wipe it out on exit.
+  const tmpDir = options.tmpDir || await createTmpDir();
+  const docStorageManager = options.storageManager || new DocStorageManager(tmpDir);
+  const pluginManager = options.pluginManager || await getGlobalPluginManager();
+  const store = getDocWorkerMap();
+  const internalPermitStore = store.getPermitStore('1');
+  const externalPermitStore = store.getPermitStore('2');
+  return new DocManager(docStorageManager, pluginManager, null, options.server || {
+    ...createDummyGristServer(),
+    getPermitStore() { return internalPermitStore; },
+    getExternalPermitStore() { return externalPermitStore; },
+    getStorageManager() { return docStorageManager; },
+  });
+}
+
+export function createDummyGristServer(): GristServer {
+  return {
+    create,
+    getHost() { return 'localhost:4242'; },
+    getHomeUrl() { return 'http://localhost:4242'; },
+    getHomeUrlByDocId() { return Promise.resolve('http://localhost:4242'); },
+    getMergedOrgUrl() { return 'http://localhost:4242'; },
+    getOwnUrl() { return 'http://localhost:4242'; },
+    getPermitStore() { throw new Error('no permit store'); },
+    getExternalPermitStore() { throw new Error('no external permit store'); },
+    getGristConfig() { return { homeUrl: '', timestampMs: 0 }; },
+    getOrgUrl() { return Promise.resolve(''); },
+    getResourceUrl() { return Promise.resolve(''); },
+    getSessions() { throw new Error('no sessions'); },
+    getComm() { throw new Error('no comms'); },
+    getHosts() { throw new Error('no hosts'); },
+    getHomeDBManager() { throw new Error('no db'); },
+    getStorageManager() { throw new Error('no storage manager'); },
+    getNotifier() { throw new Error('no notifier'); },
+    getDocTemplate() { throw new Error('no doc template'); },
+    getTag() { return 'tag'; },
+    sendAppPage() { return Promise.resolve(); },
+  };
+}
+
+export async function createTmpDir(): Promise<string> {
+  const tmpRootDir = process.env.TESTDIR || tmpdir();
+  await fse.mkdirs(tmpRootDir);
+  return fse.realpath(await tmp.dirAsync({
+    dir: tmpRootDir,
+    prefix: 'grist_test_',
+    unsafeCleanup: true,
+    keep: noCleanup,
+  }));
+}
+
+/**
+ * Creates a file with the given name (and simple dummy content) in dirPath, and returns
+ * FileUploadInfo for it.
+ */
+export async function createFile(dirPath: string, name: string): Promise<FileUploadInfo> {
+  const absPath = path.join(dirPath, name);
+  await fse.outputFile(absPath, `${name}:${name}\n`);
+  return {
+    absPath,
+    origName: name,
+    size: (await fse.stat(absPath)).size,
+    ext: path.extname(name),
+  };
+}
+
+/**
+ * Creates an upload with the given filenames (containing simple dummy content), in the
+ * globalUploadSet, and returns its uploadId. The upload is registered with the given accessId
+ * (userId), and the same id must be used to retrieve it.
+ */
+export async function createUpload(fileNames: string[], accessId: string|null): Promise<number> {
+  const {tmpDir, cleanupCallback} = await createTmpUploadDir({});
+  const files = await Promise.all(fileNames.map((name) => createFile(tmpDir, name)));
+  return globalUploadSet.registerUpload(files, tmpDir, cleanupCallback, accessId);
+}
+
+
+let _globalPluginManager: PluginManager|null = null;
+
+// Helper to create a singleton PluginManager. This includes loading built-in plugins. Since most
+// tests don't make any use of it, it's fine to reuse a single one. For tests that need a custom
+// one, pass one into createDocManager().
+export async function getGlobalPluginManager(): Promise<PluginManager> {
+  if (!_globalPluginManager) {
+    const appRoot = getAppRoot();
+    _globalPluginManager = new PluginManager(appRoot);
+    await _globalPluginManager.initialize();
+  }
+  return _globalPluginManager;
+}
+
+// Path to the folder where built-in plugins live in test/fixtures
+export const builtInFolder = path.join(testUtils.fixturesRoot, 'plugins/builtInPlugins');
+
+// Path to the folder where installed plugins live in test/fixtures
+export const installedFolder = path.join(testUtils.fixturesRoot, 'plugins/installedPlugins');
+
+// Creates a plugin manager which loads the plugins in `test/fixtures/plugins`
+async function createFixturePluginManager() {
+  const p = new PluginManager(builtInFolder, installedFolder);
+  p.appRoot = getAppRoot();
+  await p.initialize();
+  return p;
+}
diff --git a/test/server/gristClient.ts b/test/server/gristClient.ts
new file mode 100644
index 00000000..324a803a
--- /dev/null
+++ b/test/server/gristClient.ts
@@ -0,0 +1,167 @@
+import { DocAction } from 'app/common/DocActions';
+import { FlexServer } from 'app/server/lib/FlexServer';
+import axios from 'axios';
+import pick = require('lodash/pick');
+import * as WebSocket from 'ws';
+
+interface GristRequest {
+  reqId: number;
+  method: string;
+  args: any[];
+}
+
+interface GristResponse {
+  reqId: number;
+  error?: string;
+  errorCode?: string;
+  data?: any;
+}
+
+interface GristMessage {
+  type: 'clientConnect' | 'docUserAction';
+  docFD: number;
+  data: any;
+}
+
+export class GristClient {
+  public messages: GristMessage[] = [];
+
+  private _requestId: number = 0;
+  private _pending: Array<any> = [];
+  private _consumer: () => void;
+  private _ignoreTrivialActions: boolean = false;
+
+  constructor(public ws: any) {
+    ws.onmessage = (data: any) => {
+      const msg = pick(JSON.parse(data.data),
+                       ['reqId', 'error', 'errorCode', 'data', 'type', 'docFD']);
+      if (this._ignoreTrivialActions && msg.type === 'docUserAction' &&
+          msg.data?.actionGroup?.internal === true &&
+          msg.data?.docActions?.length === 0) {
+        return;
+      }
+      this._pending.push(msg);
+      if (this._consumer) { this._consumer(); }
+    };
+  }
+
+  // After a document is opened, the sandbox recomputes its formulas and sends any changes.
+  // The client will receive an update even if there are no changes. This may be useful in
+  // the future to know that the document is up to date. But for testing, this asynchronous
+  // message can be awkward. Call this method to ignore it.
+  public ignoreTrivialActions() {
+    this._ignoreTrivialActions = true;
+  }
+
+  public flush() {
+    this._pending = [];
+  }
+
+  public shift() {
+    return this._pending.shift();
+  }
+
+  public count() {
+    return this._pending.length;
+  }
+
+  public async read(): Promise<any> {
+    for (;;) {
+      if (this._pending.length) {
+        return this._pending.shift();
+      }
+      await new Promise(resolve => this._consumer = resolve);
+    }
+  }
+
+  public async readMessage(): Promise<GristMessage> {
+    const result = await this.read();
+    if (!result.type) {
+      throw new Error(`message looks wrong: ${JSON.stringify(result)}`);
+    }
+    return result;
+  }
+
+  public async readResponse(): Promise<GristResponse> {
+    this.messages = [];
+    for (;;) {
+      const result = await this.read();
+      if (result.reqId === undefined) {
+        this.messages.push(result);
+        continue;
+      }
+      if (result.reqId !== this._requestId) {
+        throw new Error("unexpected request id");
+      }
+      return result;
+    }
+  }
+
+  // Helper to read the next docUserAction ignoring anything else (e.g. a duplicate clientConnect).
+  public async readDocUserAction(): Promise<DocAction[]> {
+    while (true) {  // eslint-disable-line no-constant-condition
+      const msg = await this.readMessage();
+      if (msg.type === 'docUserAction') {
+        return msg.data.docActions;
+      }
+    }
+  }
+
+  public async send(method: string, ...args: any[]): Promise<GristResponse> {
+    const p = this.readResponse();
+    this._requestId++;
+    const req: GristRequest = {
+      reqId: this._requestId,
+      method,
+      args
+    };
+    this.ws.send(JSON.stringify(req));
+    const result = await p;
+    return result;
+  }
+
+  public async close() {
+    this.ws.terminate();
+    this.ws.close();
+  }
+
+  public async openDocOnConnect(docId: string) {
+    const msg = await this.readMessage();
+    if (msg.type !== 'clientConnect') { throw new Error('expected clientConnect'); }
+    const openDoc = await this.send('openDoc', docId);
+    if (openDoc.error) { throw new Error('error in openDocOnConnect'); }
+    return openDoc;
+  }
+}
+
+export async function openClient(server: FlexServer, email: string, org: string,
+                                 emailHeader?: string): Promise<GristClient> {
+  const headers: Record<string, string> = {};
+  if (!emailHeader) {
+    const resp = await axios.get(`${server.getOwnUrl()}/test/session`);
+    const cookie = resp.headers['set-cookie'][0];
+    if (email !== 'anon@getgrist.com') {
+      const cid = decodeURIComponent(cookie.split('=')[1].split(';')[0]);
+      const comm = server.getComm();
+      const sessionId = comm.getSessionIdFromCookie(cid);
+      const scopedSession = comm.getOrCreateSession(sessionId, {org});
+      const profile = { email, email_verified: true, name: "Someone" };
+      await scopedSession.updateUserProfile({} as any, profile);
+    }
+    headers.Cookie = cookie;
+  } else {
+    headers[emailHeader] = email;
+  }
+  const ws = new WebSocket('ws://localhost:' + server.getOwnPort() + `/o/${org}`, {
+    headers
+  });
+  await new Promise(function(resolve, reject) {
+    ws.on('open', function() {
+      resolve(ws);
+    });
+    ws.on('error', function(err: any) {
+      reject(err);
+    });
+  });
+  return new GristClient(ws);
+}
diff --git a/test/server/lib/Authorizer.ts b/test/server/lib/Authorizer.ts
new file mode 100644
index 00000000..6c4af796
--- /dev/null
+++ b/test/server/lib/Authorizer.ts
@@ -0,0 +1,305 @@
+import {parseUrlId} from 'app/common/gristUrls';
+import {HomeDBManager} from 'app/gen-server/lib/HomeDBManager';
+import {DocManager} from 'app/server/lib/DocManager';
+import {FlexServer} from 'app/server/lib/FlexServer';
+import axios from 'axios';
+import {assert} from 'chai';
+import {toPairs} from 'lodash';
+import {createInitialDb, removeConnection, setUpDB} from 'test/gen-server/seed';
+import {configForUser, getGristConfig} from 'test/gen-server/testUtils';
+import {createDocTools} from 'test/server/docTools';
+import {openClient} from 'test/server/gristClient';
+import * as testUtils from 'test/server/testUtils';
+import * as uuidv4 from 'uuid/v4';
+
+let serverUrl: string;
+let server: FlexServer;
+let dbManager: HomeDBManager;
+
+async function activateServer(home: FlexServer, docManager: DocManager) {
+  await home.initHomeDBManager();
+  home.addHosts();
+  home.addDocWorkerMap();
+  home.addAccessMiddleware();
+  dbManager = home.getHomeDBManager();
+  await home.loadConfig({});
+  home.addSessions();
+  home.addHealthCheck();
+  docManager.testSetHomeDbManager(dbManager);
+  home.testSetDocManager(docManager);
+  await home.start();
+  home.addAccessMiddleware();
+  home.addApiMiddleware();
+  home.addJsonSupport();
+  await home.addLandingPages();
+  home.addHomeApi();
+  await home.addDoc();
+  home.addApiErrorHandlers();
+  serverUrl = home.getOwnUrl();
+}
+
+const chimpy = configForUser('Chimpy');
+const charon = configForUser('Charon');
+
+const fixtures: {[docName: string]: string|null} = {
+  Bananas: 'Hello.grist',
+  Pluto: 'Hello.grist',
+};
+
+describe('Authorizer', function() {
+
+  testUtils.setTmpLogLevel('fatal');
+
+  server = new FlexServer(0, 'test docWorker');
+  const docTools = createDocTools({persistAcrossCases: true, useFixturePlugins: false,
+                                   server});
+  const docs: {[name: string]: {id: string}} = {};
+
+  // Loads the fixture documents so that they are available to the doc worker under the correct
+  // names.
+  async function loadFixtureDocs() {
+    for (const [docName, fixtureDoc] of toPairs(fixtures)) {
+      const docId = String(await dbManager.testGetId(docName));
+      if (fixtureDoc) {
+        await docTools.loadFixtureDocAs(fixtureDoc, docId);
+      } else {
+        await docTools.createDoc(docId);
+      }
+      docs[docName] = {id: docId};
+    }
+  }
+
+  let oldEnv: testUtils.EnvironmentSnapshot;
+  before(async function() {
+    this.timeout(5000);
+    setUpDB(this);
+    oldEnv = new testUtils.EnvironmentSnapshot();
+    process.env.GRIST_PROXY_AUTH_HEADER = 'X-email';
+    await createInitialDb();
+    await activateServer(server, docTools.getDocManager());
+    await loadFixtureDocs();
+  });
+
+  after(async function() {
+    const messages = await testUtils.captureLog('warn', async () => {
+      await server.close();
+      await removeConnection();
+    });
+    assert.lengthOf(messages, 0);
+    oldEnv.restore();
+  });
+
+  // TODO XXX Is it safe to remove this support now?
+  // (It used to be implemented in getDocAccessInfo() in Authorizer.ts).
+  it.skip("viewer gets redirect by title", async function() {
+    const resp = await axios.get(`${serverUrl}/o/pr/doc/Bananas`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.equal(getGristConfig(resp.data).assignmentId, 'sample_6');
+    assert.match(resp.request.res.responseUrl, /\/doc\/sample_6$/);
+    const resp2 = await axios.get(`${serverUrl}/o/nasa/doc/Pluto`, chimpy);
+    assert.equal(resp2.status, 200);
+    assert.equal(getGristConfig(resp2.data).assignmentId, 'sample_2');
+    assert.match(resp2.request.res.responseUrl, /\/doc\/sample_2$/);
+  });
+
+  it("stranger gets consistent refusal regardless of title", async function() {
+    const resp = await axios.get(`${serverUrl}/o/pr/doc/Bananas`, charon);
+    assert.equal(resp.status, 404);
+    assert.notMatch(resp.data, /sample_6/);
+    const resp2 = await axios.get(`${serverUrl}/o/pr/doc/Bananas2`, charon);
+    assert.equal(resp2.status, 404);
+    assert.notMatch(resp.data, /sample_6/);
+    assert.deepEqual(resp.data, resp2.data);
+  });
+
+  it("viewer can access title", async function() {
+    const resp = await axios.get(`${serverUrl}/o/pr/doc/sample_6`, chimpy);
+    assert.equal(resp.status, 200);
+    const config = getGristConfig(resp.data);
+    assert.equal(config.getDoc![config.assignmentId!].name, 'Bananas');
+  });
+
+  it("stranger cannot access title", async function() {
+    const resp = await axios.get(`${serverUrl}/o/pr/doc/sample_6`, charon);
+    assert.equal(resp.status, 403);
+    assert.notMatch(resp.data, /Bananas/);
+  });
+
+  it("viewer cannot access document from wrong org", async function() {
+    const resp = await axios.get(`${serverUrl}/o/nasa/doc/sample_6`, chimpy);
+    assert.equal(resp.status, 404);
+  });
+
+  it("websocket allows openDoc for viewer", async function() {
+    const cli = await openClient(server, 'chimpy@getgrist.com', 'pr');
+    cli.ignoreTrivialActions();
+    assert.equal((await cli.readMessage()).type, 'clientConnect');
+    const openDoc = await cli.send("openDoc", "sample_6");
+    assert.equal(openDoc.error, undefined);
+    assert.match(JSON.stringify(openDoc.data), /Table1/);
+    await cli.close();
+  });
+
+  it("websocket forbids openDoc for stranger", async function() {
+    const cli = await openClient(server, 'charon@getgrist.com', 'pr');
+    cli.ignoreTrivialActions();
+    assert.equal((await cli.readMessage()).type, 'clientConnect');
+    const openDoc = await cli.send("openDoc", "sample_6");
+    assert.match(openDoc.error!, /No view access/);
+    assert.equal(openDoc.data, undefined);
+    assert.match(openDoc.errorCode!, /AUTH_NO_VIEW/);
+    await cli.close();
+  });
+
+  it("websocket forbids applyUserActions for viewer", async function() {
+    const cli = await openClient(server, 'charon@getgrist.com', 'nasa');
+    cli.ignoreTrivialActions();
+    assert.equal((await cli.readMessage()).type, 'clientConnect');
+    const openDoc = await cli.openDocOnConnect("sample_2");
+    assert.equal(openDoc.error, undefined);
+    const nonce = uuidv4();
+    const applyUserActions = await cli.send("applyUserActions",
+                                            0,
+                                            [["UpdateRecord", "Table1", 1, {A: nonce}], {}]);
+    assert.lengthOf(cli.messages, 0);  // no user actions pushed to client
+    assert.match(applyUserActions.error!, /No write access/);
+    assert.match(applyUserActions.errorCode!, /AUTH_NO_EDIT/);
+    const fetchTable = await cli.send("fetchTable", 0, "Table1");
+    assert.equal(fetchTable.error, undefined);
+    assert.notInclude(JSON.stringify(fetchTable.data), nonce);
+    await cli.close();
+  });
+
+  it("websocket allows applyUserActions for editor", async function() {
+    const cli = await openClient(server, 'chimpy@getgrist.com', 'nasa');
+    cli.ignoreTrivialActions();
+    assert.equal((await cli.readMessage()).type, 'clientConnect');
+    const openDoc = await cli.openDocOnConnect("sample_2");
+    assert.equal(openDoc.error, undefined);
+    const nonce = uuidv4();
+    const applyUserActions = await cli.send("applyUserActions",
+                                            0,
+                                            [["UpdateRecord", "Table1", 1, {A: nonce}]]);
+    assert.lengthOf(cli.messages, 1);  // user actions pushed to client
+    assert.equal(applyUserActions.error, undefined);
+    const fetchTable = await cli.send("fetchTable", 0, "Table1");
+    assert.equal(fetchTable.error, undefined);
+    assert.include(JSON.stringify(fetchTable.data), nonce);
+    await cli.close();
+  });
+
+  it("can keep different simultaneous clients of a doc straight", async function() {
+    const editor = await openClient(server, 'chimpy@getgrist.com', 'nasa');
+    assert.equal((await editor.readMessage()).type, 'clientConnect');
+    const viewer = await openClient(server, 'charon@getgrist.com', 'nasa');
+    assert.equal((await viewer.readMessage()).type, 'clientConnect');
+    const stranger = await openClient(server, 'kiwi@getgrist.com', 'nasa');
+    assert.equal((await stranger.readMessage()).type, 'clientConnect');
+
+    editor.ignoreTrivialActions();
+    viewer.ignoreTrivialActions();
+    stranger.ignoreTrivialActions();
+    assert.equal((await editor.send("openDoc", "sample_2")).error, undefined);
+    assert.equal((await viewer.send("openDoc", "sample_2")).error, undefined);
+    assert.match((await stranger.send("openDoc", "sample_2")).error!, /No view access/);
+
+    const action = [0, [["UpdateRecord", "Table1", 1, {A: "foo"}]]];
+    assert.equal((await editor.send("applyUserActions", ...action)).error, undefined);
+    assert.match((await viewer.send("applyUserActions", ...action)).error!, /No write access/);
+    // Different message here because sending actions without a doc being open.
+ assert.match((await stranger.send("applyUserActions", ...action)).error!, /Invalid/); + }); + + it("previewer has view access to docs", async function() { + const cli = await openClient(server, 'thumbnail@getgrist.com', 'nasa'); + cli.ignoreTrivialActions(); + assert.equal((await cli.readMessage()).type, 'clientConnect'); + const openDoc = await cli.send("openDoc", "sample_2"); + assert.equal(openDoc.error, undefined); + const nonce = uuidv4(); + const applyUserActions = await cli.send("applyUserActions", + 0, + [["UpdateRecord", "Table1", 1, {A: nonce}], {}]); + assert.lengthOf(cli.messages, 0); // no user actions pushed to client + assert.match(applyUserActions.error!, /No write access/); + assert.match(applyUserActions.errorCode!, /AUTH_NO_EDIT/); + const fetchTable = await cli.send("fetchTable", 0, "Table1"); + assert.equal(fetchTable.error, undefined); + assert.notInclude(JSON.stringify(fetchTable.data), nonce); + await cli.close(); + }); + + it("viewer can fork doc", async function() { + const cli = await openClient(server, 'charon@getgrist.com', 'nasa'); + cli.ignoreTrivialActions(); + assert.equal((await cli.readMessage()).type, 'clientConnect'); + const openDoc = await cli.send("openDoc", "sample_2"); + assert.equal(openDoc.error, undefined); + const result = await cli.send("fork", 0); + assert.equal(result.data.docId, result.data.urlId); + const parts = parseUrlId(result.data.docId); + assert.equal(parts.trunkId, "sample_2"); + assert.isAbove(parts.forkId!.length, 4); + assert.equal(parts.forkUserId, await dbManager.testGetId('Charon') as number); + }); + + it("anon can fork doc", async function() { + // anon does not have access to doc initially + const cli = await openClient(server, 'anon@getgrist.com', 'nasa'); + cli.ignoreTrivialActions(); + assert.equal((await cli.readMessage()).type, 'clientConnect'); + let openDoc = await cli.send("openDoc", "sample_2"); + assert.match(openDoc.error!, /No view access/); + + // grant anon access to doc and retry + await dbManager.updateDocPermissions({ + userId: await dbManager.testGetId('Chimpy') as number, + urlId: 'sample_2', + org: 'nasa' + }, {users: {"anon@getgrist.com": "viewers"}}); + dbManager.flushDocAuthCache(); + openDoc = await cli.send("openDoc", "sample_2"); + assert.equal(openDoc.error, undefined); + + // make a fork + const result = await cli.send("fork", 0); + assert.equal(result.data.docId, result.data.urlId); + const parts = parseUrlId(result.data.docId); + assert.equal(parts.trunkId, "sample_2"); + assert.isAbove(parts.forkId!.length, 4); + assert.equal(parts.forkUserId, undefined); + }); + + it("can set user via GRIST_PROXY_AUTH_HEADER", async function() { + // User can access a doc by setting header. + const docUrl = `${serverUrl}/o/pr/api/docs/sample_6`; + const resp = await axios.get(docUrl, { + headers: {'X-email': 'chimpy@getgrist.com'} + }); + assert.equal(resp.data.name, 'Bananas'); + + // Unknown user is denied. + await assert.isRejected(axios.get(docUrl, { + headers: {'X-email': 'notchimpy@getgrist.com'} + })); + + // User can access a doc via websocket by setting header. + let cli = await openClient(server, 'chimpy@getgrist.com', 'pr', 'X-email'); + cli.ignoreTrivialActions(); + assert.equal((await cli.readMessage()).type, 'clientConnect'); + let openDoc = await cli.send("openDoc", "sample_6"); + assert.equal(openDoc.error, undefined); + assert.match(JSON.stringify(openDoc.data), /Table1/); + await cli.close(); + + // Unknown user is denied. 
+    cli = await openClient(server, 'notchimpy@getgrist.com', 'pr', 'X-email');
+    cli.ignoreTrivialActions();
+    assert.equal((await cli.readMessage()).type, 'clientConnect');
+    openDoc = await cli.send("openDoc", "sample_6");
+    assert.match(openDoc.error!, /No view access/);
+    assert.equal(openDoc.data, undefined);
+    assert.match(openDoc.errorCode!, /AUTH_NO_VIEW/);
+    await cli.close();
+  });
+});
diff --git a/test/server/lib/DocApi.ts b/test/server/lib/DocApi.ts
new file mode 100644
index 00000000..48e80b66
--- /dev/null
+++ b/test/server/lib/DocApi.ts
@@ -0,0 +1,2589 @@
+import {ActionSummary} from 'app/common/ActionSummary';
+import {BulkColValues} from 'app/common/DocActions';
+import {arrayRepeat} from 'app/common/gutil';
+import {DocState, UserAPIImpl} from 'app/common/UserAPI';
+import {AddOrUpdateRecord} from 'app/plugin/DocApiTypes';
+import {teamFreeFeatures} from 'app/gen-server/entity/Product';
+import {CellValue, GristObjCode} from 'app/plugin/GristData';
+import {applyQueryParameters, docDailyApiUsageKey} from 'app/server/lib/DocApi';
+import * as log from 'app/server/lib/log';
+import {exitPromise} from 'app/server/lib/serverUtils';
+import {connectTestingHooks, TestingHooksClient} from 'app/server/lib/TestingHooks';
+import axios, {AxiosResponse} from 'axios';
+import {delay} from 'bluebird';
+import * as bodyParser from 'body-parser';
+import {assert} from 'chai';
+import {ChildProcess, execFileSync, spawn} from 'child_process';
+import * as FormData from 'form-data';
+import * as fse from 'fs-extra';
+import * as _ from 'lodash';
+import fetch from 'node-fetch';
+import {tmpdir} from 'os';
+import * as path from 'path';
+import {createClient, RedisClient} from 'redis';
+import {configForUser} from 'test/gen-server/testUtils';
+import {serveSomething, Serving} from 'test/server/customUtil';
+import * as testUtils from 'test/server/testUtils';
+import clone = require('lodash/clone');
+import defaultsDeep = require('lodash/defaultsDeep');
+
+const chimpy = configForUser('Chimpy');
+const kiwi = configForUser('Kiwi');
+const charon = configForUser('Charon');
+const nobody = configForUser('Anonymous');
+const support = configForUser('support');
+
+// some doc ids
+const docIds: {[name: string]: string} = {
+  ApiDataRecordsTest: 'sample_7',
+  Timesheets: 'sample_13',
+  Bananas: 'sample_6',
+  Antartic: 'sample_11'
+};
+
+// A testDir of the form grist_test_{USER}_{SERVER_NAME}
+const username = process.env.USER || "nobody";
+const tmpDir = path.join(tmpdir(), `grist_test_${username}_docapi`);
+
+let dataDir: string;
+let suitename: string;
+let serverUrl: string;
+let homeUrl: string;
+let hasHomeApi: boolean;
+let home: TestServer;
+let docs: TestServer;
+let userApi: UserAPIImpl;
+
+describe('DocApi', function() {
+  this.timeout(20000);
+  testUtils.setTmpLogLevel('error');
+  const oldEnv = clone(process.env);
+
+  before(async function() {
+    // Create the tmp dir removing any previous one
+    await fse.remove(tmpDir);
+    await fse.mkdirs(tmpDir);
+    log.warn(`Test logs and data are at: ${tmpDir}/`);
+
+    // Let's create a sqlite db that we can share with servers that run in other processes, hence
+    // not an in-memory db. Running seed.ts directly might not take into account the most recent
+    // value for TYPEORM_DATABASE, because ormconfig.js may already have been loaded with a
+    // different configuration (in-memory for instance). Spawning a process is one way to make
+    // sure that the latest value prevails.
+    process.env.TYPEORM_DATABASE = path.join(tmpDir, 'landing.db');
+    const seed = await testUtils.getBuildFile('test/gen-server/seed.js');
+    execFileSync('node', [seed, 'init'], {
+      env: process.env,
+      stdio: 'inherit'
+    });
+  });
+
+  after(() => {
+    Object.assign(process.env, oldEnv);
+  });
+
+  /**
+   * Doc api tests are run against three different setups:
+   *  - a merged server: a single server serving both as a home and doc worker
+   *  - two separated servers: requests are sent to a home server which then forwards them to a doc worker
+   *  - a doc worker: requests are sent directly to the doc worker (note that even though it is not
+   *    used for testing, we still start a home server, which is needed for setting up the test cases)
+   *
+   *  Future tests must be added within the testDocApi() function.
+   */
+
+  describe("should work with a merged server", async () => {
+    setup('merged', async () => {
+      home = docs = await startServer('home,docs');
+      homeUrl = serverUrl = home.serverUrl;
+      hasHomeApi = true;
+    });
+    testDocApi();
+  });
+
+  // The way these tests are written, a non-merged server requires Redis.
+  if (process.env.TEST_REDIS_URL) {
+    describe("should work with a home server and a docworker", async () => {
+      setup('separated', async () => {
+        home = await startServer('home');
+        docs = await startServer('docs', home.serverUrl);
+        homeUrl = serverUrl = home.serverUrl;
+        hasHomeApi = true;
+      });
+      testDocApi();
+    });
+
+    describe("should work directly with a docworker", async () => {
+      setup('docs', async () => {
+        home = await startServer('home');
+        docs = await startServer('docs', home.serverUrl);
+        homeUrl = home.serverUrl;
+        serverUrl = docs.serverUrl;
+        hasHomeApi = false;
+      });
+      testDocApi();
+    });
+  }
+
+  describe("QueryParameters", async () => {
+
+    function makeExample() {
+      return {
+        id: [ 1, 2, 3, 7, 8, 9 ],
+        color: ['red', 'yellow', 'white', 'blue', 'black', 'purple'],
+        spin: [ 'up', 'up', 'down', 'down', 'up', 'up'],
+      };
+    }
+
+    it("supports ascending sort", async function() {
+      assert.deepEqual(applyQueryParameters(makeExample(), {sort: ['color']}, null), {
+        id: [8, 7, 9, 1, 3, 2],
+        color: ['black', 'blue', 'purple', 'red', 'white', 'yellow'],
+        spin: ['up', 'down', 'up', 'up', 'down', 'up']
+      });
+    });
+
+    it("supports descending sort", async function() {
+      assert.deepEqual(applyQueryParameters(makeExample(), {sort: ['-id']}, null), {
+        id: [9, 8, 7, 3, 2, 1],
+        color: ['purple', 'black', 'blue', 'white', 'yellow', 'red'],
+        spin: ['up', 'up', 'down', 'down', 'up', 'up'],
+      });
+    });
+
+    it("supports multi-key sort", async function() {
+      assert.deepEqual(applyQueryParameters(makeExample(), {sort: ['-spin', 'color']}, null), {
+        id: [8, 9, 1, 2, 7, 3],
+        color: ['black', 'purple', 'red', 'yellow', 'blue', 'white'],
+        spin: ['up', 'up', 'up', 'up', 'down', 'down'],
+      });
+    });
+
+    it("does not freak out sorting mixed data", async function() {
+      const example = {
+        id: [ 1, 2, 3, 4, 5, 6, 7, 8, 9],
+        mixed: ['red', 'green', 'white', 2.5, 1, null, ['zing', 3] as any, 5, 'blue']
+      };
+      assert.deepEqual(applyQueryParameters(example, {sort: ['mixed']}, null), {
+        mixed: [1, 2.5, 5, null, ['zing', 3] as any, 'blue', 'green', 'red', 'white'],
+        id: [5, 4, 8, 6, 7, 9, 2, 1, 3],
+      });
+    });
+
+    it("supports limit", async function() {
+      assert.deepEqual(applyQueryParameters(makeExample(), {limit: 1}),
+        { id: [1], color: ['red'], spin: ['up'] });
+    });
+
+    it("supports sort and limit", async function() {
+      assert.deepEqual(applyQueryParameters(makeExample(), {sort: ['-color'], limit: 2}, null),
+        { id: [2, 3], color: ['yellow', 'white'], spin: ['up', 'down'] });
+    });
+  });
+});
+
+// Contains the tests. This is where you want to add more tests.
+function testDocApi() {
+
+  it("guesses types of new columns", async () => {
+    const userActions = [
+      ['AddTable', 'GuessTypes', []],
+      // Make 5 blank columns of type Any
+      ['AddColumn', 'GuessTypes', 'Date', {}],
+      ['AddColumn', 'GuessTypes', 'DateTime', {}],
+      ['AddColumn', 'GuessTypes', 'Bool', {}],
+      ['AddColumn', 'GuessTypes', 'Numeric', {}],
+      ['AddColumn', 'GuessTypes', 'Text', {}],
+      // Add string values from which the initial type will be guessed
+      ['AddRecord', 'GuessTypes', null, {
+        Date: "1970-01-02",
+        DateTime: "1970-01-02 12:00",
+        Bool: "true",
+        Numeric: "1.2",
+        Text: "hello",
+      }],
+    ];
+    const resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/apply`, userActions, chimpy);
+    assert.equal(resp.status, 200);
+
+    // Check that the strings were parsed to typed values
+    assert.deepEqual(
+      (await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/GuessTypes/records`, chimpy)).data,
+      {
+        records: [
+          {
+            id: 1,
+            fields: {
+              Date: 24 * 60 * 60,
+              DateTime: 36 * 60 * 60,
+              Bool: true,
+              Numeric: 1.2,
+              Text: "hello",
+            },
+          },
+        ],
+      },
+    );
+
+    // Check the column types
+    assert.deepEqual(
+      (await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/GuessTypes/columns`, chimpy))
+        .data.columns.map((col: any) => col.fields.type),
+      ["Date", "DateTime:UTC", "Bool", "Numeric", "Text"],
+    );
+  });
+
+  for (const mode of ['logged in', 'anonymous']) {
+    for (const content of ['with content', 'without content']) {
+      it(`POST /api/docs ${content} creates an unsaved doc when ${mode}`, async function() {
+        const user = (mode === 'logged in') ? chimpy : nobody;
+        const formData = new FormData();
+        formData.append('upload', 'A,B\n1,2\n3,4\n', 'table1.csv');
+        const config = defaultsDeep({headers: formData.getHeaders()}, user);
+        let resp = await axios.post(`${serverUrl}/api/docs`,
+          ...(content === 'with content' ?
[formData, config] : [null, user])); + assert.equal(resp.status, 200); + const urlId = resp.data; + if (mode === 'logged in') { + assert.match(urlId, /^new~[^~]*~[0-9]+$/); + } else { + assert.match(urlId, /^new~[^~]*$/); + } + + // Access information about that document should be sane for current user + resp = await axios.get(`${homeUrl}/api/docs/${urlId}`, user); + assert.equal(resp.status, 200); + assert.equal(resp.data.name, 'Untitled'); + assert.equal(resp.data.workspace.name, 'Examples & Templates'); + assert.equal(resp.data.access, 'owners'); + if (mode === 'anonymous') { + resp = await axios.get(`${homeUrl}/api/docs/${urlId}`, chimpy); + assert.equal(resp.data.access, 'owners'); + } else { + resp = await axios.get(`${homeUrl}/api/docs/${urlId}`, charon); + assert.equal(resp.status, 403); + resp = await axios.get(`${homeUrl}/api/docs/${urlId}`, nobody); + assert.equal(resp.status, 403); + } + + // content was successfully stored + resp = await axios.get(`${serverUrl}/api/docs/${urlId}/tables/Table1/data`, user); + if (content === 'with content') { + assert.deepEqual(resp.data, { id: [ 1, 2 ], manualSort: [ 1, 2 ], A: [ 1, 3 ], B: [ 2, 4 ] }); + } else { + assert.deepEqual(resp.data, { id: [], manualSort: [], A: [], B: [], C: [] }); + } + }); + } + } + + it("GET /docs/{did}/tables/{tid}/data retrieves data in column format", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, { + id: [1, 2, 3, 4], + A: ['hello', '', '', ''], + B: ['', 'world', '', ''], + C: ['', '', '', ''], + D: [null, null, null, null], + E: ['HELLO', '', '', ''], + manualSort: [1, 2, 3, 4] + }); + }); + + it("GET /docs/{did}/tables/{tid}/records retrieves data in records format", async function () { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/records`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, + { + records: + [ + { + id: 1, + fields: { + A: 'hello', + B: '', + C: '', + D: null, + E: 'HELLO', + }, + }, + { + id: 2, + fields: { + A: '', + B: 'world', + C: '', + D: null, + E: '', + }, + }, + { + id: 3, + fields: { + A: '', + B: '', + C: '', + D: null, + E: '', + }, + }, + { + id: 4, + fields: { + A: '', + B: '', + C: '', + D: null, + E: '', + }, + }, + ] + }); + }); + + it("GET /docs/{did}/tables/{tid}/records handles errors and hidden columns", async function () { + let resp = await axios.get(`${serverUrl}/api/docs/${docIds.ApiDataRecordsTest}/tables/Table1/records`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, + { + "records": [ + { + "id": 1, + "fields": { + "A": null, + "B": "Hi", + "C": 1, + }, + "errors": { + "A": "ZeroDivisionError" + } + } + ] + } + ); + + // /data format for comparison: includes manualSort, gristHelper_Display, and ["E", "ZeroDivisionError"] + resp = await axios.get(`${serverUrl}/api/docs/${docIds.ApiDataRecordsTest}/tables/Table1/data`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, + { + "id": [ + 1 + ], + "manualSort": [ + 1 + ], + "A": [ + [ + "E", + "ZeroDivisionError" + ] + ], + "B": [ + "Hi" + ], + "C": [ + 1 + ], + "gristHelper_Display": [ + "Hi" + ] + } + ); + }); + + it("GET /docs/{did}/tables/{tid}/columns retrieves columns", async function () { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/columns`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, + { 
+ columns: [ + { + id: 'A', + fields: { + colRef: 2, + parentId: 1, + parentPos: 1, + type: 'Text', + widgetOptions: '', + isFormula: false, + formula: '', + label: 'A', + untieColIdFromLabel: false, + summarySourceCol: 0, + displayCol: 0, + visibleCol: 0, + rules: null, + recalcWhen: 0, + recalcDeps: null + } + }, + { + id: 'B', + fields: { + colRef: 3, + parentId: 1, + parentPos: 2, + type: 'Text', + widgetOptions: '', + isFormula: false, + formula: '', + label: 'B', + untieColIdFromLabel: false, + summarySourceCol: 0, + displayCol: 0, + visibleCol: 0, + rules: null, + recalcWhen: 0, + recalcDeps: null + } + }, + { + id: 'C', + fields: { + colRef: 4, + parentId: 1, + parentPos: 3, + type: 'Text', + widgetOptions: '', + isFormula: false, + formula: '', + label: 'C', + untieColIdFromLabel: false, + summarySourceCol: 0, + displayCol: 0, + visibleCol: 0, + rules: null, + recalcWhen: 0, + recalcDeps: null + } + }, + { + id: 'D', + fields: { + colRef: 5, + parentId: 1, + parentPos: 3, + type: 'Any', + widgetOptions: '', + isFormula: true, + formula: '', + label: 'D', + untieColIdFromLabel: false, + summarySourceCol: 0, + displayCol: 0, + visibleCol: 0, + rules: null, + recalcWhen: 0, + recalcDeps: null + } + }, + { + id: 'E', + fields: { + colRef: 6, + parentId: 1, + parentPos: 4, + type: 'Any', + widgetOptions: '', + isFormula: true, + formula: '$A.upper()', + label: 'E', + untieColIdFromLabel: false, + summarySourceCol: 0, + displayCol: 0, + visibleCol: 0, + rules: null, + recalcWhen: 0, + recalcDeps: null + } + } + ] + } + ); + }); + + it("GET /docs/{did}/tables/{tid}/data returns 404 for non-existent doc", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/typotypotypo/tables/Table1/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /document not found/i); + }); + + it("GET /docs/{did}/tables/{tid}/data returns 404 for non-existent table", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Typo1/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /table not found/i); + }); + + it("GET /docs/{did}/tables/{tid}/columns returns 404 for non-existent doc", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/typotypotypo/tables/Table1/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /document not found/i); + }); + + it("GET /docs/{did}/tables/{tid}/columns returns 404 for non-existent table", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Typo1/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /table not found/i); + }); + + it("GET /docs/{did}/tables/{tid}/data supports filters", async function() { + function makeQuery(filters: {[colId: string]: any[]}) { + const query = "filter=" + encodeURIComponent(JSON.stringify(filters)); + return axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data?${query}`, chimpy); + } + function checkResults(resp: AxiosResponse, expectedData: any) { + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, expectedData); + } + + checkResults(await makeQuery({B: ['world']}), { + id: [2], A: [''], B: ['world'], C: [''], D: [null], E: [''], manualSort: [2], + }); + + // Can query by id + checkResults(await makeQuery({id: [1]}), { + id: [1], A: ['hello'], B: [''], C: [''], D: [null], E: ['HELLO'], manualSort: [1], + }); + + checkResults(await makeQuery({B: [''], A: ['']}), { + 
id: [3, 4], A: ['', ''], B: ['', ''], C: ['', ''], D: [null, null], E: ['', ''], manualSort: [3, 4], + }); + + // Empty filter is equivalent to no filter and should return full data. + checkResults(await makeQuery({}), { + id: [1, 2, 3, 4], + A: ['hello', '', '', ''], + B: ['', 'world', '', ''], + C: ['', '', '', ''], + D: [null, null, null, null], + E: ['HELLO', '', '', ''], + manualSort: [1, 2, 3, 4] + }); + + // An impossible filter should succeed but return an empty set of rows. + checkResults(await makeQuery({B: ['world'], C: ['Neptune']}), { + id: [], A: [], B: [], C: [], D: [], E: [], manualSort: [], + }); + + // An invalid filter should return an error + { + const resp = await makeQuery({BadCol: ['']}); + assert.equal(resp.status, 400); + assert.match(resp.data.error, /BadCol/); + } + + { + const resp = await makeQuery({B: 'world'} as any); + assert.equal(resp.status, 400); + assert.match(resp.data.error, /filter values must be arrays/); + } + }); + + for (const mode of ['url', 'header']) { + it(`GET /docs/{did}/tables/{tid}/data supports sorts and limits in ${mode}`, async function() { + function makeQuery(sort: string[]|null, limit: number|null) { + const url = new URL(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`); + const config = configForUser('chimpy'); + if (mode === 'url') { + if (sort) { url.searchParams.append('sort', sort.join(',')); } + if (limit) { url.searchParams.append('limit', String(limit)); } + } else { + if (sort) { config.headers['x-sort'] = sort.join(','); } + if (limit) { config.headers['x-limit'] = String(limit); } + } + return axios.get(url.href, config); + } + function checkResults(resp: AxiosResponse, expectedData: any) { + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, expectedData); + } + + checkResults(await makeQuery(['-id'], null), { + id: [4, 3, 2, 1], + A: ['', '', '', 'hello'], + B: ['', '', 'world', ''], + C: ['', '', '', ''], + D: [null, null, null, null], + E: ['', '', '', 'HELLO'], + manualSort: [4, 3, 2, 1] + }); + + checkResults(await makeQuery(['-id'], 2), { + id: [4, 3], + A: ['', ''], + B: ['', ''], + C: ['', ''], + D: [null, null], + E: ['', ''], + manualSort: [4, 3] + }); + }); + } + + it("GET /docs/{did}/tables/{tid}/data respects document permissions", async function() { + // as not part of any group kiwi cannot fetch Timesheets + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`, kiwi); + assert.equal(resp.status, 403); + }); + + it("GET /docs/{did}/tables/{tid}/data returns matches /not found/ for bad table id", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Bad_Foo_/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /not found/); + }); + + it("POST /docs/{did}/apply applies user actions", async function() { + const userActions = [ + ['AddTable', 'Foo', [{id: 'A'}, {id: 'B'}]], + ['BulkAddRecord', 'Foo', [1, 2], {A: ["Santa", "Bob"], B: [1, 11]}] + ]; + const resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/apply`, userActions, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual( + (await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy)).data, + {id: [1, 2], A: ['Santa', 'Bob'], B: ['1', '11'], manualSort: [1, 2]}); + }); + + it("POST /docs/{did}/apply respects document permissions", async function() { + const userActions = [ + ['AddTable', 'FooBar', [{id: 'A'}]] + ]; + let resp: AxiosResponse; + + // as a guest chimpy 
cannot edit Bananas + resp = await axios.post(`${serverUrl}/api/docs/${docIds.Bananas}/apply`, userActions, chimpy); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: 'No write access'}); + + // check that changes did not apply + resp = await axios.get(`${serverUrl}/api/docs/${docIds.Bananas}/tables/FooBar/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /not found/); + + // as not in any group kiwi cannot edit TestDoc + resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/apply`, userActions, kiwi); + assert.equal(resp.status, 403); + + // check that changes did not apply + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/FooBar/data`, chimpy); + assert.equal(resp.status, 404); + assert.match(resp.data.error, /not found/); + + }); + + it("POST /docs/{did}/tables/{tid}/data adds records", async function() { + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + A: ['Alice', 'Felix'], + B: [2, 22] + }, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, [3, 4]); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy); + assert.deepEqual(resp.data, { + id: [1, 2, 3, 4], + A: ['Santa', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + }); + + it("POST /docs/{did}/tables/{tid}/records adds records", async function() { + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, { + records: [ + {fields: {A: 'John', B: 55}}, + {fields: {A: 'Jane', B: 0}}, + ] + }, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, { + records: [ + {id: 5}, + {id: 6}, + ] + }); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, + { + records: + [ + { + id: 1, + fields: { + A: 'Santa', + B: '1', + }, + }, + { + id: 2, + fields: { + A: 'Bob', + B: '11', + }, + }, + { + id: 3, + fields: { + A: 'Alice', + B: '2', + }, + }, + { + id: 4, + fields: { + A: 'Felix', + B: '22', + }, + }, + { + id: 5, + fields: { + A: 'John', + B: '55', + }, + }, + { + id: 6, + fields: { + A: 'Jane', + B: '0', + }, + }, + ] + }); + }); + + it("POST /docs/{did}/tables/{tid}/data/delete deletes records", async function() { + let resp = await axios.post( + `${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data/delete`, + [3, 4, 5, 6], + chimpy, + ); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, null); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy); + assert.deepEqual(resp.data, { + id: [1, 2], + A: ['Santa', 'Bob'], + B: ["1", "11"], + manualSort: [1, 2] + }); + + // restore rows + await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + A: ['Alice', 'Felix'], + B: [2, 22] + }, chimpy); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy); + assert.deepEqual(resp.data, { + id: [1, 2, 3, 4], + A: ['Santa', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + }); + + function checkError(status: number, test: RegExp|object, resp: AxiosResponse, message?: string) { + assert.equal(resp.status, status); + if (test instanceof RegExp) { + assert.match(resp.data.error, test, message); + } else { + try { + assert.deepEqual(resp.data, test, message); + } catch(err) { + console.log(JSON.stringify(resp.data)); + 
console.log(JSON.stringify(test));
+        throw err;
+      }
+    }
+  }
+
+  it("parses strings in user actions", async () => {
+    // Create a test document.
+    const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+    const docId = await userApi.newDoc({name: 'testdoc'}, ws1);
+    const docUrl = `${serverUrl}/api/docs/${docId}`;
+    const recordsUrl = `${docUrl}/tables/Table1/records`;
+
+    // Make the column numeric, delete the other columns we don't care about
+    await axios.post(`${docUrl}/apply`, [
+      ['ModifyColumn', 'Table1', 'A', {type: 'Numeric'}],
+      ['RemoveColumn', 'Table1', 'B'],
+      ['RemoveColumn', 'Table1', 'C'],
+    ], chimpy);
+
+    // Add/update some records without and with string parsing
+    // Specifically test:
+    // 1. /apply, with an AddRecord
+    // 2. POST /records (BulkAddRecord)
+    // 3. PATCH /records (BulkUpdateRecord)
+    // Send strings that look like currency, which need string parsing to become numbers
+    for (const queryParams of ['?noparse=1', '']) {
+      await axios.post(`${docUrl}/apply${queryParams}`, [
+        ['AddRecord', 'Table1', null, {'A': '$1'}],
+      ], chimpy);
+
+      const response = await axios.post(`${recordsUrl}${queryParams}`,
+        {
+          records: [
+            {fields: {'A': '$2'}},
+            {fields: {'A': '$3'}},
+          ]
+        },
+        chimpy);
+
+      // Update $3 -> $4
+      const rowId = response.data.records[1].id;
+      await axios.patch(`${recordsUrl}${queryParams}`,
+        {
+          records: [
+            {id: rowId, fields: {'A': '$4'}}
+          ]
+        },
+        chimpy);
+    }
+
+    // Check the results
+    const resp = await axios.get(recordsUrl, chimpy);
+    assert.deepEqual(resp.data, {
+      records:
+        [
+          // Without string parsing
+          {id: 1, fields: {A: '$1'}},
+          {id: 2, fields: {A: '$2'}},
+          {id: 3, fields: {A: '$4'}},
+
+          // With string parsing
+          {id: 4, fields: {A: 1}},
+          {id: 5, fields: {A: 2}},
+          {id: 6, fields: {A: 4}},
+        ]
+      }
+    );
+  });
+
+  describe("PUT /docs/{did}/tables/{tid}/records", async function() {
+    it("should add or update records", async function() {
+      // create sample document for testing
+      const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id;
+      const docId = await userApi.newDoc({name: 'BlankTest'}, wid);
+      const url = `${serverUrl}/api/docs/${docId}/tables/Table1/records`;
+
+      async function check(records: AddOrUpdateRecord[], expectedTableData: BulkColValues, params: any={}) {
+        const resp = await axios.put(url, {records}, {...chimpy, params});
+        assert.equal(resp.status, 200);
+        const table = await userApi.getTable(docId, "Table1");
+        delete table.manualSort;
+        delete table.C;
+        assert.deepStrictEqual(table, expectedTableData);
+      }
+
+      // Add 3 new records; since the table is empty, nothing matches `require`
+      await check(
+        [
+          {
+            require: {A: 1},
+          },
+          {
+            // Since no record with A=2 is found, create a new record,
+            // but `fields` overrides `require` for the value when creating,
+            // so the new record has A=3
+            require: {A: 2},
+            fields: {A: 3},
+          },
+          {
+            require: {A: 4},
+            fields: {B: 5},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 3, 4], B: [0, 0, 5]}
+      );
+
+      // Update all three records since they all match the `require` values here
+      await check(
+        [
+          {
+            // Does nothing
+            require: {A: 1},
+          },
+          {
+            // Changes A from 3 to 33
+            require: {A: 3},
+            fields: {A: 33},
+          },
+          {
+            // Changes B from 5 to 6 in the third record where A=4
+            require: {A: 4},
+            fields: {B: 6},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 4], B: [0, 0, 6]}
+      );
+
+      // This would normally add a record, but noadd suppresses that
+      await check([
+          {
+            require: {A: 100},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 4], B: [0, 0, 6]},
+        {noadd: "1"},
+      );
+
+      // This would normally update A from 1 to 11, but noupdate suppresses that
+      await check([
+          {
+            require: {A: 1},
+            fields: {A: 11},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 4], B: [0, 0, 6]},
+        {noupdate: "1"},
+      );
+
+      // There are 2 records with B=0; update them both to B=1.
+      // Use onmany=all to specify that they should both be updated
+      await check([
+          {
+            require: {B: 0},
+            fields: {B: 1},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 4], B: [1, 1, 6]},
+        {onmany: "all"}
+      );
+
+      // In contrast to the above, the default behaviour for no value of onmany
+      // is to only update the first matching record,
+      // so only one of the records with B=1 is updated to B=2
+      await check([
+          {
+            require: {B: 1},
+            fields: {B: 2},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 4], B: [2, 1, 6]},
+      );
+
+      // By default, strings in `require` and `fields` are parsed based on column type,
+      // so these dollar amounts are treated as currency
+      // and parsed as A=4 and A=44
+      await check([
+          {
+            require: {A: "$4"},
+            fields: {A: "$44"},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, 44], B: [2, 1, 6]},
+      );
+
+      // Turn off the default string parsing with noparse=1.
+      // Now we need A=44 to actually be a number to match;
+      // A="$44" wouldn't match and would create a new record.
+      // Because A="$55" isn't parsed, the raw string is stored in the table.
+      await check([
+          {
+            require: {A: 44},
+            fields: {A: "$55"},
+          },
+        ],
+        {id: [1, 2, 3], A: [1, 33, "$55"], B: [2, 1, 6]},
+        {noparse: 1}
+      );
+
+      await check([
+          // First three records already exist and nothing happens
+          {require: {A: 1}},
+          {require: {A: 33}},
+          {require: {A: "$55"}},
+          // Without string parsing, A="$33" doesn't match A=33 and a new record is created
+          {require: {A: "$33"}},
+        ],
+        {id: [1, 2, 3, 4], A: [1, 33, "$55", "$33"], B: [2, 1, 6, 0]},
+        {noparse: 1}
+      );
+
+      // Checking that updating by `id` works.
+ await check([ + { + require: {id: 3}, + fields: {A: "66"}, + }, + ], + {id: [1, 2, 3, 4], A: [1, 33, 66, "$33"], B: [2, 1, 6, 0]}, + ); + + // allow_empty_require option with empty `require` updates all records + await check([ + { + require: {}, + fields: {A: 99, B: 99}, + }, + ], + {id: [1, 2, 3, 4], A: [99, 99, 99, 99], B: [99, 99, 99, 99]}, + {allow_empty_require: "1", onmany: "all"}, + ); + }); + + it("should 404 for missing tables", async () => { + checkError(404, /Table not found "Bad_Foo_"/, + await axios.put(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Bad_Foo_/records`, + {records: [{require: {id: 1}}]}, chimpy)); + }); + + it("should 400 for missing columns", async () => { + checkError(400, /Invalid column "no_such_column"/, + await axios.put(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, + {records: [{require: {no_such_column: 1}}]}, chimpy)); + }); + + it("should 400 for an incorrect onmany parameter", async function() { + checkError(400, + /onmany parameter foo should be one of first,none,all/, + await axios.put(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, + {records: [{require: {id: 1}}]}, {...chimpy, params: {onmany: "foo"}})); + }); + + it("should 400 for an empty require without allow_empty_require", async function() { + checkError(400, + /require is empty but allow_empty_require isn't set/, + await axios.put(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, + {records: [{require: {}}]}, chimpy)); + }); + + it("should validate request schema", async function() { + const url = `${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`; + const test = async (payload: any, error: { error: string, details: string }) => { + const resp = await axios.put(url, payload, chimpy); + checkError(400, error, resp); + }; + await test({}, {error: 'Invalid payload', details: 'Error: body.records is missing'}); + await test({records: 1}, {error: 'Invalid payload', details: 'Error: body.records is not an array'}); + await test({records: [{fields: {}}]}, + { + error: 'Invalid payload', + details: 'Error: ' + + 'body.records[0] is not a AddOrUpdateRecord; ' + + 'body.records[0].require is missing', + }); + await test({records: [{require: {id: "1"}}]}, + { + error: 'Invalid payload', + details: 'Error: ' + + 'body.records[0] is not a AddOrUpdateRecord; ' + + 'body.records[0].require.id is not a number', + }); + }); + }); + + describe("POST /docs/{did}/tables/{tid}/records", async function() { + it("POST should have good errors", async () => { + checkError(404, /not found/, + await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Bad_Foo_/data`, + { A: ['Alice', 'Felix'], B: [2, 22] }, chimpy)); + + checkError(400, /Invalid column "Bad"/, + await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, + { A: ['Alice'], Bad: ['Monthy'] }, chimpy)); + + // Other errors should also be maximally informative. 
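+      // For example, a malformed payload (B: null below, where an array of cell values is
+      // expected) should produce a 400 with a descriptive message rather than an opaque failure.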
+ checkError(400, /Error manipulating data/, + await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, + { A: ['Alice'], B: null }, chimpy)); + }); + + it("validates request schema", async function() { + const url = `${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`; + const test = async(payload: any, error: {error: string, details: string}) => { + const resp = await axios.post(url, payload, chimpy); + checkError(400, error, resp); + }; + await test({}, {error: 'Invalid payload', details: 'Error: body.records is missing'}); + await test({records: 1}, {error: 'Invalid payload', details: 'Error: body.records is not an array'}); + // All column types are allowed, except Arrays (or objects) without correct code. + const testField = async (A: any) => { + await test({records: [{ id: 1, fields: { A } }]}, {error: 'Invalid payload', details: + 'Error: body.records[0] is not a NewRecord; '+ + 'body.records[0].fields.A is not a CellValue; '+ + 'body.records[0].fields.A is none of number, '+ + 'string, boolean, null, 1 more; body.records[0].'+ + 'fields.A[0] is not a GristObjCode; body.records[0]'+ + '.fields.A[0] is not a valid enum value'}); + }; + // test no code at all + await testField([]); + // test invalid code + await testField(['ZZ']); + }); + + it("allows to create a blank record", async function() { + // create sample document for testing + const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + const docId = await userApi.newDoc({ name : 'BlankTest'}, wid); + // Create two blank records + const url = `${serverUrl}/api/docs/${docId}/tables/Table1/records`; + const resp = await axios.post(url, {records: [{}, { fields: {}}]}, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, { records : [{id: 1}, {id: 2}]}); + }); + + it("allows to create partial records", async function() { + // create sample document for testing + const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + const docId = await userApi.newDoc({ name : 'BlankTest'}, wid); + const url = `${serverUrl}/api/docs/${docId}/tables/Table1/records`; + // create partial records + const resp = await axios.post(url, {records: [{fields: { A: 1}}, { fields: {B: 2}}, {}]}, chimpy); + assert.equal(resp.status, 200); + const table = await userApi.getTable(docId, "Table1"); + delete table.manualSort; + assert.deepStrictEqual( + table, + { id: [1, 2, 3], A: [1, null, null], B: [null, 2, null], C:[null, null, null]}); + }); + + it("allows CellValue as a field", async function() { + // create sample document + const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + const docId = await userApi.newDoc({ name : 'PostTest'}, wid); + const url = `${serverUrl}/api/docs/${docId}/tables/Table1/records`; + const testField = async(A?: CellValue, message?: string) =>{ + const resp = await axios.post(url, {records: [{ fields: { A } }]}, chimpy); + assert.equal(resp.status, 200, message ?? 
`Error for code ${A}`); + }; + // test allowed types for a field + await testField(1); // ints + await testField(1.2); // floats + await testField("string"); // strings + await testField(true); // true and false + await testField(false); + await testField(null); // null + // encoded values (though not all make sense) + for (const code of [ + GristObjCode.List, + GristObjCode.Dict, + GristObjCode.DateTime, + GristObjCode.Date, + GristObjCode.Skip, + GristObjCode.Censored, + GristObjCode.Reference, + GristObjCode.ReferenceList, + GristObjCode.Exception, + GristObjCode.Pending, + GristObjCode.Unmarshallable, + GristObjCode.Versions, + ]) { + await testField([code]); + } + }); + }); + + it("POST /docs/{did}/tables/{tid}/data respects document permissions", async function() { + let resp: AxiosResponse; + const data = { + A: ['Alice', 'Felix'], + B: [2, 22] + }; + + // as a viewer charon cannot edit TestDoc + resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, data, charon); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: 'No write access'}); + + // as not part of any group kiwi cannot edit TestDoc + resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, data, kiwi); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: 'No view access'}); + + // check that TestDoc did not change + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy); + assert.deepEqual(resp.data, { + id: [1, 2, 3, 4], + A: ['Santa', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + }); + + describe("PATCH /docs/{did}/tables/{tid}/records", function() { + it("updates records", async function () { + let resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, { + records: [ + { + id: 1, + fields: { + A: 'Father Christmas', + }, + }, + ], + }, chimpy); + assert.equal(resp.status, 200); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`, chimpy); + // check that rest of the data is left unchanged + assert.deepEqual(resp.data, { + records: + [ + { + id: 1, + fields: { + A: 'Father Christmas', + B: '1', + }, + }, + { + id: 2, + fields: { + A: 'Bob', + B: '11', + }, + }, + { + id: 3, + fields: { + A: 'Alice', + B: '2', + }, + }, + { + id: 4, + fields: { + A: 'Felix', + B: '22', + }, + }, + ] + }); + }); + + it("validates request schema", async function() { + const url = `${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/records`; + async function failsWithError(payload: any, error: { error: string, details?: string }){ + const resp = await axios.patch(url, payload, chimpy); + checkError(400, error, resp); + } + + await failsWithError({}, {error: 'Invalid payload', details: 'Error: body.records is missing'}); + + await failsWithError({records: 1}, {error: 'Invalid payload', details: 'Error: body.records is not an array'}); + + await failsWithError({records: []}, {error: 'Invalid payload', details: + 'Error: body.records[0] is not a Record; body.records[0] is not an object'}); + + await failsWithError({records: [{}]}, {error: 'Invalid payload', details: + 'Error: body.records[0] is not a Record\n '+ + 'body.records[0].id is missing\n '+ + 'body.records[0].fields is missing'}); + + await failsWithError({records: [{id: "1"}]}, {error: 'Invalid payload', details: + 'Error: body.records[0] is not a Record\n' + + ' body.records[0].id is not a number\n' + + ' body.records[0].fields is missing'}); + + 
await failsWithError( + {records: [{id: 1, fields: {A : 1}}, {id: 2, fields: {B: 3}}]}, + {error: 'PATCH requires all records to have same fields'}); + + // Test invalid object codes + const fieldIsNotValid = async (A: any) => { + await failsWithError({records: [{ id: 1, fields: { A } }]}, {error: 'Invalid payload', details: + 'Error: body.records[0] is not a Record; '+ + 'body.records[0].fields.A is not a CellValue; '+ + 'body.records[0].fields.A is none of number, '+ + 'string, boolean, null, 1 more; body.records[0].'+ + 'fields.A[0] is not a GristObjCode; body.records[0]'+ + '.fields.A[0] is not a valid enum value'}); + }; + await fieldIsNotValid([]); + await fieldIsNotValid(['ZZ']); + }); + + it("allows CellValue as a field", async function() { + // create sample document for testing + const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + const docId = await userApi.newDoc({ name : 'PatchTest'}, wid); + const url = `${serverUrl}/api/docs/${docId}/tables/Table1/records`; + // create record for patching + const id = (await axios.post(url, { records: [{}] }, chimpy)).data.records[0].id; + const testField = async(A?: CellValue, message?: string) =>{ + const resp = await axios.patch(url, {records: [{ id, fields: { A } }]}, chimpy); + assert.equal(resp.status, 200, message ?? `Error for code ${A}`); + }; + await testField(1); + await testField(1.2); + await testField("string"); + await testField(true); + await testField(false); + await testField(null); + for (const code of [ + GristObjCode.List, + GristObjCode.Dict, + GristObjCode.DateTime, + GristObjCode.Date, + GristObjCode.Skip, + GristObjCode.Censored, + GristObjCode.Reference, + GristObjCode.ReferenceList, + GristObjCode.Exception, + GristObjCode.Pending, + GristObjCode.Unmarshallable, + GristObjCode.Versions, + ]) { + await testField([code]); + } + }); + }); + + describe("PATCH /docs/{did}/tables/{tid}/data", function() { + + it("updates records", async function() { + let resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + id: [1], + A: ['Santa Klaus'], + }, chimpy); + assert.equal(resp.status, 200); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy); + // check that rest of the data is left unchanged + assert.deepEqual(resp.data, { + id: [1, 2, 3, 4], + A: ['Santa Klaus', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + + }); + + it("throws 400 for invalid row ids", async function() { + + // combination of valid and invalid ids fails + let resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + id: [1, 5], + A: ['Alice', 'Felix'] + }, chimpy); + assert.equal(resp.status, 400); + assert.match(resp.data.error, /Invalid row id 5/); + + // only invalid ids also fails + resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + id: [10, 5], + A: ['Alice', 'Felix'] + }, chimpy); + assert.equal(resp.status, 400); + assert.match(resp.data.error, /Invalid row id 10/); + + // check that changes related to id 1 did not apply + assert.deepEqual((await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy)).data, { + id: [1, 2, 3, 4], + A: ['Santa Klaus', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + }); + + it("throws 400 for invalid column", async function() { + const resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, { + id: [1], 
+ A: ['Alice'], + C: ['Monthy'] + }, chimpy); + assert.equal(resp.status, 400); + assert.match(resp.data.error, /Invalid column "C"/); + }); + + it("respects document permissions", async function() { + let resp: AxiosResponse; + const data = { + id: [1], + A: ['Santa'], + }; + + // check data + assert.deepEqual((await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy)).data, { + id: [1, 2, 3, 4], + A: ['Santa Klaus', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + + // as a viewer charon cannot patch TestDoc + resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, data, charon); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: 'No write access'}); + + // as not part of any group kiwi cannot patch TestDoc + resp = await axios.patch(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, data, kiwi); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: 'No view access'}); + + // check that changes did not apply + assert.deepEqual((await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/tables/Foo/data`, chimpy)).data, { + id: [1, 2, 3, 4], + A: ['Santa Klaus', 'Bob', 'Alice', 'Felix'], + B: ["1", "11", "2", "22"], + manualSort: [1, 2, 3, 4] + }); + }); + + }); + + describe('attachments', function() { + it("POST /docs/{did}/attachments adds attachments", async function() { + let formData = new FormData(); + formData.append('upload', 'foobar', "hello.doc"); + formData.append('upload', '123456', "world.jpg"); + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, formData, + defaultsDeep({headers: formData.getHeaders()}, chimpy)); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, [1, 2]); + + // Another upload gets the next number. + formData = new FormData(); + formData.append('upload', 'abcdef', "hello.png"); + resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, formData, + defaultsDeep({headers: formData.getHeaders()}, chimpy)); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, [3]); + }); + + it("GET /docs/{did}/attachments/{id} returns attachment metadata", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/2`, chimpy); + assert.equal(resp.status, 200); + assert.include(resp.data, {fileName: "world.jpg", fileSize: 6}); + assert.match(resp.data.timeUploaded, /^\d{4}-\d{2}-\d{2}T/); + }); + + it("GET /docs/{did}/attachments/{id}/download downloads attachment contents", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/2/download`, + {...chimpy, responseType: 'arraybuffer'}); + assert.equal(resp.status, 200); + assert.deepEqual(resp.headers['content-type'], 'image/jpeg'); + assert.deepEqual(resp.headers['content-disposition'], 'attachment; filename="world.jpg"'); + assert.deepEqual(resp.headers['cache-control'], 'private, max-age=3600'); + assert.deepEqual(resp.data, Buffer.from('123456')); + }); + + it("GET /docs/{did}/attachments/{id}/download works after doc shutdown", async function() { + // Check that we can download when ActiveDoc isn't currently open. 
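+      // force-reload closes the doc (the parallel-requests test below relies on the same
+      // behavior), so this exercises downloading without an open ActiveDoc.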
+ let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/force-reload`, null, chimpy); + assert.equal(resp.status, 200); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/2/download`, + {...chimpy, responseType: 'arraybuffer'}); + assert.equal(resp.status, 200); + assert.deepEqual(resp.headers['content-type'], 'image/jpeg'); + assert.deepEqual(resp.headers['content-disposition'], 'attachment; filename="world.jpg"'); + assert.deepEqual(resp.headers['cache-control'], 'private, max-age=3600'); + assert.deepEqual(resp.data, Buffer.from('123456')); + }); + + it("GET /docs/{did}/attachments/{id}... returns 404 when attachment not found", async function() { + let resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/22`, chimpy); + checkError(404, /Attachment not found: 22/, resp); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/moo`, chimpy); + checkError(404, /Attachment not found: moo/, resp); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/22/download`, chimpy); + checkError(404, /Attachment not found: 22/, resp); + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/moo/download`, chimpy); + checkError(404, /Attachment not found: moo/, resp); + }); + + it("POST /docs/{did}/attachments produces reasonable errors", async function() { + // Check that it produces reasonable errors if we try to use it with non-form-data + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, [4, 5, 6], chimpy); + assert.equal(resp.status, 415); // Wrong content-type + + // Check for an error if there is no data included. + const formData = new FormData(); + resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, formData, + defaultsDeep({headers: formData.getHeaders()}, chimpy)); + assert.equal(resp.status, 400); + // TODO The error here is "stream ended unexpectedly", which isn't really reasonable. 
+ }); + + it("POST/GET /docs/{did}/attachments respect document permissions", async function() { + const formData = new FormData(); + formData.append('upload', 'xyzzz', "wrong.png"); + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, formData, + defaultsDeep({headers: formData.getHeaders()}, kiwi)); + checkError(403, /No view access/, resp); + + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/3`, kiwi); + checkError(403, /No view access/, resp); + + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/3/download`, kiwi); + checkError(403, /No view access/, resp); + }); + + it("POST /docs/{did}/attachments respects untrusted content-type only if valid", async function() { + const formData = new FormData(); + formData.append('upload', 'xyz', {filename: "foo", contentType: "application/pdf"}); + formData.append('upload', 'abc', {filename: "hello.png", contentType: "invalid/content-type"}); + formData.append('upload', 'def', {filename: "world.doc", contentType: "text/plain\nbad-header: 1\n\nEvil"}); + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments`, formData, + defaultsDeep({headers: formData.getHeaders()}, chimpy)); + assert.equal(resp.status, 200); + assert.deepEqual(resp.data, [4, 5, 6]); + + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/4/download`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.headers['content-type'], 'application/pdf'); // A valid content-type is respected + assert.deepEqual(resp.headers['content-disposition'], 'attachment; filename="foo.pdf"'); + assert.deepEqual(resp.data, 'xyz'); + + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/5/download`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.headers['content-type'], 'image/png'); // Did not pay attention to invalid header + assert.deepEqual(resp.headers['content-disposition'], 'attachment; filename="hello.png"'); + assert.deepEqual(resp.data, 'abc'); + + resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/attachments/6/download`, chimpy); + assert.equal(resp.status, 200); + assert.deepEqual(resp.headers['content-type'], 'application/msword'); // Another invalid header ignored + assert.deepEqual(resp.headers['content-disposition'], 'attachment; filename="world.doc"'); + assert.deepEqual(resp.headers['cache-control'], 'private, max-age=3600'); + assert.deepEqual(resp.headers['bad-header'], undefined); // Attempt to hack in more headers didn't work + assert.deepEqual(resp.data, 'def'); + }); + }); + + it("GET /docs/{did}/download serves document", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/download`, chimpy); + assert.equal(resp.status, 200); + assert.match(resp.data, /grist_Tables_column/); + }); + + it("GET /docs/{did}/download respects permissions", async function() { + // kiwi has no access to TestDoc + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/download`, kiwi); + assert.equal(resp.status, 403); + assert.notMatch(resp.data, /grist_Tables_column/); + }); + + it("GET /docs/{did}/download/csv serves CSV-encoded document", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/download/csv?tableId=Table1`, chimpy); + assert.equal(resp.status, 200); + assert.equal(resp.data, 'A,B,C,D,E\nhello,,,,HELLO\n,world,,,\n,,,,\n,,,,\n'); + + const resp2 = await 
axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/download/csv?tableId=Foo`, chimpy); + assert.equal(resp2.status, 200); + assert.equal(resp2.data, 'A,B\nSanta,1\nBob,11\nAlice,2\nFelix,22\n'); + }); + + it("GET /docs/{did}/download/csv respects permissions", async function() { + // kiwi has no access to TestDoc + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/download/csv?tableId=Table1`, kiwi); + assert.equal(resp.status, 403); + assert.notEqual(resp.data, 'A,B,C,D,E\nhello,,,,HELLO\n,world,,,\n,,,,\n,,,,\n'); + }); + + it("GET /docs/{did}/download/csv returns 404 if tableId is invalid", async function() { + const resp = await axios.get(`${serverUrl}/api/docs/${docIds.TestDoc}/download/csv?tableId=MissingTableId`, chimpy); + assert.equal(resp.status, 404); + assert.deepEqual(resp.data, { error: 'Table MissingTableId not found.' }); + }); + + it("GET /docs/{did}/download/csv returns 404 if viewSectionId is invalid", async function() { + const resp = await axios.get( + `${serverUrl}/api/docs/${docIds.TestDoc}/download/csv?tableId=Table1&viewSection=9999`, chimpy); + assert.equal(resp.status, 404); + assert.deepEqual(resp.data, { error: 'No record 9999 in table _grist_Views_section' }); + }); + + it("GET /docs/{did}/download/csv returns 400 if tableId is missing", async function() { + const resp = await axios.get( + `${serverUrl}/api/docs/${docIds.TestDoc}/download/csv`, chimpy); + assert.equal(resp.status, 400); + assert.deepEqual(resp.data, { error: 'tableId parameter should be a string: undefined' }); + }); + + it('POST /workspaces/{wid}/import handles empty filenames', async function() { + if (!process.env.TEST_REDIS_URL) { this.skip(); } + const worker1 = await userApi.getWorkerAPI('import'); + const wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + const fakeData1 = await testUtils.readFixtureDoc('Hello.grist'); + const uploadId1 = await worker1.upload(fakeData1, '.grist'); + const resp = await axios.post(`${worker1.url}/api/workspaces/${wid}/import`, {uploadId: uploadId1}, + configForUser('Chimpy')); + assert.equal(resp.status, 200); + assert.equal(resp.data.title, 'Untitled upload'); + assert.equal(typeof resp.data.id, 'string'); + assert.notEqual(resp.data.id, ''); + }); + + it("document is protected during upload-and-import sequence", async function() { + if (!process.env.TEST_REDIS_URL) { this.skip(); } + // Prepare an API for a different user. + const kiwiApi = new UserAPIImpl(`${home.serverUrl}/o/Fish`, { + headers: {Authorization: 'Bearer api_key_for_kiwi'}, + fetch : fetch as any, + newFormData: () => new FormData() as any, + logger: log + }); + // upload something for Chimpy and something else for Kiwi. + const worker1 = await userApi.getWorkerAPI('import'); + const fakeData1 = await testUtils.readFixtureDoc('Hello.grist'); + const uploadId1 = await worker1.upload(fakeData1, 'upload.grist'); + const worker2 = await kiwiApi.getWorkerAPI('import'); + const fakeData2 = await testUtils.readFixtureDoc('Favorite_Films.grist'); + const uploadId2 = await worker2.upload(fakeData2, 'upload2.grist'); + + // Check that kiwi only has access to their own upload. 
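+    // Uploads should be usable only by the user who created them, so importing chimpy's
+    // uploadId1 as kiwi is expected to be refused.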
+ let wid = (await kiwiApi.getOrgWorkspaces('current')).find((w) => w.name === 'Big')!.id; + let resp = await axios.post(`${worker2.url}/api/workspaces/${wid}/import`, {uploadId: uploadId1}, + configForUser('Kiwi')); + assert.equal(resp.status, 403); + assert.deepEqual(resp.data, {error: "access denied"}); + + resp = await axios.post(`${worker2.url}/api/workspaces/${wid}/import`, {uploadId: uploadId2}, + configForUser('Kiwi')); + assert.equal(resp.status, 200); + + // Check that chimpy has access to their own upload. + wid = (await userApi.getOrgWorkspaces('current')).find((w) => w.name === 'Private')!.id; + resp = await axios.post(`${worker1.url}/api/workspaces/${wid}/import`, {uploadId: uploadId1}, + configForUser('Chimpy')); + assert.equal(resp.status, 200); + }); + + it('limits parallel requests', async function() { + // Launch 30 requests in parallel and see how many are honored and how many + // return 429s. The timing of this test is a bit delicate. We close the doc + // to increase the odds that results won't start coming back before all the + // requests have passed authorization. May need to do something more sophisticated + // if this proves unreliable. + await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/force-reload`, null, chimpy); + const reqs = [...Array(30).keys()].map( + i => axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy)); + const responses = await Promise.all(reqs); + assert.lengthOf(responses.filter(r => r.status === 200), 10); + assert.lengthOf(responses.filter(r => r.status === 429), 20); + }); + + it('allows forced reloads', async function() { + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/force-reload`, null, chimpy); + assert.equal(resp.status, 200); + // Check that support cannot force a reload. + resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/force-reload`, null, support); + assert.equal(resp.status, 403); + if (hasHomeApi) { + // Check that support can force a reload through housekeeping api. + resp = await axios.post(`${serverUrl}/api/housekeeping/docs/${docIds.Timesheets}/force-reload`, null, support); + assert.equal(resp.status, 200); + // Check that regular user cannot force a reload through housekeeping api. + resp = await axios.post(`${serverUrl}/api/housekeeping/docs/${docIds.Timesheets}/force-reload`, null, chimpy); + assert.equal(resp.status, 403); + } + }); + + it('allows assignments', async function() { + let resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/assign`, null, chimpy); + assert.equal(resp.status, 200); + // Check that support cannot force an assignment. + resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/assign`, null, support); + assert.equal(resp.status, 403); + if (hasHomeApi) { + // Check that support can force an assignment through housekeeping api. + resp = await axios.post(`${serverUrl}/api/housekeeping/docs/${docIds.Timesheets}/assign`, null, support); + assert.equal(resp.status, 200); + // Check that regular user cannot force an assignment through housekeeping api. 
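+      // Note the inversion: the regular endpoint accepts chimpy and rejects support, while
+      // the housekeeping endpoint accepts support and rejects chimpy.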
+ resp = await axios.post(`${serverUrl}/api/housekeeping/docs/${docIds.Timesheets}/assign`, null, chimpy); + assert.equal(resp.status, 403); + } + }); + + it('honors urlIds', async function() { + // Make a document with a urlId + const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id; + const doc1 = await userApi.newDoc({name: 'testdoc1', urlId: 'urlid1'}, ws1); + try { + // Make sure an edit made by docId is visible when accessed via docId or urlId + let resp = await axios.post(`${serverUrl}/api/docs/${doc1}/tables/Table1/data`, { + A: ['Apple'], B: [99] + }, chimpy); + resp = await axios.get(`${serverUrl}/api/docs/${doc1}/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[0], 'Apple'); + resp = await axios.get(`${serverUrl}/api/docs/urlid1/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[0], 'Apple'); + // Make sure an edit made by urlId is visible when accessed via docId or urlId + resp = await axios.post(`${serverUrl}/api/docs/urlid1/tables/Table1/data`, { + A: ['Orange'], B: [42] + }, chimpy); + resp = await axios.get(`${serverUrl}/api/docs/${doc1}/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[1], 'Orange'); + resp = await axios.get(`${serverUrl}/api/docs/urlid1/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[1], 'Orange'); + } finally { + await userApi.deleteDoc(doc1); + } + }); + + it('filters urlIds by org', async function() { + // Make two documents with same urlId + const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id; + const doc1 = await userApi.newDoc({name: 'testdoc1', urlId: 'urlid'}, ws1); + const nasaApi = new UserAPIImpl(`${home.serverUrl}/o/nasa`, { + headers: {Authorization: 'Bearer api_key_for_chimpy'}, + fetch : fetch as any, + newFormData: () => new FormData() as any, + logger: log + }); + const ws2 = (await nasaApi.getOrgWorkspaces('current'))[0].id; + const doc2 = await nasaApi.newDoc({name: 'testdoc2', urlId: 'urlid'}, ws2); + try { + // Place a value in "docs" doc + await axios.post(`${serverUrl}/o/docs/api/docs/urlid/tables/Table1/data`, { + A: ['Apple'], B: [99] + }, chimpy); + // Place a value in "nasa" doc + await axios.post(`${serverUrl}/o/nasa/api/docs/urlid/tables/Table1/data`, { + A: ['Orange'], B: [99] + }, chimpy); + // Check the values made it to the right places + let resp = await axios.get(`${serverUrl}/api/docs/${doc1}/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[0], 'Apple'); + resp = await axios.get(`${serverUrl}/api/docs/${doc2}/tables/Table1/data`, chimpy); + assert.equal(resp.data.A[0], 'Orange'); + } finally { + await userApi.deleteDoc(doc1); + await nasaApi.deleteDoc(doc2); + } + }); + + it('allows docId access to any document from merged org', async function() { + // Make two documents + const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id; + const doc1 = await userApi.newDoc({name: 'testdoc1'}, ws1); + const nasaApi = new UserAPIImpl(`${home.serverUrl}/o/nasa`, { + headers: {Authorization: 'Bearer api_key_for_chimpy'}, + fetch : fetch as any, + newFormData: () => new FormData() as any, + logger: log + }); + const ws2 = (await nasaApi.getOrgWorkspaces('current'))[0].id; + const doc2 = await nasaApi.newDoc({name: 'testdoc2'}, ws2); + try { + // Should fail to write to a document in "docs" from "nasa" url + let resp = await axios.post(`${serverUrl}/o/nasa/api/docs/${doc1}/tables/Table1/data`, { + A: ['Apple'], B: [99] + }, chimpy); + assert.equal(resp.status, 404); + // Should successfully write to a document in "nasa" from "docs" url + resp = await 
+  it('allows docId access to any document from merged org', async function() {
+    // Make two documents
+    const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+    const doc1 = await userApi.newDoc({name: 'testdoc1'}, ws1);
+    const nasaApi = new UserAPIImpl(`${home.serverUrl}/o/nasa`, {
+      headers: {Authorization: 'Bearer api_key_for_chimpy'},
+      fetch: fetch as any,
+      newFormData: () => new FormData() as any,
+      logger: log
+    });
+    const ws2 = (await nasaApi.getOrgWorkspaces('current'))[0].id;
+    const doc2 = await nasaApi.newDoc({name: 'testdoc2'}, ws2);
+    try {
+      // Should fail to write to a document in "docs" from "nasa" url
+      let resp = await axios.post(`${serverUrl}/o/nasa/api/docs/${doc1}/tables/Table1/data`, {
+        A: ['Apple'], B: [99]
+      }, chimpy);
+      assert.equal(resp.status, 404);
+      // Should successfully write to a document in "nasa" from "docs" url
+      resp = await axios.post(`${serverUrl}/o/docs/api/docs/${doc2}/tables/Table1/data`, {
+        A: ['Orange'], B: [99]
+      }, chimpy);
+      assert.equal(resp.status, 200);
+      // Should fail to write to a document in "nasa" from "pr" url
+      resp = await axios.post(`${serverUrl}/o/pr/api/docs/${doc2}/tables/Table1/data`, {
+        A: ['Orange'], B: [99]
+      }, chimpy);
+      assert.equal(resp.status, 404);
+    } finally {
+      await userApi.deleteDoc(doc1);
+      await nasaApi.deleteDoc(doc2);
+    }
+  });
+
+  it("POST /docs/{did}/replace replaces one document with another", async function() {
+    const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+    const doc1 = await userApi.newDoc({name: 'testdoc1'}, ws1);
+    const doc2 = await userApi.newDoc({name: 'testdoc2'}, ws1);
+    const doc3 = await userApi.newDoc({name: 'testdoc3'}, ws1);
+    await userApi.updateDocPermissions(doc2, {users: {'kiwi@getgrist.com': 'editors'}});
+    await userApi.updateDocPermissions(doc3, {users: {'kiwi@getgrist.com': 'viewers'}});
+    try {
+      // Put some material in doc3
+      let resp = await axios.post(`${serverUrl}/o/docs/api/docs/${doc3}/tables/Table1/data`, {
+        A: ['Orange']
+      }, chimpy);
+      assert.equal(resp.status, 200);
+
+      // Kiwi can replace doc2 with doc3
+      resp = await axios.post(`${serverUrl}/o/docs/api/docs/${doc2}/replace`, {
+        sourceDocId: doc3
+      }, kiwi);
+      assert.equal(resp.status, 200);
+      resp = await axios.get(`${serverUrl}/api/docs/${doc2}/tables/Table1/data`, chimpy);
+      assert.equal(resp.data.A[0], 'Orange');
+
+      // Kiwi can't replace doc1 with doc3, no write access to doc1
+      resp = await axios.post(`${serverUrl}/o/docs/api/docs/${doc1}/replace`, {
+        sourceDocId: doc3
+      }, kiwi);
+      assert.equal(resp.status, 403);
+
+      // Kiwi can't replace doc2 with doc1, no read access to doc1
+      resp = await axios.post(`${serverUrl}/o/docs/api/docs/${doc2}/replace`, {
+        sourceDocId: doc1
+      }, kiwi);
+      assert.equal(resp.status, 403);
+    } finally {
+      await userApi.deleteDoc(doc1);
+      await userApi.deleteDoc(doc2);
+      await userApi.deleteDoc(doc3);
+    }
+  });
+
+  it("GET /docs/{did}/snapshots retrieves a list of snapshots", async function() {
+    const resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/snapshots`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.isAtLeast(resp.data.snapshots.length, 1);
+    assert.hasAllKeys(resp.data.snapshots[0], ['docId', 'lastModified', 'snapshotId']);
+  });
+
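+  // Document states below are checkpoints in the action history; each state is
+  // reported as {n, h}, where n looks like a sequence number and h a hash
+  // identifying the state (the compare tests further down use the same shape).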
+  it("POST /docs/{did}/states/remove removes old states", async function() {
+    // Check doc has plenty of states.
+    let resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/states`, chimpy);
+    assert.equal(resp.status, 200);
+    const states: DocState[] = resp.data.states;
+    assert.isAbove(states.length, 5);
+
+    // Remove all but 3.
+    resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/states/remove`, {keep: 3}, chimpy);
+    assert.equal(resp.status, 200);
+    resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/states`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.lengthOf(resp.data.states, 3);
+    assert.equal(resp.data.states[0].h, states[0].h);
+    assert.equal(resp.data.states[1].h, states[1].h);
+    assert.equal(resp.data.states[2].h, states[2].h);
+
+    // Remove all but 1.
+    resp = await axios.post(`${serverUrl}/api/docs/${docIds.Timesheets}/states/remove`, {keep: 1}, chimpy);
+    assert.equal(resp.status, 200);
+    resp = await axios.get(`${serverUrl}/api/docs/${docIds.Timesheets}/states`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.lengthOf(resp.data.states, 1);
+    assert.equal(resp.data.states[0].h, states[0].h);
+  });
+
+  it("GET /docs/{did1}/compare/{did2} tracks changes between docs", async function() {
+    const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+    const docId1 = await userApi.newDoc({name: 'testdoc1'}, ws1);
+    const docId2 = await userApi.newDoc({name: 'testdoc2'}, ws1);
+    const doc1 = userApi.getDocAPI(docId1);
+    const doc2 = userApi.getDocAPI(docId2);
+
+    // Stick some content in column A so it has a defined type,
+    // so diffs are smaller and simpler.
+    await doc2.addRows('Table1', {A: [0]});
+
+    let comp = await doc1.compareDoc(docId2);
+    assert.hasAllKeys(comp, ['left', 'right', 'parent', 'summary']);
+    assert.equal(comp.summary, 'unrelated');
+    assert.equal(comp.parent, null);
+    assert.hasAllKeys(comp.left, ['n', 'h']);
+    assert.hasAllKeys(comp.right, ['n', 'h']);
+    assert.equal(comp.left.n, 1);
+    assert.equal(comp.right.n, 2);
+
+    await doc1.replace({sourceDocId: docId2});
+
+    comp = await doc1.compareDoc(docId2);
+    assert.equal(comp.summary, 'same');
+    assert.equal(comp.left.n, 2);
+    assert.deepEqual(comp.left, comp.right);
+    assert.deepEqual(comp.left, comp.parent);
+    assert.equal(comp.details, undefined);
+
+    comp = await doc1.compareDoc(docId2, { detail: true });
+    assert.deepEqual(comp.details, {
+      leftChanges: { tableRenames: [], tableDeltas: {} },
+      rightChanges: { tableRenames: [], tableDeltas: {} }
+    });
+
+    await doc1.addRows('Table1', {A: [1]});
+    comp = await doc1.compareDoc(docId2);
+    assert.equal(comp.summary, 'left');
+    assert.equal(comp.left.n, 3);
+    assert.equal(comp.right.n, 2);
+    assert.deepEqual(comp.right, comp.parent);
+    assert.equal(comp.details, undefined);
+
+    comp = await doc1.compareDoc(docId2, { detail: true });
+    assert.deepEqual(comp.details!.rightChanges,
+                     { tableRenames: [], tableDeltas: {} });
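+    // An ActionSummary delta maps rowId -> [before, after] per column; null
+    // means the cell did not exist on that side, and present values are wrapped
+    // in single-element arrays. So {A: {[2]: [null, [1]]}} below reads: row 2
+    // of column A went from nonexistent to the value 1.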
+    const addA1: ActionSummary = {
+      tableRenames: [],
+      tableDeltas: { Table1: {
+        updateRows: [],
+        removeRows: [],
+        addRows: [ 2 ],
+        columnDeltas: {
+          A: { [2]: [null, [1]] },
+          manualSort: { [2]: [null, [2]] },
+        },
+        columnRenames: [],
+      } }
+    };
+    assert.deepEqual(comp.details!.leftChanges, addA1);
+
+    await doc2.addRows('Table1', {A: [1]});
+    comp = await doc1.compareDoc(docId2);
+    assert.equal(comp.summary, 'both');
+    assert.equal(comp.left.n, 3);
+    assert.equal(comp.right.n, 3);
+    assert.equal(comp.parent!.n, 2);
+    assert.equal(comp.details, undefined);
+
+    comp = await doc1.compareDoc(docId2, { detail: true });
+    assert.deepEqual(comp.details!.leftChanges, addA1);
+    assert.deepEqual(comp.details!.rightChanges, addA1);
+
+    await doc1.replace({sourceDocId: docId2});
+
+    comp = await doc1.compareDoc(docId2);
+    assert.equal(comp.summary, 'same');
+    assert.equal(comp.left.n, 3);
+    assert.deepEqual(comp.left, comp.right);
+    assert.deepEqual(comp.left, comp.parent);
+    assert.equal(comp.details, undefined);
+
+    comp = await doc1.compareDoc(docId2, { detail: true });
+    assert.deepEqual(comp.details, {
+      leftChanges: { tableRenames: [], tableDeltas: {} },
+      rightChanges: { tableRenames: [], tableDeltas: {} }
+    });
+
+    await doc2.addRows('Table1', {A: [2]});
+    comp = await doc1.compareDoc(docId2);
+    assert.equal(comp.summary, 'right');
+    assert.equal(comp.left.n, 3);
+    assert.equal(comp.right.n, 4);
+    assert.deepEqual(comp.left, comp.parent);
+    assert.equal(comp.details, undefined);
+
+    comp = await doc1.compareDoc(docId2, { detail: true });
+    assert.deepEqual(comp.details!.leftChanges,
+                     { tableRenames: [], tableDeltas: {} });
+    const addA2: ActionSummary = {
+      tableRenames: [],
+      tableDeltas: { Table1: {
+        updateRows: [],
+        removeRows: [],
+        addRows: [ 3 ],
+        columnDeltas: {
+          A: { [3]: [null, [2]] },
+          manualSort: { [3]: [null, [3]] },
+        },
+        columnRenames: [],
+      } }
+    };
+    assert.deepEqual(comp.details!.rightChanges, addA2);
+  });
+
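+  // compareVersion accepts git-style refs: HEAD is the latest state, HEAD~ its
+  // parent, HEAD~~ the grandparent; concrete hashes (the h values) work too.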
+  it("GET /docs/{did}/compare tracks changes within a doc", async function() {
+    // Create a test document.
+    const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+    const docId = await userApi.newDoc({name: 'testdoc'}, ws1);
+    const doc = userApi.getDocAPI(docId);
+
+    // Give the document some history.
+    await doc.addRows('Table1', {A: ['a1'], B: ['b1']});
+    await doc.addRows('Table1', {A: ['a2'], B: ['b2']});
+    await doc.updateRows('Table1', {id: [1], A: ['A1']});
+
+    // Examine the most recent change, from HEAD~ to HEAD.
+    let comp = await doc.compareVersion('HEAD~', 'HEAD');
+    assert.hasAllKeys(comp, ['left', 'right', 'parent', 'summary', 'details']);
+    assert.equal(comp.summary, 'right');
+    assert.deepEqual(comp.parent, comp.left);
+    assert.notDeepEqual(comp.parent, comp.right);
+    assert.hasAllKeys(comp.left, ['n', 'h']);
+    assert.hasAllKeys(comp.right, ['n', 'h']);
+    assert.equal(comp.left.n, 3);
+    assert.equal(comp.right.n, 4);
+    assert.deepEqual(comp.details!.leftChanges, { tableRenames: [], tableDeltas: {} });
+    assert.deepEqual(comp.details!.rightChanges, {
+      tableRenames: [],
+      tableDeltas: {
+        Table1: {
+          updateRows: [1],
+          removeRows: [],
+          addRows: [],
+          columnDeltas: {
+            A: { [1]: [['a1'], ['A1']] }
+          },
+          columnRenames: [],
+        }
+      }
+    });
+
+    // Check we get the same result with actual hashes.
+    assert.notMatch(comp.left.h, /HEAD/);
+    assert.notMatch(comp.right.h, /HEAD/);
+    const comp2 = await doc.compareVersion(comp.left.h, comp.right.h);
+    assert.deepEqual(comp, comp2);
+
+    // Check that comparing the HEAD with itself shows no changes.
+    comp = await doc.compareVersion('HEAD', 'HEAD');
+    assert.equal(comp.summary, 'same');
+    assert.deepEqual(comp.parent, comp.left);
+    assert.deepEqual(comp.parent, comp.right);
+    assert.deepEqual(comp.details!.leftChanges, { tableRenames: [], tableDeltas: {} });
+    assert.deepEqual(comp.details!.rightChanges, { tableRenames: [], tableDeltas: {} });
+
+    // Examine the combination of the last two changes.
+    comp = await doc.compareVersion('HEAD~~', 'HEAD');
+    assert.hasAllKeys(comp, ['left', 'right', 'parent', 'summary', 'details']);
+    assert.equal(comp.summary, 'right');
+    assert.deepEqual(comp.parent, comp.left);
+    assert.notDeepEqual(comp.parent, comp.right);
+    assert.hasAllKeys(comp.left, ['n', 'h']);
+    assert.hasAllKeys(comp.right, ['n', 'h']);
+    assert.equal(comp.left.n, 2);
+    assert.equal(comp.right.n, 4);
+    assert.deepEqual(comp.details!.leftChanges, { tableRenames: [], tableDeltas: {} });
+    assert.deepEqual(comp.details!.rightChanges, {
+      tableRenames: [],
+      tableDeltas: {
+        Table1: {
+          updateRows: [1],
+          removeRows: [],
+          addRows: [2],
+          columnDeltas: {
+            A: { [1]: [['a1'], ['A1']],
+                 [2]: [null, ['a2']] },
+            B: { [2]: [null, ['b2']] },
+            manualSort: { [2]: [null, [2]] },
+          },
+          columnRenames: [],
+        }
+      }
+    });
+  });
+
+  it('doc worker endpoints ignore any /dw/.../ prefix', async function() {
+    const docWorkerUrl = docs.serverUrl;
+    let resp = await axios.get(`${docWorkerUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.containsAllKeys(resp.data, ['A', 'B', 'C']);
+
+    resp = await axios.get(`${docWorkerUrl}/dw/zing/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy);
+    assert.equal(resp.status, 200);
+    assert.containsAllKeys(resp.data, ['A', 'B', 'C']);
+
+    if (docWorkerUrl !== homeUrl) {
+      resp = await axios.get(`${homeUrl}/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy);
+      assert.equal(resp.status, 200);
+      assert.containsAllKeys(resp.data, ['A', 'B', 'C']);
+
+      resp = await axios.get(`${homeUrl}/dw/zing/api/docs/${docIds.Timesheets}/tables/Table1/data`, chimpy);
+      assert.equal(resp.status, 404);
+    }
+  });
+
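+  // _subscribe registers a webhook on a table. Targets must be https and match
+  // ALLOWED_WEBHOOK_DOMAINS (set for tests in TestServer.start below); the
+  // optional isReadyColumn names a boolean column that gates event delivery.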
+  it("POST /docs/{did}/tables/{tid}/_subscribe validates inputs", async function() {
+    async function check(requestBody: any, status: number, error: string) {
+      const resp = await axios.post(
+        `${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/_subscribe`,
+        requestBody, chimpy
+      );
+      assert.equal(resp.status, status);
+      assert.deepEqual(resp.data, {error});
+    }
+
+    await check({}, 400, "eventTypes must be a non-empty array");
+    await check({eventTypes: 0}, 400, "eventTypes must be a non-empty array");
+    await check({eventTypes: []}, 400, "eventTypes must be a non-empty array");
+    await check({eventTypes: ["foo"]}, 400, "Allowed values in eventTypes are: add,update");
+    await check({eventTypes: ["add"]}, 400, "Bad request: url required");
+    await check({eventTypes: ["add"], url: "https://evil.com"}, 403, "Provided url is forbidden");
+    await check({eventTypes: ["add"], url: "http://example.com"}, 403, "Provided url is forbidden");  // not https
+    await check({eventTypes: ["add"], url: "https://example.com", isReadyColumn: "bar"}, 404, `Column not found "bar"`);
+  });
+
+  it("POST /docs/{did}/tables/{tid}/_unsubscribe validates inputs", async function() {
+    const subscribeResponse = await axios.post(
+      `${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/_subscribe`,
+      {eventTypes: ["add"], url: "https://example.com"}, chimpy
+    );
+    assert.equal(subscribeResponse.status, 200);
+    const {triggerId, unsubscribeKey, webhookId} = subscribeResponse.data;
+
+    async function check(requestBody: any, status: number, responseBody: any) {
+      const resp = await axios.post(
+        `${serverUrl}/api/docs/${docIds.Timesheets}/tables/Table1/_unsubscribe`,
+        requestBody, chimpy
+      );
+      assert.equal(resp.status, status);
+      if (status !== 200) {
+        responseBody = {error: responseBody};
+      }
+      assert.deepEqual(resp.data, responseBody);
+    }
+
+    await check({triggerId: 999}, 404, `Trigger not found "999"`);
+    await check({triggerId, webhookId: "foo"}, 404, `Webhook not found "foo"`);
+    await check({triggerId, webhookId}, 400, 'Bad request: id and unsubscribeKey both required');
+    await check({triggerId, webhookId, unsubscribeKey: "foo"}, 401, 'Wrong unsubscribeKey');
+
+    // Actually unsubscribe
+    await check({triggerId, webhookId, unsubscribeKey}, 200, {success: true});
+
+    // Trigger is now deleted!
+    await check({triggerId, webhookId, unsubscribeKey}, 404, `Trigger not found "${triggerId}"`);
+  });
+
+  describe("Daily API Limit", () => {
+    let redisClient: RedisClient;
+    let workspaceId: number;
+    let freeTeamApi: UserAPIImpl;
+
+    before(async function() {
+      if (!process.env.TEST_REDIS_URL) { this.skip(); }
+      redisClient = createClient(process.env.TEST_REDIS_URL);
+      freeTeamApi = makeUserApi('freeteam');
+      workspaceId = await getWorkspaceId(freeTeamApi, 'FreeTeamWs');
+    });
+
+    it("limits daily API usage", async function() {
+      // Make a new document in a free team site, currently the only product which limits daily API usage.
+      const docId = await freeTeamApi.newDoc({name: 'TestDoc'}, workspaceId);
+      const key = docDailyApiUsageKey(docId);
+      const limit = teamFreeFeatures.baseMaxApiUnitsPerDocumentPerDay!;
+      // Rather than making 5000 requests, set a high count directly in redis.
+      await redisClient.setAsync(key, String(limit - 2));
+
+      // Make three requests. The first two should succeed since we set the count to `limit - 2`.
+      // Wait a little after each request to allow time for the local cache to be updated with the redis count.
+      let response = await axios.get(`${serverUrl}/api/docs/${docId}/tables/Table1/records`, chimpy);
+      assert.equal(response.status, 200);
+      await delay(100);
+
+      response = await axios.get(`${serverUrl}/api/docs/${docId}/tables/Table1/records`, chimpy);
+      assert.equal(response.status, 200);
+      await delay(100);
+
+      // The count should now have reached the limit, and the key should expire in one day.
+      assert.equal(await redisClient.ttlAsync(key), 86400);
+      assert.equal(await redisClient.getAsync(key), String(limit));
+
+      // Making the same request a third time should fail.
+      response = await axios.get(`${serverUrl}/api/docs/${docId}/tables/Table1/records`, chimpy);
+      assert.equal(response.status, 429);
+      assert.deepEqual(response.data, {error: `Exceeded daily limit for document ${docId}`});
+    });
+
+    after(async () => {
+      // The before hook may have skipped without creating the client.
+      if (redisClient) { await redisClient.quitAsync(); }
+    });
+  });
+
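+  // Webhook deliveries are queued in redis (one list per document) and trimmed
+  // off the list as they are delivered; the suite below monitors redis traffic
+  // to verify both the pushes and the trims.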
+  describe("Webhooks", () => {
+    let serving: Serving;  // manages the test webhook server
+
+    let requests: WebhookRequests;
+
+    let receivedLastEvent: Promise<void>;
+
+    // Requests corresponding to adding 200 rows, sent in two batches of 100
+    const expected200AddEvents = [
+      _.range(100).map(i => ({
+        id: 9 + i, manualSort: 9 + i, A3: 200 + i, B3: true,
+      })),
+      _.range(100).map(i => ({
+        id: 109 + i, manualSort: 109 + i, A3: 300 + i, B3: true,
+      })),
+    ];
+
+    // Every event is sent to three webhook URLs which differ by the subscribed eventTypes.
+    // Each request is an array of one or more events.
+    // Multiple events caused by the same action bundle get batched into a single request.
+    const expectedRequests: WebhookRequests = {
+      "add": [
+        [{id: 1, A: 1, B: true, C: null, manualSort: 1}],
+        [{id: 2, A: 4, B: true, C: null, manualSort: 2}],
+
+        // After isReady (B) went to false and then true again
+        // we treat this as creation even though it's really an update
+        [{id: 2, A: 7, B: true, C: null, manualSort: 2}],
+
+        // From the big applies
+        [{id: 3, A3: 13, B3: true, manualSort: 3},
+         {id: 5, A3: 15, B3: true, manualSort: 5}],
+        [{id: 7, A3: 18, B3: true, manualSort: 7}],
+
+        ...expected200AddEvents,
+      ],
+      "update": [
+        [{id: 2, A: 8, B: true, C: null, manualSort: 2}],
+
+        // From the big applies
+        [{id: 1, A3: 101, B3: true, manualSort: 1}],
+      ],
+      "add,update": [
+        // add
+        [{id: 1, A: 1, B: true, C: null, manualSort: 1}],
+        [{id: 2, A: 4, B: true, C: null, manualSort: 2}],
+        [{id: 2, A: 7, B: true, C: null, manualSort: 2}],
+
+        // update
+        [{id: 2, A: 8, B: true, C: null, manualSort: 2}],
+
+        // from the big applies
+        [{id: 1, A3: 101, B3: true, manualSort: 1},   // update
+         {id: 3, A3: 13, B3: true, manualSort: 3},    // add
+         {id: 5, A3: 15, B3: true, manualSort: 5}],   // add
+
+        [{id: 7, A3: 18, B3: true, manualSort: 7}],   // add
+
+        ...expected200AddEvents,
+      ]
+    };
+
+    let redisMonitor: any;
+    let redisCalls: any[];
+
+    before(async function() {
+      if (!process.env.TEST_REDIS_URL) { this.skip(); }
+      requests = {
+        "add,update": [],
+        "add": [],
+        "update": [],
+      };
+
+      let resolveReceivedLastEvent: () => void;
+      receivedLastEvent = new Promise<void>(r => {
+        resolveReceivedLastEvent = r;
+      });
+
+      // TODO test retries on failure and slowness in a new test
+      serving = await serveSomething(app => {
+        app.use(bodyParser.json());
+        app.post('/:eventTypes', async ({body, params: {eventTypes}}, res) => {
+          requests[eventTypes as keyof WebhookRequests].push(body);
+          res.sendStatus(200);
+          if (
+            _.flattenDeep(_.values(requests)).length >=
+            _.flattenDeep(_.values(expectedRequests)).length
+          ) {
+            resolveReceivedLastEvent();
+          }
+        });
+      }, webhooksTestPort);
+
+      redisCalls = [];
+      redisMonitor = createClient(process.env.TEST_REDIS_URL);
+      redisMonitor.monitor();
+      redisMonitor.on("monitor", (_time: any, args: any, _rawReply: any) => {
+        redisCalls.push(args);
+      });
+    });
+
+    after(async function() {
+      if (!process.env.TEST_REDIS_URL) { this.skip(); }
+      serving.shutdown();
+      await redisMonitor.quitAsync();
+    });
+
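+    // Each request body delivered to the test server is a JSON array of row
+    // records like [{id: 1, A: 1, B: true, C: null, manualSort: 1}], matching
+    // the entries of expectedRequests above.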
+    it("delivers expected payloads from combinations of changes, with retrying and batching", async function() {
+      // Create a test document.
+      const ws1 = (await userApi.getOrgWorkspaces('current'))[0].id;
+      const docId = await userApi.newDoc({name: 'testdoc'}, ws1);
+      const doc = userApi.getDocAPI(docId);
+
+      // For some reason B is turned into Numeric even when given bools
+      await axios.post(`${serverUrl}/api/docs/${docId}/apply`, [
+        ['ModifyColumn', 'Table1', 'B', {type: 'Bool'}],
+      ], chimpy);
+
+      // Make a webhook for every combination of event types
+      const subscribeResponses = [];
+      const webhookIds: Record<string, string> = {};
+      for (const eventTypes of [
+        ["add"],
+        ["update"],
+        ["add", "update"],
+      ]) {
+        const {data, status} = await axios.post(
+          `${serverUrl}/api/docs/${docId}/tables/Table1/_subscribe`,
+          {eventTypes, url: `${serving.url}/${eventTypes}`, isReadyColumn: "B"}, chimpy
+        );
+        assert.equal(status, 200);
+        subscribeResponses.push(data);
+        webhookIds[data.webhookId] = String(eventTypes);
+      }
+
+      // Add and update some rows, trigger some events.
+      // Values of A where B is true and thus the record is ready are [1, 4, 7, 8],
+      // so those are the values seen in expectedRequests.
+      await doc.addRows("Table1", {
+        A: [1, 2],
+        B: [true, false],  // 1 is ready, 2 is not ready yet
+      });
+      await doc.updateRows("Table1", {id: [2], A: [3]});             // still not ready
+      await doc.updateRows("Table1", {id: [2], A: [4], B: [true]});  // ready!
+      await doc.updateRows("Table1", {id: [2], A: [5], B: [false]}); // not ready again
+      await doc.updateRows("Table1", {id: [2], A: [6]});             // still not ready
+      await doc.updateRows("Table1", {id: [2], A: [7], B: [true]});  // ready!
+      await doc.updateRows("Table1", {id: [2], A: [8]});             // still ready!
+
+      // The end result here is additions for column A (now A3) with values [13, 15, 18]
+      // and an update to 101
+      await axios.post(`${serverUrl}/api/docs/${docId}/apply`, [
+        ['BulkAddRecord', 'Table1', [3, 4, 5, 6], {A: [9, 10, 11, 12], B: [true, true, false, false]}],
+        ['BulkUpdateRecord', 'Table1', [1, 2, 3, 4, 5, 6], {
+          A: [101, 102, 13, 14, 15, 16],
+          B: [true, false, true, false, true, false],
+        }],
+
+        ['RenameColumn', 'Table1', 'A', 'A3'],
+        ['RenameColumn', 'Table1', 'B', 'B3'],
+
+        ['RenameTable', 'Table1', 'Table12'],
+
+        // FIXME a double rename A->A2->A3 doesn't seem to get summarised correctly
+        // ['RenameColumn', 'Table12', 'A2', 'A3'],
+        // ['RenameColumn', 'Table12', 'B2', 'B3'],
+
+        ['RemoveColumn', 'Table12', 'C'],
+      ], chimpy);
+
+      // FIXME record changes after a RenameTable in the same bundle
+      // don't appear in the action summary
+      await axios.post(`${serverUrl}/api/docs/${docId}/apply`, [
+        ['AddRecord', 'Table12', 7, {A3: 17, B3: false}],
+        ['UpdateRecord', 'Table12', 7, {A3: 18, B3: true}],
+
+        ['AddRecord', 'Table12', 8, {A3: 19, B3: true}],
+        ['UpdateRecord', 'Table12', 8, {A3: 20, B3: false}],
+
+        ['AddRecord', 'Table12', 9, {A3: 20, B3: true}],
+        ['RemoveRecord', 'Table12', 9],
+      ], chimpy);
+
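+      // Deliveries are batched per action bundle: the 200 rows added below in
+      // one call arrive as two requests of 100 rows each, which is exactly what
+      // expected200AddEvents encodes.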
+      // Add 200 rows. These become the `expected200AddEvents`.
+      await doc.addRows("Table12", {
+        A3: _.range(200, 400),
+        B3: arrayRepeat(200, true),
+      });
+
+      await receivedLastEvent;
+
+      // Unsubscribe
+      await Promise.all(subscribeResponses.map(async subscribeResponse => {
+        const unsubscribeResponse = await axios.post(
+          `${serverUrl}/api/docs/${docId}/tables/Table12/_unsubscribe`,
+          subscribeResponse, chimpy
+        );
+        assert.equal(unsubscribeResponse.status, 200);
+        assert.deepEqual(unsubscribeResponse.data, {success: true});
+      }));
+
+      // Further changes should generate no events because the triggers are gone
+      await doc.addRows("Table12", {
+        A3: [88, 99],
+        B3: [true, false],
+      });
+
+      assert.deepEqual(requests, expectedRequests);
+
+      // Check that the events were all pushed to the redis queue
+      const queueRedisCalls = redisCalls.filter(args => args[1] === "webhook-queue-" + docId);
+      const redisPushes = _.chain(queueRedisCalls)
+        .filter(args => args[0] === "rpush")          // Array<["rpush", key, ...events: string[]]>
+        .flatMap(args => args.slice(2))               // events: string[]
+        .map(JSON.parse)                              // events: WebhookEvent[]
+        .groupBy('id')                                // {[webHookId: string]: WebhookEvent[]}
+        .mapKeys((_value, key) => webhookIds[key])    // {[eventTypes: 'add'|'update'|'add,update']: WebhookEvent[]}
+        .mapValues(group => _.map(group, 'payload'))  // {[eventTypes: 'add'|'update'|'add,update']: RowRecord[]}
+        .value();
+      const expectedPushes = _.mapValues(expectedRequests, value => _.flatten(value));
+      assert.deepEqual(redisPushes, expectedPushes);
+
+      // Check that the events were all removed from the redis queue
+      const redisTrims = queueRedisCalls.filter(args => args[0] === "ltrim")
+        .map(([,, start, end]) => {
+          assert.equal(end, '-1');
+          start = Number(start);
+          assert.isTrue(start > 0);
+          return start;
+        });
+      const expectedTrims = Object.values(redisPushes).map(value => value.length);
+      assert.equal(
+        _.sum(redisTrims),
+        _.sum(expectedTrims),
+      );
+    });
+  });
+
+  // PLEASE ADD MORE TESTS HERE
+}
+
+interface WebhookRequests {
+  add: object[][];
+  update: object[][];
+  "add,update": object[][];
+}
+
+function setup(name: string, cb: () => Promise<void>) {
+  let api: UserAPIImpl;
+
+  before(async function() {
+    suitename = name;
+    dataDir = path.join(tmpDir, `${suitename}-data`);
+    await fse.mkdirs(dataDir);
+    await setupDataDir(dataDir);
+    await cb();
+
+    // create TestDoc as an empty doc into Private workspace
+    userApi = api = makeUserApi('docs-1');
+    const wid = await getWorkspaceId(api, 'Private');
+    docIds.TestDoc = await api.newDoc({name: 'TestDoc'}, wid);
+  });
+
+  after(async function() {
+    // remove TestDoc
+    await api.deleteDoc(docIds.TestDoc);
+    delete docIds.TestDoc;
+
+    // stop all servers
+    await home.stop();
+    await docs.stop();
+  });
+}
+
+function makeUserApi(org: string) {
+  return new UserAPIImpl(`${home.serverUrl}/o/${org}`, {
+    headers: {Authorization: 'Bearer api_key_for_chimpy'},
+    fetch: fetch as any,
+    newFormData: () => new FormData() as any,
+    logger: log
+  });
+}
+
+async function getWorkspaceId(api: UserAPIImpl, name: string) {
+  const workspaces = await api.getOrgWorkspaces('current');
+  return workspaces.find((w) => w.name === name)!.id;
+}
+
+async function startServer(serverTypes: string, _homeUrl?: string): Promise<TestServer> {
+  const server = new TestServer(serverTypes);
+  await server.start(_homeUrl);
+  return server;
+}
+
+const webhooksTestPort = 34365;
+
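+// TestServer runs the actual merged server in a child process, pointing it at
+// a testing socket so the test can discover whatever port it chose
+// (GRIST_PORT=0) via testing hooks.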
+class TestServer {
+  public testingSocket: string;
+  public testingHooks: TestingHooksClient;
+  public serverUrl: string;
+  public stopped = false;
+
+  private _server: ChildProcess;
+  private _exitPromise: Promise<number|string>;  // resolves to exit code or signal name
+
+  constructor(private _serverTypes: string) {}
+
+  public async start(_homeUrl?: string) {
+    // put node logs into files with meaningful names that relate to the suite name and server type
+    const fixedName = this._serverTypes.replace(/,/g, '_');
+    const nodeLogPath = path.join(tmpDir, `${suitename}-${fixedName}-node.log`);
+    const nodeLogFd = await fse.open(nodeLogPath, 'a');
+    const serverLog = process.env.VERBOSE ? 'inherit' : nodeLogFd;
+
+    // use a path for socket that relates to suite name and server types
+    this.testingSocket = path.join(tmpDir, `${suitename}-${fixedName}.socket`);
+
+    // env
+    const env = {
+      GRIST_DATA_DIR: dataDir,
+      GRIST_INST_DIR: tmpDir,
+      GRIST_SERVERS: this._serverTypes,
+      // with port '0' no need to hard code a port number (we can use testing hooks to find out what
+      // port server is listening on).
+      GRIST_PORT: '0',
+      GRIST_TESTING_SOCKET: this.testingSocket,
+      GRIST_DISABLE_S3: 'true',
+      REDIS_URL: process.env.TEST_REDIS_URL,
+      APP_HOME_URL: _homeUrl,
+      ALLOWED_WEBHOOK_DOMAINS: `example.com,localhost:${webhooksTestPort}`,
+      ...process.env
+    };
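+    // Note: process.env is spread last, so variables already set in the real
+    // environment (e.g. an explicit REDIS_URL) win over the test defaults above.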
+
+    const main = await testUtils.getBuildFile('app/server/mergedServerMain.js');
+    this._server = spawn('node', [main, '--testingHooks'], {
+      env,
+      stdio: ['inherit', serverLog, serverLog]
+    });
+
+    this._exitPromise = exitPromise(this._server);
+
+    // Try to be more helpful when server exits by printing out the tail of its log.
+    this._exitPromise.then((code) => {
+      if (this._server.killed) { return; }
+      log.error("Server died unexpectedly, with code", code);
+      const output = execFileSync('tail', ['-30', nodeLogPath]);
+      log.info(`\n===== BEGIN SERVER OUTPUT ====\n${output}\n===== END SERVER OUTPUT =====`);
+    })
+      .catch(() => undefined);
+
+    await this._waitServerReady(30000);
+    log.info(`server ${this._serverTypes} up and listening on ${this.serverUrl}`);
+  }
+
+  public async stop() {
+    if (this.stopped) { return; }
+    log.info("Stopping node server: " + this._serverTypes);
+    this.stopped = true;
+    this._server.kill();
+    this.testingHooks.close();
+    await this._exitPromise;
+  }
+
+  public async isServerReady(): Promise<boolean> {
+    // Let's wait for the testingSocket to be created, then get the port the server is listening on,
+    // and then do an api check. This approach allows us to start the server with GRIST_PORT set to '0',
+    // which will listen on the first available port, removing the need to hard code a port number.
+    try {
+      // wait for testing socket
+      while (!(await fse.pathExists(this.testingSocket))) {
+        await delay(200);
+      }
+
+      // create testing hooks and get own port
+      this.testingHooks = await connectTestingHooks(this.testingSocket);
+      const port: number = await this.testingHooks.getOwnPort();
+      this.serverUrl = `http://localhost:${port}`;
+
+      // wait for check
+      return (await fetch(`${this.serverUrl}/status/hooks`, {timeout: 1000})).ok;
+    } catch (err) {
+      return false;
+    }
+  }
+
+  private async _waitServerReady(ms: number) {
+    // It's important to clear the timeout, because it can prevent node from exiting otherwise,
+    // which is annoying when running only this test for debugging.
+    let timeout: any;
+    const maxDelay = new Promise<void>((resolve) => {
+      timeout = setTimeout(resolve, ms);
+    });
+    try {
+      await Promise.race([
+        this.isServerReady(),
+        this._exitPromise.then(() => { throw new Error("Server exited while waiting for it"); }),
+        maxDelay,
+      ]);
+    } finally {
+      clearTimeout(timeout);
+    }
+  }
+}
+
+async function setupDataDir(dir: string) {
+  // we'll be serving Hello.grist content for various document ids, so let's make copies of it in
+  // tmpDir
+  await testUtils.copyFixtureDoc('Hello.grist', path.resolve(dir, docIds.Timesheets + '.grist'));
+  await testUtils.copyFixtureDoc('Hello.grist', path.resolve(dir, docIds.Bananas + '.grist'));
+  await testUtils.copyFixtureDoc('Hello.grist', path.resolve(dir, docIds.Antartic + '.grist'));
+
+  await testUtils.copyFixtureDoc(
+    'ApiDataRecordsTest.grist',
+    path.resolve(dir, docIds.ApiDataRecordsTest + '.grist'));
+}
diff --git a/test/server/testUtils.ts b/test/server/testUtils.ts
index 6cd7b85e..2d55c996 100644
--- a/test/server/testUtils.ts
+++ b/test/server/testUtils.ts
@@ -169,7 +169,7 @@ export function assertMatchArray(stringArray: string[], regexArray: RegExp[]) {
  * @param {String} errCode - Error code to check against `err.code` from the caller.
  * @param {RegExp} errRegexp - Regular expression to check against `err.message` from the caller.
  */
-export function expectRejection(promise: Promise<any>, errCode: number, errRegexp: RegExp) {
+export function expectRejection(promise: Promise<any>, errCode: number|string, errRegexp: RegExp) {
   return promise
     .then(function() {
       assert(false, "Expected promise to return an error: " + errCode);
@@ -307,4 +307,11 @@ export class EnvironmentSnapshot {
   }
 }
 
+export async function getBuildFile(relativePath: string): Promise<string> {
+  if (await fse.pathExists(path.join('_build', relativePath))) {
+    return path.join('_build', relativePath);
+  }
+  return path.join('_build', 'core', relativePath);
+}
+
 export { assert };
diff --git a/test/tsconfig.json b/test/tsconfig.json
index cfec0a3a..ac7c07c9 100644
--- a/test/tsconfig.json
+++ b/test/tsconfig.json
@@ -9,6 +9,6 @@
   ],
   "references": [
     { "path": "../app" },
-    { "path": "../stubs/app" },
+    { "path": "../stubs/app" }
   ]
 }
diff --git a/yarn.lock b/yarn.lock
index 2c733fe3..fb576dd5 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -741,6 +741,11 @@ anymatch@~3.1.1:
     normalize-path "^3.0.0"
     picomatch "^2.0.4"
 
+app-module-path@2.2.0:
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/app-module-path/-/app-module-path-2.2.0.tgz#641aa55dfb7d6a6f0a8141c4b9c0aa50b6c24dd5"
+  integrity sha1-ZBqlXft9am8KgUHEucCqULbCTdU=
+
 app-root-path@^2.0.1:
   version "2.2.1"
   resolved "https://registry.yarnpkg.com/app-root-path/-/app-root-path-2.2.1.tgz#d0df4a682ee408273583d43f6f79e9892624bc9a"