Summary: The new "copyDoc" query parameter on the login page sets a short-lived cookie, which is then read when welcoming a new user, in order to copy that document to their Home workspace and redirect to it. Currently, only templates and bare forks set this parameter. A new API endpoint for copying a document to a workspace was also added. Test Plan: Browser tests. Reviewers: paulfitz Reviewed By: paulfitz Differential Revision: https://phab.getgrist.com/D3992 Branch: alex/skip-fstrings-3.9
parent
90fb4434cc
commit
3dadf93c98
@ -0,0 +1,158 @@
|
|||||||
|
import {ApiError} from 'app/common/ApiError';
|
||||||
|
import {parseSubdomainStrictly} from 'app/common/gristUrls';
|
||||||
|
import {removeTrailingSlash} from 'app/common/gutil';
|
||||||
|
import {DocStatus, IDocWorkerMap} from 'app/server/lib/DocWorkerMap';
|
||||||
|
import log from 'app/server/lib/log';
|
||||||
|
import {adaptServerUrl} from 'app/server/lib/requestUtils';
|
||||||
|
import * as express from 'express';
|
||||||
|
import fetch, {Response as FetchResponse, RequestInit} from 'node-fetch';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method transforms a doc worker's public url as needed based on the request.
|
||||||
|
*
|
||||||
|
* For historic reasons, doc workers are assigned a public url at the time
|
||||||
|
* of creation. In production/staging, this is of the form:
|
||||||
|
* https://doc-worker-NNN-NNN-NNN-NNN.getgrist.com/v/VVVV/
|
||||||
|
* and in dev:
|
||||||
|
* http://localhost:NNNN/v/VVVV/
|
||||||
|
*
|
||||||
|
* Prior to support for different base domains, this was fine. Now that different
|
||||||
|
* base domains are supported, a wrinkle arises. When a web client communicates
|
||||||
|
* with a doc worker, it is important that it accesses the doc worker via a url
|
||||||
|
* containing the same base domain as the web page the client is on (for cookie
|
||||||
|
* purposes). Hence this method.
|
||||||
|
*
|
||||||
|
* If both the request and docWorkerUrl contain identifiable base domains (not localhost),
|
||||||
|
* then the base domain of docWorkerUrl is replaced with that of the request.
|
||||||
|
*
|
||||||
|
* But wait, there's another wrinkle: custom domains. In this case, we have a single
|
||||||
|
* domain available to serve a particular org from. This method will use the origin of req
|
||||||
|
* and include a /dw/doc-worker-NNN-NNN-NNN-NNN/
|
||||||
|
* (or /dw/local-NNNN/) prefix in all doc worker paths. Once this is in place, it
|
||||||
|
* will allow doc worker routing to be changed so it can be overlaid on a custom
|
||||||
|
* domain.
|
||||||
|
*
|
||||||
|
* TODO: doc worker registration could be redesigned to remove the assumption
|
||||||
|
* of a fixed base domain.
|
||||||
|
*/
|
||||||
|
export function customizeDocWorkerUrl(
|
||||||
|
docWorkerUrlSeed: string|undefined,
|
||||||
|
req: express.Request
|
||||||
|
): string|null {
|
||||||
|
if (!docWorkerUrlSeed) {
|
||||||
|
// When no doc worker seed, we're in single server mode.
|
||||||
|
// Return null, to signify that the URL prefix serving the
|
||||||
|
// current endpoint is the only one available.
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const docWorkerUrl = new URL(docWorkerUrlSeed);
|
||||||
|
const workerSubdomain = parseSubdomainStrictly(docWorkerUrl.hostname).org;
|
||||||
|
adaptServerUrl(docWorkerUrl, req);
|
||||||
|
|
||||||
|
// We wish to migrate to routing doc workers by path, so insert a doc worker identifier
|
||||||
|
// in the path (if not already present).
|
||||||
|
if (!docWorkerUrl.pathname.startsWith('/dw/')) {
|
||||||
|
// When doc worker is localhost, the port number is necessary and sufficient for routing.
|
||||||
|
// Let's add a /dw/... prefix just for consistency.
|
||||||
|
const workerIdent = workerSubdomain || `local-${docWorkerUrl.port}`;
|
||||||
|
docWorkerUrl.pathname = `/dw/${workerIdent}${docWorkerUrl.pathname}`;
|
||||||
|
}
|
||||||
|
return docWorkerUrl.href;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* Gets the worker responsible for a given assignment, and fetches a url
|
||||||
|
* from the worker.
|
||||||
|
*
|
||||||
|
* If the fetch fails, we throw an exception, unless we see enough evidence
|
||||||
|
* to unassign the worker and try again.
|
||||||
|
*
|
||||||
|
* - If GRIST_MANAGED_WORKERS is set, we assume that we've arranged
|
||||||
|
* for unhealthy workers to be removed automatically, and that if a
|
||||||
|
* fetch returns a 404 with specific content, it is proof that the
|
||||||
|
* worker is no longer in existence. So if we see a 404 with that
|
||||||
|
* specific content, we can safely de-list the worker from redis,
|
||||||
|
* and repeat.
|
||||||
|
* - If GRIST_MANAGED_WORKERS is not set, we accept a broader set
|
||||||
|
* of failures as evidence of a missing worker.
|
||||||
|
*
|
||||||
|
* The specific content of a 404 that will be treated as evidence of
|
||||||
|
* a doc worker not being present is:
|
||||||
|
* - A json format body
|
||||||
|
* - With a key called "message"
|
||||||
|
* - With the value of "message" being "document worker not present"
|
||||||
|
* In production, this is provided by a special doc-worker-* load balancer
|
||||||
|
* rule.
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
export async function getWorker(
  docWorkerMap: IDocWorkerMap,
  assignmentId: string,
  urlPath: string,
  config: RequestInit = {}
) {
  if (!useWorkerPool()) {
    // This should never happen. We are careful to not use getWorker
    // when everything is on a single server, since it is burdensome
    // for self-hosted users to figure out the correct settings for
    // the server to be able to contact itself, and there are cases
    // of the defaults not working.
    throw new Error("AppEndpoint.getWorker was called unnecessarily");
  }
  let docStatus: DocStatus|undefined;
  const workersAreManaged = Boolean(process.env.GRIST_MANAGED_WORKERS);
  // Loop: try the currently assigned worker; if there is convincing evidence
  // it is gone, de-list it and retry with a fresh assignment.
  for (;;) {
    docStatus = await docWorkerMap.assignDocWorker(assignmentId);
    // Default to a 10s timeout; a timeout supplied by the caller in `config`
    // takes precedence because of spread ordering.
    const configWithTimeout = {timeout: 10000, ...config};
    const fullUrl = removeTrailingSlash(docStatus.docWorker.internalUrl) + urlPath;
    try {
      const resp: FetchResponse = await fetch(fullUrl, configWithTimeout);
      if (resp.ok) {
        return {
          resp,
          docStatus,
        };
      }
      if (resp.status === 403) {
        throw new ApiError("You do not have access to this document.", resp.status);
      }
      if (resp.status !== 404) {
        throw new ApiError(resp.statusText, resp.status);
      }
      // A 404 may mean the worker is gone, but only if the body is JSON with
      // the exact expected "message" (see doc comment above). Any other 404
      // is surfaced to the caller as an ApiError.
      let body: any;
      try {
        body = await resp.json();
      } catch (e) {
        throw new ApiError(resp.statusText, resp.status);
      }
      if (!(body && body.message && body.message === 'document worker not present')) {
        throw new ApiError(resp.statusText, resp.status);
      }
      // This is a 404 with the expected content for a missing worker.
    } catch (e) {
      // Note that ApiErrors thrown in the try-block above also land here;
      // they get rethrown below unless the condition lets them pass.
      log.rawDebug(`AppEndpoint.getWorker failure`, {
        url: fullUrl,
        docId: assignmentId,
        status: e.status,
        message: String(e),
        workerId: docStatus.docWorker.id,
      });
      // If workers are managed, no errors merit continuing except a 404.
      // Otherwise, we continue if we see a system error (e.g. ECONNREFUSED).
      // We don't accept timeouts since there is too much potential to
      // bring down a single-worker deployment that has a hiccup.
      if (workersAreManaged || !(e.type === 'system')) {
        throw e;
      }
    }
    // Reaching this point means the worker failed convincingly (either the
    // managed-worker 404 case fell through the try-block, or an acceptable
    // system error was swallowed above): remove it and try again.
    log.warn(`fetch from ${fullUrl} failed convincingly, removing that worker`);
    await docWorkerMap.removeWorker(docStatus.docWorker.id);
    docStatus = undefined;
  }
}
|
||||||
|
|
||||||
|
// Return true if document related endpoints are served by separate workers.
|
||||||
|
export function useWorkerPool() {
|
||||||
|
return process.env.GRIST_SINGLE_PORT !== 'true';
|
||||||
|
}
|
@ -0,0 +1,44 @@
|
|||||||
|
import {assert, driver} from 'mocha-webdriver';
|
||||||
|
import * as gu from 'test/nbrowser/gristUtils';
|
||||||
|
import {setupTestSuite} from 'test/nbrowser/testUtils';
|
||||||
|
import {DocCreationInfo} from "app/common/DocListAPI";
|
||||||
|
|
||||||
|
describe('GridView', function() {
  this.timeout(20000);
  const cleanup = setupTestSuite();
  // Shared across tests: the second test reuses the session created by the first.
  let session: gu.Session, doc: DocCreationInfo, api;

  it('should show tables with no columns without errors', async function() {
    session = await gu.session().login();
    doc = await session.tempDoc(cleanup, 'Hello.grist');
    api = session.createHomeApi();

    // Create and open a new table with no columns
    await api.applyUserActions(doc.id, [
      ['AddTable', 'Empty', []],
    ]);
    await gu.getPageItem(/Empty/).click();

    // The only 'column' should be the button to add a column
    const columnNames = await driver.findAll('.column_name', e => e.getText());
    assert.deepEqual(columnNames, ['+']);

    // There should be no errors
    assert.lengthOf(await driver.findAll('.test-notifier-toast-wrapper'), 0);
  });

  // When a grid is scrolled, and then data is changed (due to click in a linked section), some
  // records are not rendered or the position of the scroll container is corrupted.
  it('should render list with wrapped choices correctly', async function() {
    await session.tempDoc(cleanup, 'Teams.grist');
    // Select a row in PROJECTS, scroll the linked TODO section, then change
    // the PROJECTS selection, which refreshes TODO's data while scrolled.
    await gu.selectSectionByTitle("PROJECTS");
    await gu.getCell(0, 1).click();
    await gu.selectSectionByTitle("TODO");
    await gu.scrollActiveView(0, 300);
    await gu.selectSectionByTitle("PROJECTS");
    await gu.getCell(0, 2).click();
    await gu.selectSectionByTitle("TODO");
    // This throws an error, as the cell is not rendered.
    assert.equal(await gu.getCell(0, 2).getText(), "2021-09-27 Mo\n2021-10-04 Mo");
  });
});
|
@ -0,0 +1,212 @@
|
|||||||
|
/**
|
||||||
|
* Parsing strings as references when importing into an existing table
|
||||||
|
*/
|
||||||
|
import {assert, driver, Key, WebElement} from 'mocha-webdriver';
|
||||||
|
import * as gu from 'test/nbrowser/gristUtils';
|
||||||
|
import {openSource as openSourceMenu, waitForColumnMapping} from 'test/nbrowser/importerTestUtils';
|
||||||
|
import {setupTestSuite} from 'test/nbrowser/testUtils';
|
||||||
|
|
||||||
|
describe('ImportReferences', function() {
  this.timeout(30000);
  const cleanup = setupTestSuite();

  before(async function() {
    // Log in and import a sample document.
    const session = await gu.session().teamSite.user('user1').login();
    await session.tempDoc(cleanup, 'ImportReferences.grist');
  });

  afterEach(() => gu.checkForErrors());

  it('should convert strings to references', async function() {
    // Import a CSV file containing strings representing references
    await gu.importFileDialog('./uploads/name_references.csv');
    assert.equal(await driver.findWait('.test-importer-preview', 2000).isPresent(), true);

    // Change the destination to the existing table
    await driver.findContent('.test-importer-target-existing-table', /Table1/).click();
    await gu.waitForServer();

    // Finish import, and verify the import succeeded.
    await driver.find('.test-modal-confirm').click();
    await gu.waitForServer();

    // Verify data was imported to Names correctly.
    assert.deepEqual(
      await gu.getVisibleGridCells({rowNums: [1, 2, 3, 4, 5], cols: [0, 1, 2]}),
      [
        // Previously existing data in the fixture document
        'Alice', '', '',
        'Bob', '', '',

        // Imported data from the CSV file
        // The second column is references which have been successfully parsed from strings
        // The third column is a formula equal to the second column to demonstrate the references
        'Charlie', 'Alice', 'Table1[1]',
        'Dennis', 'Bob', 'Table1[2]',

        // 'add new' row
        '', '', '',
      ]
    );

    // TODO this test relies on the imported data referring to names (Alice,Bob)
    // already existing in the table before the import, and not being changed by the import
  });

  it('should support importing into any reference columns and show preview', async function() {
    // Switch to page showing Projects and Tasks.
    await gu.getPageItem('Projects').click();
    await gu.waitForServer(); // wait for table load

    // Load up a CSV file that matches the structure of the Tasks table.
    await gu.importFileDialog('./uploads/ImportReferences-Tasks.csv');

    // The default import into "New Table" just shows the content of the file.
    // Note: the "!" prefix in expected values below marks cells rendered as
    // invalid (pink) -- see the `mapper` helper at the bottom of this file.
    assert.equal(await driver.findWait('.test-importer-preview', 2000).isPresent(), true);
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4, 5, 6], [1, 2, 3, 4], mapper), [
      'Foo2', 'Clean', '1000', '1,000', '27 Mar 2023', '', '0',
      'Bar2', 'Wash', '3000', '2,000', '', 'Projects[2]', '2',
      'Baz2', 'Build2', '', '2', '20 Mar 2023', 'Projects[1]', '1',
      'Zoo2', 'Clean', '2000', '4,000', '24 Apr 2023', 'Projects[3]', '3',
    ]);

    await driver.findContent('.test-importer-target-existing-table', /Tasks/).click();
    await gu.waitForServer();

    // See that preview works, and cells that should be valid are valid.
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4], [1, 2, 3, 4], mapper), [
      // Label, PName, PIndex, PDate, PRowID
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '3,000', '', '!Projects[2]',
      'Baz2', '!Build2', '', '!2023-03-20', '!Projects[1]',
      'Zoo2', 'Clean', '2,000', '24 Apr 2023', '!Projects[3]',
    ]);

    await driver.find('.test-modal-confirm').click();
    await gu.waitForServer();

    // Verify data was imported to Tasks correctly.
    assert.deepEqual(
      await gu.getVisibleGridCells({section: 'TASKS', cols: [0, 1, 2, 3, 4], rowNums: [4, 5, 6, 7, 8, 9], mapper}), [
      // Label, PName, PIndex, PDate, PRowID
      // Previous data in the fixture, in row 4
      'Zoo', 'Clean', '2,000', '27 Mar 2023', 'Projects[3]',
      // New rows (values like "!Project[2]" are invalid, which may be fixed in the future).
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '3,000', '', '!Projects[2]',
      'Baz2', '!Build2', '', '!2023-03-20', '!Projects[1]',
      'Zoo2', 'Clean', '2,000', '24 Apr 2023', '!Projects[3]',
      // 'Add New' row
      '', '', '', '', '',
    ]);

    await gu.undo();
  });

  it('should support importing numeric columns as lookups or rowIDs', async function() {
    // Load up the same CSV file again, with Tasks as the destination.
    await gu.importFileDialog('./uploads/ImportReferences-Tasks.csv');
    await driver.findContent('.test-importer-target-existing-table', /Tasks/).click();
    await gu.waitForServer();
    await waitForColumnMapping();

    // Check that preview works, and cells are valid.
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4], [1, 2, 3, 4], mapper), [
      // Label, PName, PIndex, PDate, PRowID
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '3,000', '', '!Projects[2]',
      'Baz2', '!Build2', '', '!2023-03-20', '!Projects[1]',
      'Zoo2', 'Clean', '2,000', '24 Apr 2023', '!Projects[3]',
    ]);

    // Check that dropdown for Label does not include "(as row ID)" entries, but the dropdown for
    // PName (a reference column) does.
    await openSourceMenu('Label');
    assert.equal(await findColumnMenuItem('PIndex').isPresent(), true);
    assert.equal(await findColumnMenuItem(/as row ID/).isPresent(), false);
    await driver.sendKeys(Key.ESCAPE);

    await openSourceMenu('PName');
    assert.equal(await findColumnMenuItem('PIndex').isPresent(), true);
    assert.equal(await findColumnMenuItem('PIndex (as row ID)').isPresent(), true);
    await driver.sendKeys(Key.ESCAPE);

    // Change PIndex column from lookup to row ID.
    await openSourceMenu('PIndex');
    await findColumnMenuItem('PIndex (as row ID)').click();
    await gu.waitForServer();

    // The values become invalid because there are no such rowIDs.
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4], [1, 2, 3, 4], mapper), [
      // Label, PName, PIndex, PDate, PRowID
      'Foo2', 'Clean', '!1000', '27 Mar 2023', '',
      'Bar2', 'Wash', '!3000', '', '!Projects[2]',
      'Baz2', '!Build2', '', '!2023-03-20', '!Projects[1]',
      'Zoo2', 'Clean', '!2000', '24 Apr 2023', '!Projects[3]',
    ]);

    // Try a lookup using PIndex2. It is differently formatted, one value is invalid, and one is a
    // valid row ID (but shouldn't be seen as a rowID for a lookup)
    await openSourceMenu('PIndex');
    await findColumnMenuItem('PIndex2').click();
    await gu.waitForServer();

    // Note: two PIndex values are different, and two are invalid.
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4], [1, 2, 3, 4], mapper), [
      // Label, PName, PIndex, PDate, PRowID
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '2,000', '', '!Projects[2]',
      'Baz2', '!Build2', '!2.0', '!2023-03-20', '!Projects[1]',
      'Zoo2', 'Clean', '!4000.0', '24 Apr 2023', '!Projects[3]',
    ]);

    // Change PRowID column to use "PID (as row ID)". It has 3 valid rowIDs.
    await openSourceMenu('PRowID');
    await findColumnMenuItem('PID (as row ID)').click();
    await gu.waitForServer();

    // Note: PRowID values are now valid.
    assert.deepEqual(await gu.getPreviewContents([0, 1, 2, 3, 4], [1, 2, 3, 4], mapper), [
      // Label, PName, PIndex, PDate, PRowID
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '2,000', '', 'Projects[2]',
      'Baz2', '!Build2', '!2.0', '!2023-03-20', 'Projects[1]',
      'Zoo2', 'Clean', '!4000.0', '24 Apr 2023', 'Projects[3]',
    ]);

    await driver.find('.test-modal-confirm').click();
    await gu.waitForServer();

    // Verify data was imported to Tasks correctly.
    assert.deepEqual(
      await gu.getVisibleGridCells({section: 'TASKS', cols: [0, 1, 2, 3, 4], rowNums: [4, 5, 6, 7, 8, 9], mapper}), [
      // Label, PName, PIndex, PDate, PRowID
      // Previous data in the fixture, in row 4
      'Zoo', 'Clean', '2,000', '27 Mar 2023', 'Projects[3]',
      // New rows; PRowID values are valid.
      'Foo2', 'Clean', '1,000', '27 Mar 2023', '',
      'Bar2', 'Wash', '2,000', '', 'Projects[2]',
      'Baz2', '!Build2', '!2.0', '!2023-03-20', 'Projects[1]',
      'Zoo2', 'Clean', '!4000.0', '24 Apr 2023', 'Projects[3]',
      // 'Add New' row
      '', '', '', '', '',
    ]);

    await gu.undo();
  });
});
|
||||||
|
|
||||||
|
// mapper for getVisibleGridCells and getPreviewContents to get both text and whether the cell is
|
||||||
|
// invalid (pink). Invalid cells prefixed with "!".
|
||||||
|
async function mapper(el: WebElement) {
|
||||||
|
let text = await el.getText();
|
||||||
|
if (await el.find(".field_clip").matches(".invalid")) {
|
||||||
|
text = "!" + text;
|
||||||
|
}
|
||||||
|
return text;
|
||||||
|
}
|
||||||
|
|
||||||
|
function findColumnMenuItem(label: RegExp|string) {
|
||||||
|
return driver.findContent('.test-importer-column-match-menu-item', label);
|
||||||
|
}
|
@ -0,0 +1,143 @@
|
|||||||
|
/**
|
||||||
|
* Test of the importing logic in the DocMenu page.
|
||||||
|
*/
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import {assert, driver, Key} from 'mocha-webdriver';
|
||||||
|
import * as tmp from 'tmp-promise';
|
||||||
|
import * as util from 'util';
|
||||||
|
|
||||||
|
import { SQLiteDB } from 'app/server/lib/SQLiteDB';
|
||||||
|
import * as gu from 'test/nbrowser/gristUtils';
|
||||||
|
import {setupTestSuite} from 'test/nbrowser/testUtils';
|
||||||
|
import { copyFixtureDoc } from 'test/server/testUtils';
|
||||||
|
|
||||||
|
const write = util.promisify(fs.write);
|
||||||
|
|
||||||
|
describe('UploadLimits', function() {
  this.timeout(20000);
  const cleanup = setupTestSuite();

  // Cleanup callbacks for temp files created below, invoked in after().
  const cleanupCbs: Array<() => void> = [];

  // Creates a temp file of exactly `size` bytes (filled with 't') with the
  // given filename postfix, and registers it for cleanup.
  async function generateFile(postfix: string, size: number): Promise<string> {
    const obj = await tmp.file({postfix, mode: 0o644});
    await write(obj.fd, Buffer.alloc(size, 't'));
    cleanupCbs.push(obj.cleanup);
    return obj.path;
  }

  // Create a valid Grist file of at least the desired length. The file may be
  // slightly larger than requested.
  async function generateGristFile(minSize: number): Promise<string> {
    const obj = await tmp.file({postfix: '.grist', mode: 0o644});
    await copyFixtureDoc('Hello.grist', obj.path);
    const size = fs.statSync(obj.path).size;
    const db = await SQLiteDB.openDBRaw(obj.path);
    // Make a string that is long enough to push the doc over the required size.
    const longString = 'x'.repeat(Math.max(1, minSize - size));
    // Add the string somewhere in the doc. For now we place it in a separate
    // table - this may eventually become invalid, but it works for now.
    // There'll be a little overhead so we'll overshoot the target length a bit,
    // but that's fine.
    await db.exec('CREATE TABLE _gristsys_extra(txt)');
    await db.run('INSERT INTO _gristsys_extra(txt) VALUES(?)', [longString]);
    await db.close();
    // Sanity-check the final size: at least minSize, and not wildly over.
    const size2 = fs.statSync(obj.path).size;
    if (size2 < minSize || size2 > minSize * 1.2) {
      throw new Error(`generateGristFile size is off, wanted ${minSize}, got ${size2}`);
    }
    cleanupCbs.push(obj.cleanup);
    return obj.path;
  }

  after(function() {
    for (const cleanup of cleanupCbs) {
      cleanup();
    }
  });

  afterEach(async function() {
    await gu.checkForErrors();
  });

  const maxImport = 1024 * 1024; // See GRIST_MAX_UPLOAD_IMPORT_MB = 1 in testServer.ts
  const maxAttachment = 2 * 1024 * 1024; // See GRIST_MAX_UPLOAD_ATTACHMENT_MB = 2 in testServer.ts

  it('should prevent large uploads for imports', async function() {
    const session = await gu.session().teamSite.login();
    await session.loadDocMenu('/');

    // Generate and upload a large csv file. It should be blocked on the client side.
    const largeFilePath = await generateFile(".csv", maxImport + 1000);
    await gu.docMenuImport(largeFilePath);

    // Ensure an error is shown.
    assert.match(await driver.findWait('.test-notifier-toast-message', 1000).getText(),
      /Imported files may not exceed 1.0MB/);

    // Now try to import directly to server, and verify that the server enforces this limit too.
    const p = gu.importFixturesDoc('Chimpy', 'nasa', 'Horizon', largeFilePath, {load: false});
    await assert.isRejected(p, /Payload Too Large/);
    const err = await p.catch((e) => e);
    assert.equal(err.status, 413);
    assert.isObject(err.details);
    assert.match(err.details.userError, /Imported files must not exceed 1.0MB/);
  });

  it('should allow large uploads of .grist docs', async function() {
    const session = await gu.session().teamSite.login();
    await session.loadDocMenu('/');

    // Generate and upload a large .grist file. It should not be subject to limits.
    const largeFilePath = await generateGristFile(maxImport * 2 + 1000);
    await gu.docMenuImport(largeFilePath);

    await gu.waitForDocToLoad();
    assert.equal(await gu.getCell(0, 1).getText(), 'hello');
  });

  it('should prevent large uploads for attachments', async function() {
    const session = await gu.session().teamSite.login();
    await session.tempDoc(cleanup, 'Hello.grist');

    // Clear the first cell.
    await gu.getCell(0, 1).click();
    await driver.sendKeys(Key.DELETE);
    await gu.waitForServer();

    // Change column to Attachments.
    await gu.toggleSidePanel('right', 'open');
    await driver.find('.test-right-tab-field').click();
    await gu.setType(/Attachment/);
    await driver.findWait('.test-type-transform-apply', 1000).click();
    await gu.waitForServer();

    // We can upload multiple smaller files (the limit is per-file here).
    const largeFilePath1 = await generateFile(".png", maxAttachment - 1000);
    const largeFilePath2 = await generateFile(".jpg", maxAttachment - 1000);
    await gu.fileDialogUpload([largeFilePath1, largeFilePath2].join(","),
      () => gu.getCell(0, 1).find('.test-attachment-icon').click());
    await gu.getCell(0, 1).findWait('.test-attachment-widget > [class*=test-pw-]', 1000);

    // We don't expect any errors here.
    assert.lengthOf(await driver.findAll('.test-notifier-toast-wrapper'), 0);

    // Expect to find two attachments in the cell.
    assert.lengthOf(await gu.getCell(0, 1).findAll('.test-attachment-widget > [class*=test-pw-]'), 2);

    // But we can't upload larger files, even one at a time.
    const largeFilePath3 = await generateFile(".jpg", maxAttachment + 1000);
    await gu.fileDialogUpload(largeFilePath3,
      () => gu.getCell(0, 2).find('.test-attachment-icon').click());
    await driver.sleep(200);
    await gu.waitForServer();

    // Check that there is a warning and the cell hasn't changed.
    assert.match(await driver.findWait('.test-notifier-toast-message', 1000).getText(),
      /Attachments may not exceed 2.0MB/);
    assert.lengthOf(await gu.getCell(0, 2).findAll('.test-attachment-widget > [class*=test-pw-]'), 0);

    // TODO We should try to add attachment via API and verify that the server enforces the limit
    // too, but at the moment we don't have an endpoint to add attachments via the API.
  });
});
|
@ -0,0 +1,74 @@
|
|||||||
|
/**
|
||||||
|
* Testing utilities used in Importer test suites.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {driver, stackWrapFunc, WebElementPromise} from 'mocha-webdriver';
|
||||||
|
import * as gu from 'test/nbrowser/gristUtils';
|
||||||
|
|
||||||
|
// Helper to get the <input> element of the parse option whose label matches
// labelRE in the ParseOptions dialog.
export const getParseOptionInput = stackWrapFunc((labelRE: RegExp): WebElementPromise =>
  driver.findContent('.test-parseopts-opt', labelRE).find('input'));
|
||||||
|
|
||||||
|
type CellDiff = string|[string|undefined, string|undefined, string|undefined];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns preview diff cell values when the importer is updating existing records.
|
||||||
|
*
|
||||||
|
* If a cell has no diff, just the cell value is returned. Otherwise, a 3-tuple
|
||||||
|
* containing the parent, remote, and common values (in that order) is returned.
|
||||||
|
*/
|
||||||
|
export const getPreviewDiffCellValues = stackWrapFunc(async (cols: number[], rowNums: number[]) => {
|
||||||
|
return gu.getPreviewContents<CellDiff>(cols, rowNums, async (cell) => {
|
||||||
|
const hasParentDiff = await cell.find('.diff-parent').isPresent();
|
||||||
|
const hasRemoteDiff = await cell.find('.diff-remote').isPresent();
|
||||||
|
const hasCommonDiff = await cell.find('.diff-common').isPresent();
|
||||||
|
|
||||||
|
const isDiff = hasParentDiff || hasRemoteDiff || hasCommonDiff;
|
||||||
|
return !isDiff ? await cell.getText() :
|
||||||
|
[
|
||||||
|
hasParentDiff ? await cell.find('.diff-parent').getText() : undefined,
|
||||||
|
hasRemoteDiff ? await cell.find('.diff-remote').getText() : undefined,
|
||||||
|
hasCommonDiff ? await cell.find('.diff-common').getText() : undefined,
|
||||||
|
];
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Helper that waits for the diff preview to finish loading.
|
||||||
|
export const waitForDiffPreviewToLoad = stackWrapFunc(async (): Promise<void> => {
|
||||||
|
await driver.wait(() => driver.find('.test-importer-preview').isPresent(), 5000);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Helper that gets the list of visible column matching rows to the left of the preview.
|
||||||
|
export const getColumnMatchingRows = stackWrapFunc(async (): Promise<{source: string, destination: string}[]> => {
|
||||||
|
return await driver.findAll('.test-importer-column-match-source-destination', async (el) => {
|
||||||
|
const source = await el.find('.test-importer-column-match-formula').getText();
|
||||||
|
const destination = await el.find('.test-importer-column-match-destination').getText();
|
||||||
|
return {source, destination};
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function waitForColumnMapping() {
|
||||||
|
await driver.wait(() => driver.find(".test-importer-column-match-options").isDisplayed(), 300);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function openColumnMapping() {
|
||||||
|
const selected = driver.find('.test-importer-target-selected');
|
||||||
|
await selected.find('.test-importer-target-column-mapping').click();
|
||||||
|
await driver.sleep(200); // animation
|
||||||
|
await waitForColumnMapping();
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function openTableMapping() {
|
||||||
|
await driver.find('.test-importer-table-mapping').click();
|
||||||
|
await driver.sleep(200); // animation
|
||||||
|
await driver.wait(() => driver.find(".test-importer-target").isDisplayed(), 300);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Opens the menu for the destination column, by clicking the source.
|
||||||
|
*/
|
||||||
|
export async function openSource(text: string|RegExp) {
|
||||||
|
await driver.findContent('.test-importer-column-match-destination', text)
|
||||||
|
.findClosest('.test-importer-column-match-source-destination')
|
||||||
|
.find('.test-importer-column-match-source').click();
|
||||||
|
}
|
Loading…
Reference in new issue