2020-07-21 13:20:51 +00:00
|
|
|
import * as net from 'net';
|
|
|
|
|
|
|
|
import {UserProfile} from 'app/common/LoginSessionAPI';
|
|
|
|
import {Deps as ActiveDocDeps} from 'app/server/lib/ActiveDoc';
|
2021-10-01 14:24:23 +00:00
|
|
|
import {Deps as DiscourseConnectDeps} from 'app/server/lib/DiscourseConnect';
|
2022-06-04 04:12:30 +00:00
|
|
|
import {Deps as CommClientDeps} from 'app/server/lib/Client';
|
2023-08-07 14:28:17 +00:00
|
|
|
import * as Client from 'app/server/lib/Client';
|
2022-06-04 04:12:30 +00:00
|
|
|
import {Comm} from 'app/server/lib/Comm';
|
2022-07-04 14:14:55 +00:00
|
|
|
import log from 'app/server/lib/log';
|
2020-07-21 13:20:51 +00:00
|
|
|
import {IMessage, Rpc} from 'grain-rpc';
|
2023-11-30 19:08:46 +00:00
|
|
|
import {EventEmitter} from 'events';
|
2021-10-01 14:24:23 +00:00
|
|
|
import {Request} from 'express';
|
2021-04-26 21:54:09 +00:00
|
|
|
import * as t from 'ts-interface-checker';
|
2020-07-21 13:20:51 +00:00
|
|
|
import {FlexServer} from './FlexServer';
|
2023-11-30 19:08:46 +00:00
|
|
|
import {ClientJsonMemoryLimits, ITestingHooks} from './ITestingHooks';
|
2020-07-21 13:20:51 +00:00
|
|
|
import ITestingHooksTI from './ITestingHooks-ti';
|
|
|
|
import {connect, fromCallback} from './serverUtils';
|
2021-11-26 10:43:55 +00:00
|
|
|
import {WidgetRepositoryImpl} from 'app/server/lib/WidgetRepository';
|
2020-07-21 13:20:51 +00:00
|
|
|
|
2021-04-26 21:54:09 +00:00
|
|
|
// Runtime type-checkers for the ITestingHooks interface, generated from ITestingHooks-ti.
// UserProfile is substituted with a plain "object" checker since its ti definition is not
// available in this module.
const tiCheckers = t.createCheckers(ITestingHooksTI, {UserProfile: t.name("object")});
|
|
|
|
|
2021-07-01 15:15:43 +00:00
|
|
|
export function startTestingHooks(socketPath: string, port: number,
|
2020-07-21 13:20:51 +00:00
|
|
|
comm: Comm, flexServer: FlexServer,
|
|
|
|
workerServers: FlexServer[]): Promise<net.Server> {
|
|
|
|
// Create socket server listening on the given path for testing connections.
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
const server = net.createServer();
|
|
|
|
server.on('error', reject);
|
|
|
|
server.on('listening', () => resolve(server));
|
|
|
|
server.on('connection', socket => {
|
|
|
|
// On connection, create an Rpc object communicating over that socket.
|
|
|
|
const rpc = connectToSocket(new Rpc({logger: {}}), socket);
|
|
|
|
// Register the testing implementation.
|
|
|
|
rpc.registerImpl('testing',
|
2021-07-01 15:15:43 +00:00
|
|
|
new TestingHooks(port, comm, flexServer, workerServers),
|
2021-04-26 21:54:09 +00:00
|
|
|
tiCheckers.ITestingHooks);
|
2020-07-21 13:20:51 +00:00
|
|
|
});
|
|
|
|
server.listen(socketPath);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
function connectToSocket(rpc: Rpc, socket: net.Socket): Rpc {
|
|
|
|
socket.setEncoding('utf8');
|
2023-08-07 14:28:17 +00:00
|
|
|
// Poor-man's JSON processing, only OK because this is for testing only. If multiple messages
|
|
|
|
// are received quickly, they may arrive in the same buf, and JSON.parse will fail.
|
2020-07-21 13:20:51 +00:00
|
|
|
socket.on('data', (buf: string) => rpc.receiveMessage(JSON.parse(buf)));
|
|
|
|
rpc.setSendMessage((m: IMessage) => fromCallback(cb => socket.write(JSON.stringify(m), 'utf8', cb)));
|
|
|
|
return rpc;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Client-side handle for the testing hooks: all remote ITestingHooks methods plus a
 * close() that ends the underlying socket connection.
 */
export interface TestingHooksClient extends ITestingHooks {
  close(): void;
}
|
|
|
|
|
|
|
|
export async function connectTestingHooks(socketPath: string): Promise<TestingHooksClient> {
|
|
|
|
const socket = await connect(socketPath);
|
|
|
|
const rpc = connectToSocket(new Rpc({logger: {}}), socket);
|
2021-04-26 21:54:09 +00:00
|
|
|
return Object.assign(rpc.getStub<TestingHooks>('testing', tiCheckers.ITestingHooks), {
|
2020-07-21 13:20:51 +00:00
|
|
|
close: () => socket.end(),
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Server-side implementation of ITestingHooks, giving tests control over a running
 * Grist server cluster: the home server (_server), its Comm instance, and any
 * doc-worker FlexServers. Most methods log their invocation for test debugging.
 */
export class TestingHooks implements ITestingHooks {
  constructor(
    private _port: number,          // port reported by getPort()
    private _comm: Comm,            // Comm of the home server
    private _server: FlexServer,    // the home FlexServer
    private _workerServers: FlexServer[]  // doc-worker servers, also affected by many hooks
  ) {}

  // Returns the port the home server is actually listening on.
  public async getOwnPort(): Promise<number> {
    log.info("TestingHooks.getOwnPort called");
    return this._server.getOwnPort();
  }

  // Returns the port value this TestingHooks was constructed with.
  public async getPort(): Promise<number> {
    log.info("TestingHooks.getPort called");
    return this._port;
  }

  // Sets (or clears, when profile is null) the user profile on the login session
  // identified by the given grist_sid cookie value, optionally scoped to an org,
  // and invalidates any in-memory session cache entries as needed.
  public async setLoginSessionProfile(gristSidCookie: string, profile: UserProfile|null, org?: string): Promise<void> {
    log.info("TestingHooks.setLoginSessionProfile called with", gristSidCookie, profile, org);
    const sessionId = this._comm.getSessionIdFromCookie(gristSidCookie);
    const scopedSession = this._comm.getOrCreateSession(sessionId as string, {org});
    // updateUserProfile needs a Request, but only nominally here; an empty stub suffices
    // for this test path.
    const req = {} as Request;
    await scopedSession.updateUserProfile(req, profile);
    this._server.getSessions().clearCacheIfNeeded({email: profile?.email, org});
  }

  // Overrides the version string the servers report to clients (null to reset),
  // applied to the home server's Comm and every worker's Comm.
  public async setServerVersion(version: string|null): Promise<void> {
    log.info("TestingHooks.setServerVersion called with", version);
    this._comm.setServerVersion(version);
    for (const server of this._workerServers) {
      server.getComm().setServerVersion(version);
    }
  }

  // Forcibly destroys all connected websocket clients on all servers.
  public async disconnectClients(): Promise<void> {
    log.info("TestingHooks.disconnectClients called");
    this._comm.destroyAllClients();
    for (const server of this._workerServers) {
      server.getComm().destroyAllClients();
    }
  }

  // Simulates a server shutdown at the Comm level on all servers.
  public async commShutdown(): Promise<void> {
    log.info("TestingHooks.commShutdown called");
    await this._comm.testServerShutdown();
    for (const server of this._workerServers) {
      await server.getComm().testServerShutdown();
    }
  }

  // Simulates a server restart at the Comm level on all servers.
  public async commRestart(): Promise<void> {
    log.info("TestingHooks.commRestart called");
    await this._comm.testServerRestart();
    for (const server of this._workerServers) {
      await server.getComm().testServerRestart();
    }
  }

  // Set how long new clients will persist after disconnection.
  // Returns the previous value.
  public async commSetClientPersistence(ttlMs: number): Promise<number> {
    log.info("TestingHooks.commSetClientPersistence called with", ttlMs);
    const prev = CommClientDeps.clientRemovalTimeoutMs;
    CommClientDeps.clientRemovalTimeoutMs = ttlMs;
    return prev;
  }

  // Set one or more limits that Client.ts can use for JSON responses, in bytes.
  // Returns the old limits.
  // - totalSize limits total amount of memory Client allocates to JSON response
  // - jsonResponseReservation sets the initial amount reserved for each response
  // - maxReservationSize monkey-patches reservation logic to fail when reservation exceeds the
  //   given amount, to simulate unexpected failures.
  public async commSetClientJsonMemoryLimits(limits: ClientJsonMemoryLimits): Promise<ClientJsonMemoryLimits> {
    log.info("TestingHooks.commSetClientJsonMemoryLimits called with", limits);
    const previous: ClientJsonMemoryLimits = {};
    if (limits.totalSize !== undefined) {
      previous.totalSize = Client.jsonMemoryPool.setTotalSize(limits.totalSize);
    }
    if (limits.jsonResponseReservation !== undefined) {
      previous.jsonResponseReservation = CommClientDeps.jsonResponseReservation;
      CommClientDeps.jsonResponseReservation = limits.jsonResponseReservation;
    }
    if (limits.maxReservationSize !== undefined) {
      // NOTE: the previous throw-limit is not recoverable, so null is reported here
      // regardless of any earlier monkey-patch.
      previous.maxReservationSize = null;
      // The unpatched method lives on the prototype; grabbing it there lets us both
      // restore it (by deleting the own-property shadow via reassignment) and call
      // through to it from the patched version.
      const orig = Object.getPrototypeOf(Client.jsonMemoryPool)._updateReserved;
      if (limits.maxReservationSize === null) {
        (Client.jsonMemoryPool as any)._updateReserved = orig;
      } else {
        // Monkey-patch reservation logic to simulate unexpected failures.
        const jsonMemoryThrowLimit = limits.maxReservationSize;
        function updateReservedWithLimit(this: typeof Client.jsonMemoryPool, sizeDelta: number) {
          const newSize: number = (this as any)._reservedSize + sizeDelta;
          log.warn(`TestingHooks _updateReserved reserving ${newSize}, limit ${jsonMemoryThrowLimit}`);
          if (newSize > jsonMemoryThrowLimit) {
            throw new Error(`TestingHooks: hit JsonMemoryThrowLimit: ${newSize} > ${jsonMemoryThrowLimit}`);
          }
          return orig.call(this, sizeDelta);
        }
        (Client.jsonMemoryPool as any)._updateReserved = updateReservedWithLimit;
      }
    }
    return previous;
  }

  // Closes all open documents on the home server and all workers.
  public async closeDocs(): Promise<void> {
    log.info("TestingHooks.closeDocs called");
    if (this._server) {
      await this._server.closeDocs();
    }
    for (const server of this._workerServers) {
      await server.closeDocs();
    }
  }

  // Makes a single doc worker (identified by id or public URL) start listening again
  // ('active'), stop listening ('inactive'), or stop as if crashed ('crash').
  // Throws if the workerId does not match exactly one worker.
  public async setDocWorkerActivation(workerId: string, active: 'active'|'inactive'|'crash'):
      Promise<void> {
    log.info("TestingHooks.setDocWorkerActivation called with", workerId, active);
    // Match by worker id, exact public URL, or - for localhost URLs - by host:port alone,
    // so tests can pass either form of a local worker's address.
    const matches = this._workerServers.filter(
      server => server.worker.id === workerId ||
        server.worker.publicUrl === workerId ||
        (server.worker.publicUrl.startsWith('http://localhost:') &&
          workerId.startsWith('http://localhost:') &&
          new URL(server.worker.publicUrl).host === new URL(workerId).host));
    if (matches.length !== 1) {
      throw new Error(`could not find worker: ${workerId}`);
    }
    const server = matches[0];
    switch (active) {
      case 'active':
        await server.restartListening();
        break;
      case 'inactive':
        await server.stopListening();
        break;
      case 'crash':
        await server.stopListening('crash');
        break;
    }
  }

  // Flushes cached document-authorization results on all servers, forcing fresh
  // permission checks on the next document access.
  public async flushAuthorizerCache(): Promise<void> {
    log.info("TestingHooks.flushAuthorizerCache called");
    this._server.getHomeDBManager().flushDocAuthCache();
    for (const server of this._workerServers) {
      server.getHomeDBManager().flushDocAuthCache();
    }
  }

  // Flushes open documents to storage on all doc workers.
  public async flushDocs(): Promise<void> {
    log.info("TestingHooks.flushDocs called");
    for (const server of this._workerServers) {
      await server.testFlushDocs();
    }
  }

  // Returns a Map from docId to number of connected clients for all open docs across servers,
  // but represented as an array of pairs, to be serializable.
  public async getDocClientCounts(): Promise<Array<[string, number]>> {
    log.info("TestingHooks.getDocClientCounts called");
    const counts = new Map<string, number>();
    for (const server of [this._server, ...this._workerServers]) {
      const c = await server.getDocClientCounts();
      // Sum counts per docId across all servers.
      for (const [key, val] of c) {
        counts.set(key, (counts.get(key) || 0) + val);
      }
    }
    return Array.from(counts);
  }

  // Sets the seconds for ActiveDoc timeout, and returns the previous value.
  public async setActiveDocTimeout(seconds: number): Promise<number> {
    const prev = ActiveDocDeps.ACTIVEDOC_TIMEOUT;
    ActiveDocDeps.ACTIVEDOC_TIMEOUT = seconds;
    return prev;
  }

  // Sets env vars for the DiscourseConnect module, and returns the previous value.
  public async setDiscourseConnectVar(varName: string, value: string|null): Promise<string|null> {
    const key = varName as keyof typeof DiscourseConnectDeps;
    const prev = DiscourseConnectDeps[key] || null;
    if (value == null) {
      // null clears the variable entirely rather than setting it to a falsy string.
      delete DiscourseConnectDeps[key];
    } else {
      DiscourseConnectDeps[key] = value;
    }
    return prev;
  }

  // Points the server's widget repository at a test-controlled URL. Only supported
  // when the repository is a WidgetRepositoryImpl.
  public async setWidgetRepositoryUrl(url: string): Promise<void> {
    const repo = this._server.getWidgetRepository() as WidgetRepositoryImpl;
    if (!(repo instanceof WidgetRepositoryImpl)) {
      throw new Error("Unsupported widget repository");
    }
    repo.testOverrideUrl(url);
  }

  // Returns this process's current memory usage, as reported by Node.
  public async getMemoryUsage(): Promise<NodeJS.MemoryUsage> {
    return process.memoryUsage();
  }

  // This is for testing the handling of unhandled exceptions and rejections.
  public async tickleUnhandledErrors(errType: 'exception'|'rejection'|'error-event'): Promise<void> {
    if (errType === 'exception') {
      // Throw outside any promise chain, so it surfaces as an uncaught exception.
      setTimeout(() => { throw new Error("TestingHooks: Fake exception"); }, 0);
    } else if (errType === 'rejection') {
      // An un-awaited rejected promise, surfacing as an unhandled rejection.
      void(Promise.resolve(null).then(() => { throw new Error("TestingHooks: Fake rejection"); }));
    } else if (errType === 'error-event') {
      // An 'error' event with no listener attached, which Node treats as fatal.
      const emitter = new EventEmitter();
      setTimeout(() => emitter.emit('error', new Error('TestingHooks: Fake error-event')), 0);
    } else {
      throw new Error(`Unrecognized errType ${errType}`);
    }
  }
}
|