(core) Add LogMethods helper and use it for more JSON data in logs. Reduce unhelpful logging.

Summary:
- Sharing, Client, DocClients, HostingStorageManager all include available info.
- In HostingStorageManager, log numSteps and maxStepTimeMs, in case that helps
  debug the SQLITE_BUSY problem.
- Replace some action-bundle logging with a JSON version aggregating some info.
- Skip logging detailed list of actions in production.

Test Plan: Tested manually by eyeballing log output in dev environment.

Reviewers: paulfitz

Reviewed By: paulfitz

Differential Revision: https://phab.getgrist.com/D3086
This commit is contained in:
Dmitry S 2021-10-25 09:29:06 -04:00
parent 8eeeae7fbf
commit f2f4fe0eca
9 changed files with 184 additions and 122 deletions

View File

@ -79,6 +79,10 @@ export function isRenameTable(act: DocAction): act is RenameTable { return act[0
const SCHEMA_ACTIONS = new Set(['AddTable', 'RemoveTable', 'RenameTable', 'AddColumn', const SCHEMA_ACTIONS = new Set(['AddTable', 'RemoveTable', 'RenameTable', 'AddColumn',
'RemoveColumn', 'RenameColumn', 'ModifyColumn']); 'RemoveColumn', 'RenameColumn', 'ModifyColumn']);
// The set of names of all data actions (as opposed to schema actions), covering both
// single-row and bulk variants.
const DATA_ACTIONS = new Set(['AddRecord', 'RemoveRecord', 'UpdateRecord', 'BulkAddRecord',
'BulkRemoveRecord', 'BulkUpdateRecord', 'ReplaceTableData', 'TableData']);
/** /**
* Determines whether a given action is a schema action or not. * Determines whether a given action is a schema action or not.
*/ */
@ -87,6 +91,13 @@ export function isSchemaAction(action: DocAction):
return SCHEMA_ACTIONS.has(action[0]); return SCHEMA_ACTIONS.has(action[0]);
} }
/**
 * Type guard: determines whether a given action is a data action (one that adds,
 * removes, or updates records, or replaces table data), narrowing its type accordingly.
 */
export function isDataAction(action: DocAction):
  action is AddRecord | RemoveRecord | UpdateRecord |
  BulkAddRecord | BulkRemoveRecord | BulkUpdateRecord |
  ReplaceTableData | TableDataAction {
  // The first element of every DocAction tuple is the action name.
  const actionName = action[0];
  return DATA_ACTIONS.has(actionName);
}
/** /**
* Returns the tableId from the action. * Returns the tableId from the action.
*/ */
@ -149,6 +160,12 @@ export function getSelectionDesc(action: UserAction, optExcludeVals: boolean): s
return `table ${table}, row${s} ${rows}; ${columns.join(", ")}`; return `table ${table}, row${s} ${rows}; ${columns.join(", ")}`;
} }
/**
 * Returns the number of rows affected by a DocAction: 0 for non-data actions,
 * the length of the rowId list for bulk actions, and 1 for single-row actions.
 */
export function getNumRows(action: DocAction): number {
  if (!isDataAction(action)) {
    return 0;
  }
  // Position 2 holds either an array of rowIds (bulk actions) or a single rowId.
  const rowIds = action[2];
  return Array.isArray(rowIds) ? rowIds.length : 1;
}
// Convert from TableColValues (used by DocStorage and external APIs) to TableDataAction (used // Convert from TableColValues (used by DocStorage and external APIs) to TableDataAction (used
// mainly by the sandbox). // mainly by the sandbox).
export function toTableDataAction(tableId: string, colValues: TableColValues): TableDataAction { export function toTableDataAction(tableId: string, colValues: TableColValues): TableDataAction {

View File

@ -63,6 +63,7 @@ import {makeForkIds} from 'app/server/lib/idUtils';
import {GRIST_DOC_SQL, GRIST_DOC_WITH_TABLE1_SQL} from 'app/server/lib/initialDocSql'; import {GRIST_DOC_SQL, GRIST_DOC_WITH_TABLE1_SQL} from 'app/server/lib/initialDocSql';
import {ISandbox} from 'app/server/lib/ISandbox'; import {ISandbox} from 'app/server/lib/ISandbox';
import * as log from 'app/server/lib/log'; import * as log from 'app/server/lib/log';
import {LogMethods} from "app/server/lib/LogMethods";
import {shortDesc} from 'app/server/lib/shortDesc'; import {shortDesc} from 'app/server/lib/shortDesc';
import {TableMetadataLoader} from 'app/server/lib/TableMetadataLoader'; import {TableMetadataLoader} from 'app/server/lib/TableMetadataLoader';
import {fetchURL, FileUploadInfo, globalUploadSet, UploadInfo} from 'app/server/lib/uploads'; import {fetchURL, FileUploadInfo, globalUploadSet, UploadInfo} from 'app/server/lib/uploads';
@ -140,6 +141,7 @@ export class ActiveDoc extends EventEmitter {
// result). // result).
protected _modificationLock: Mutex = new Mutex(); protected _modificationLock: Mutex = new Mutex();
private _log = new LogMethods('ActiveDoc ', (s: OptDocSession) => this.getLogMeta(s));
private _triggers: DocTriggers; private _triggers: DocTriggers;
private _dataEngine: Promise<ISandbox>|undefined; private _dataEngine: Promise<ISandbox>|undefined;
private _activeDocImport: ActiveDocImport; private _activeDocImport: ActiveDocImport;
@ -194,7 +196,7 @@ export class ActiveDoc extends EventEmitter {
// TODO: cache engine requirement for doc in home db so we can retain this parallelism // TODO: cache engine requirement for doc in home db so we can retain this parallelism
// when offering a choice of data engines. // when offering a choice of data engines.
if (!supportsEngineChoices()) { if (!supportsEngineChoices()) {
this._getEngine().catch(e => this.logError({client: null}, `engine for ${docName} failed to launch: ${e}`)); this._getEngine().catch(e => this._log.error({client: null}, `engine for ${docName} failed to launch: ${e}`));
} }
this._activeDocImport = new ActiveDocImport(this); this._activeDocImport = new ActiveDocImport(this);
@ -215,16 +217,10 @@ export class ActiveDoc extends EventEmitter {
return this._granularAccess.getUserOverride(docSession); return this._granularAccess.getUserOverride(docSession);
} }
// Helpers to log a message along with metadata about the request.
public logDebug(s: OptDocSession, msg: string, ...args: any[]) { this._log('debug', s, msg, ...args); }
public logInfo(s: OptDocSession, msg: string, ...args: any[]) { this._log('info', s, msg, ...args); }
public logWarn(s: OptDocSession, msg: string, ...args: any[]) { this._log('warn', s, msg, ...args); }
public logError(s: OptDocSession, msg: string, ...args: any[]) { this._log('error', s, msg, ...args); }
// Constructs metadata for logging, given a Client or an OptDocSession. // Constructs metadata for logging, given a Client or an OptDocSession.
public getLogMeta(docSession: OptDocSession, docMethod?: string): log.ILogMeta { public getLogMeta(docSession: OptDocSession|null, docMethod?: string): log.ILogMeta {
return { return {
...getLogMetaFromDocSession(docSession), ...(docSession ? getLogMetaFromDocSession(docSession) : {}),
docId: this._docName, docId: this._docName,
...(docMethod ? {docMethod} : {}), ...(docMethod ? {docMethod} : {}),
}; };
@ -307,7 +303,7 @@ export class ActiveDoc extends EventEmitter {
// If we had a shutdown scheduled, unschedule it. // If we had a shutdown scheduled, unschedule it.
if (this._inactivityTimer.isEnabled()) { if (this._inactivityTimer.isEnabled()) {
this.logInfo(docSession, "will stay open"); this._log.info(docSession, "will stay open");
this._inactivityTimer.disable(); this._inactivityTimer.disable();
} }
return docSession; return docSession;
@ -319,10 +315,10 @@ export class ActiveDoc extends EventEmitter {
*/ */
public async shutdown(removeThisActiveDoc: boolean = true): Promise<void> { public async shutdown(removeThisActiveDoc: boolean = true): Promise<void> {
const docSession = makeExceptionalDocSession('system'); const docSession = makeExceptionalDocSession('system');
this.logDebug(docSession, "shutdown starting"); this._log.debug(docSession, "shutdown starting");
this._inactivityTimer.disable(); this._inactivityTimer.disable();
if (this.docClients.clientCount() > 0) { if (this.docClients.clientCount() > 0) {
this.logWarn(docSession, `Doc being closed with ${this.docClients.clientCount()} clients left`); this._log.warn(docSession, `Doc being closed with ${this.docClients.clientCount()} clients left`);
await this.docClients.broadcastDocMessage(null, 'docShutdown', null); await this.docClients.broadcastDocMessage(null, 'docShutdown', null);
this.docClients.removeAllClients(); this.docClients.removeAllClients();
} }
@ -361,9 +357,9 @@ export class ActiveDoc extends EventEmitter {
} catch (err) { } catch (err) {
// Initialization errors do not matter at this point. // Initialization errors do not matter at this point.
} }
this.logDebug(docSession, "shutdown complete"); this._log.debug(docSession, "shutdown complete");
} catch (err) { } catch (err) {
this.logError(docSession, "failed to shutdown some resources", err); this._log.error(docSession, "failed to shutdown some resources", err);
} }
} }
@ -373,7 +369,7 @@ export class ActiveDoc extends EventEmitter {
*/ */
@ActiveDoc.keepDocOpen @ActiveDoc.keepDocOpen
public async createEmptyDocWithDataEngine(docSession: OptDocSession): Promise<ActiveDoc> { public async createEmptyDocWithDataEngine(docSession: OptDocSession): Promise<ActiveDoc> {
this.logDebug(docSession, "createEmptyDocWithDataEngine"); this._log.debug(docSession, "createEmptyDocWithDataEngine");
await this._docManager.storageManager.prepareToCreateDoc(this.docName); await this._docManager.storageManager.prepareToCreateDoc(this.docName);
await this.docStorage.createFile(); await this.docStorage.createFile();
await this._rawPyCall('load_empty'); await this._rawPyCall('load_empty');
@ -417,7 +413,7 @@ export class ActiveDoc extends EventEmitter {
skipInitialTable?: boolean, // If set, and document is new, "Table1" will not be added. skipInitialTable?: boolean, // If set, and document is new, "Table1" will not be added.
}): Promise<ActiveDoc> { }): Promise<ActiveDoc> {
const startTime = Date.now(); const startTime = Date.now();
this.logDebug(docSession, "loadDoc"); this._log.debug(docSession, "loadDoc");
try { try {
const isNew: boolean = options?.forceNew || await this._docManager.storageManager.prepareLocalDoc(this.docName); const isNew: boolean = options?.forceNew || await this._docManager.storageManager.prepareLocalDoc(this.docName);
if (isNew) { if (isNew) {
@ -585,7 +581,7 @@ export class ActiveDoc extends EventEmitter {
// If no more clients, schedule a shutdown. // If no more clients, schedule a shutdown.
if (this.docClients.clientCount() === 0) { if (this.docClients.clientCount() === 0) {
this.logInfo(docSession, "will self-close in %d ms", this._inactivityTimer.getDelay()); this._log.info(docSession, "will self-close in %d ms", this._inactivityTimer.getDelay());
this._inactivityTimer.enable(); this._inactivityTimer.enable();
} }
} }
@ -646,7 +642,7 @@ export class ActiveDoc extends EventEmitter {
// and serve the attachment. // and serve the attachment.
const data = await this.docStorage.getFileData(fileIdent); const data = await this.docStorage.getFileData(fileIdent);
if (!data) { throw new ApiError("Invalid attachment identifier", 404); } if (!data) { throw new ApiError("Invalid attachment identifier", 404); }
this.logInfo(docSession, "getAttachment: %s -> %s bytes", fileIdent, data.length); this._log.info(docSession, "getAttachment: %s -> %s bytes", fileIdent, data.length);
return data; return data;
} }
@ -654,7 +650,7 @@ export class ActiveDoc extends EventEmitter {
* Fetches the meta tables to return to the client when first opening a document. * Fetches the meta tables to return to the client when first opening a document.
*/ */
public async fetchMetaTables(docSession: OptDocSession) { public async fetchMetaTables(docSession: OptDocSession) {
this.logInfo(docSession, "fetchMetaTables"); this._log.info(docSession, "fetchMetaTables");
if (!this.docData) { throw new Error("No doc data"); } if (!this.docData) { throw new Error("No doc data"); }
// Get metadata from local cache rather than data engine, so that we can // Get metadata from local cache rather than data engine, so that we can
// still get it even if data engine is busy calculating. // still get it even if data engine is busy calculating.
@ -749,7 +745,7 @@ export class ActiveDoc extends EventEmitter {
const wantFull = waitForFormulas || query.tableId.startsWith('_grist_') || const wantFull = waitForFormulas || query.tableId.startsWith('_grist_') ||
this._granularAccess.getReadPermission(tableAccess) === 'mixed'; this._granularAccess.getReadPermission(tableAccess) === 'mixed';
const onDemand = this._onDemandActions.isOnDemand(query.tableId); const onDemand = this._onDemandActions.isOnDemand(query.tableId);
this.logInfo(docSession, "fetchQuery %s %s", JSON.stringify(query), this._log.info(docSession, "fetchQuery %s %s", JSON.stringify(query),
onDemand ? "(onDemand)" : "(regular)"); onDemand ? "(onDemand)" : "(regular)");
let data: TableDataAction; let data: TableDataAction;
if (onDemand) { if (onDemand) {
@ -773,7 +769,7 @@ export class ActiveDoc extends EventEmitter {
data = cloneDeep(data!); // Clone since underlying fetch may be cached and shared. data = cloneDeep(data!); // Clone since underlying fetch may be cached and shared.
await this._granularAccess.filterData(docSession, data); await this._granularAccess.filterData(docSession, data);
} }
this.logInfo(docSession, "fetchQuery -> %d rows, cols: %s", this._log.info(docSession, "fetchQuery -> %d rows, cols: %s",
data![2].length, Object.keys(data![3]).join(", ")); data![2].length, Object.keys(data![3]).join(", "));
return data!; return data!;
} }
@ -784,7 +780,7 @@ export class ActiveDoc extends EventEmitter {
* @returns {Promise} Promise for a string representing the generated table schema. * @returns {Promise} Promise for a string representing the generated table schema.
*/ */
public async fetchTableSchema(docSession: DocSession): Promise<string> { public async fetchTableSchema(docSession: DocSession): Promise<string> {
this.logInfo(docSession, "fetchTableSchema(%s)", docSession); this._log.info(docSession, "fetchTableSchema(%s)", docSession);
// Permit code view if user can read everything, or can download/copy (perhaps // Permit code view if user can read everything, or can download/copy (perhaps
// via an exceptional permission for sample documents) // via an exceptional permission for sample documents)
if (!(await this._granularAccess.canReadEverything(docSession) || if (!(await this._granularAccess.canReadEverything(docSession) ||
@ -800,7 +796,7 @@ export class ActiveDoc extends EventEmitter {
* docActions that affect this query's results. * docActions that affect this query's results.
*/ */
public async useQuerySet(docSession: OptDocSession, query: ServerQuery): Promise<QueryResult> { public async useQuerySet(docSession: OptDocSession, query: ServerQuery): Promise<QueryResult> {
this.logInfo(docSession, "useQuerySet(%s, %s)", docSession, query); this._log.info(docSession, "useQuerySet(%s, %s)", docSession, query);
// TODO implement subscribing to the query. // TODO implement subscribing to the query.
// - Convert tableId+colIds to TableData/ColData references // - Convert tableId+colIds to TableData/ColData references
// - Return a unique identifier for unsubscribing // - Return a unique identifier for unsubscribing
@ -817,7 +813,7 @@ export class ActiveDoc extends EventEmitter {
* docActions relevant only to this query. * docActions relevant only to this query.
*/ */
public async disposeQuerySet(docSession: DocSession, querySubId: number): Promise<void> { public async disposeQuerySet(docSession: DocSession, querySubId: number): Promise<void> {
this.logInfo(docSession, "disposeQuerySet(%s, %s)", docSession, querySubId); this._log.info(docSession, "disposeQuerySet(%s, %s)", docSession, querySubId);
// TODO To-be-implemented // TODO To-be-implemented
} }
@ -834,7 +830,7 @@ export class ActiveDoc extends EventEmitter {
optTableId?: string): Promise<number[]> { optTableId?: string): Promise<number[]> {
// This could leak information about private tables, so check for permission. // This could leak information about private tables, so check for permission.
if (!await this._granularAccess.canScanData(docSession)) { return []; } if (!await this._granularAccess.canScanData(docSession)) { return []; }
this.logInfo(docSession, "findColFromValues(%s, %s, %s)", docSession, values, n); this._log.info(docSession, "findColFromValues(%s, %s, %s)", docSession, values, n);
await this.waitForInitialization(); await this.waitForInitialization();
return this._pyCall('find_col_from_values', values, n, optTableId); return this._pyCall('find_col_from_values', values, n, optTableId);
} }
@ -883,7 +879,7 @@ export class ActiveDoc extends EventEmitter {
public async getFormulaError(docSession: DocSession, tableId: string, colId: string, public async getFormulaError(docSession: DocSession, tableId: string, colId: string,
rowId: number): Promise<CellValue> { rowId: number): Promise<CellValue> {
if (!await this._granularAccess.hasTableAccess(docSession, tableId)) { return null; } if (!await this._granularAccess.hasTableAccess(docSession, tableId)) { return null; }
this.logInfo(docSession, "getFormulaError(%s, %s, %s, %s)", this._log.info(docSession, "getFormulaError(%s, %s, %s, %s)",
docSession, tableId, colId, rowId); docSession, tableId, colId, rowId);
await this.waitForInitialization(); await this.waitForInitialization();
return this._pyCall('get_formula_error', tableId, colId, rowId); return this._pyCall('get_formula_error', tableId, colId, rowId);
@ -978,7 +974,7 @@ export class ActiveDoc extends EventEmitter {
} }
public async renameDocTo(docSession: OptDocSession, newName: string): Promise<void> { public async renameDocTo(docSession: OptDocSession, newName: string): Promise<void> {
this.logDebug(docSession, 'renameDoc', newName); this._log.debug(docSession, 'renameDoc', newName);
await this.docStorage.renameDocTo(newName); await this.docStorage.renameDocTo(newName);
this._docName = newName; this._docName = newName;
} }
@ -1314,7 +1310,7 @@ export class ActiveDoc extends EventEmitter {
const versionCol = docInfo.schemaVersion; const versionCol = docInfo.schemaVersion;
const docSchemaVersion = (versionCol && versionCol.length === 1 ? versionCol[0] : 0) as number; const docSchemaVersion = (versionCol && versionCol.length === 1 ? versionCol[0] : 0) as number;
if (docSchemaVersion < schemaVersion) { if (docSchemaVersion < schemaVersion) {
this.logInfo(docSession, "Doc needs migration from v%s to v%s", docSchemaVersion, schemaVersion); this._log.info(docSession, "Doc needs migration from v%s to v%s", docSchemaVersion, schemaVersion);
await this._beforeMigration(docSession, 'schema', docSchemaVersion, schemaVersion); await this._beforeMigration(docSession, 'schema', docSchemaVersion, schemaVersion);
let success: boolean = false; let success: boolean = false;
try { try {
@ -1330,7 +1326,7 @@ export class ActiveDoc extends EventEmitter {
// migration action, but that requires merging and still may not be safe. For now, doing // migration action, but that requires merging and still may not be safe. For now, doing
// nothing seems best, as long as we follow the recommendations in migrations.py (never // nothing seems best, as long as we follow the recommendations in migrations.py (never
// remove/modify/rename metadata tables or columns, or change their meaning). // remove/modify/rename metadata tables or columns, or change their meaning).
this.logWarn(docSession, "Doc is newer (v%s) than this version of Grist (v%s); " + this._log.warn(docSession, "Doc is newer (v%s) than this version of Grist (v%s); " +
"proceeding with fingers crossed", docSchemaVersion, schemaVersion); "proceeding with fingers crossed", docSchemaVersion, schemaVersion);
} }
@ -1350,10 +1346,10 @@ export class ActiveDoc extends EventEmitter {
const onDemandMap = zipObject(tablesParsed.tableId as string[], tablesParsed.onDemand); const onDemandMap = zipObject(tablesParsed.tableId as string[], tablesParsed.onDemand);
const onDemandNames = remove(tableNames, (t) => onDemandMap[t]); const onDemandNames = remove(tableNames, (t) => onDemandMap[t]);
this.logInfo(docSession, "Loading %s normal tables, skipping %s on-demand tables", this._log.debug(docSession, "Loading %s normal tables, skipping %s on-demand tables",
tableNames.length, onDemandNames.length); tableNames.length, onDemandNames.length);
this.logDebug(docSession, "Normal tables: %s", tableNames.join(", ")); this._log.debug(docSession, "Normal tables: %s", tableNames.join(", "));
this.logDebug(docSession, "On-demand tables: %s", onDemandNames.join(", ")); this._log.debug(docSession, "On-demand tables: %s", onDemandNames.join(", "));
return [tableNames, onDemandNames]; return [tableNames, onDemandNames];
} }
@ -1380,7 +1376,7 @@ export class ActiveDoc extends EventEmitter {
options: ApplyUAOptions = {}): Promise<ApplyUAResult> { options: ApplyUAOptions = {}): Promise<ApplyUAResult> {
const client = docSession.client; const client = docSession.client;
this.logDebug(docSession, "_applyUserActions(%s, %s)", client, shortDesc(actions)); this._log.debug(docSession, "_applyUserActions(%s, %s)", client, shortDesc(actions));
this._inactivityTimer.ping(); // The doc is in active use; ping it to stay open longer. this._inactivityTimer.ping(); // The doc is in active use; ping it to stay open longer.
if (options?.bestEffort) { if (options?.bestEffort) {
@ -1397,7 +1393,7 @@ export class ActiveDoc extends EventEmitter {
const result: ApplyUAResult = await new Promise<ApplyUAResult>( const result: ApplyUAResult = await new Promise<ApplyUAResult>(
(resolve, reject) => (resolve, reject) =>
this._sharing.addUserAction({action, docSession, resolve, reject})); this._sharing.addUserAction({action, docSession, resolve, reject}));
this.logDebug(docSession, "_applyUserActions returning %s", shortDesc(result)); this._log.debug(docSession, "_applyUserActions returning %s", shortDesc(result));
if (result.isModification) { if (result.isModification) {
this._fetchCache.clear(); // This could be more nuanced. this._fetchCache.clear(); // This could be more nuanced.
@ -1413,7 +1409,7 @@ export class ActiveDoc extends EventEmitter {
private async _createDocFile(docSession: OptDocSession, options?: { private async _createDocFile(docSession: OptDocSession, options?: {
skipInitialTable?: boolean, // If set, "Table1" will not be added. skipInitialTable?: boolean, // If set, "Table1" will not be added.
}): Promise<void> { }): Promise<void> {
this.logDebug(docSession, "createDoc"); this._log.debug(docSession, "createDoc");
await this._docManager.storageManager.prepareToCreateDoc(this.docName); await this._docManager.storageManager.prepareToCreateDoc(this.docName);
await this.docStorage.createFile(); await this.docStorage.createFile();
const sql = options?.skipInitialTable ? GRIST_DOC_SQL : GRIST_DOC_WITH_TABLE1_SQL; const sql = options?.skipInitialTable ? GRIST_DOC_SQL : GRIST_DOC_WITH_TABLE1_SQL;
@ -1461,7 +1457,7 @@ export class ActiveDoc extends EventEmitter {
const checksum = await checksumFile(fileData.absPath); const checksum = await checksumFile(fileData.absPath);
const fileIdent = checksum + fileData.ext; const fileIdent = checksum + fileData.ext;
const ret: boolean = await this.docStorage.findOrAttachFile(fileData.absPath, fileIdent); const ret: boolean = await this.docStorage.findOrAttachFile(fileData.absPath, fileIdent);
this.logInfo(docSession, "addAttachment: file %s (image %sx%s) %s", fileIdent, this._log.info(docSession, "addAttachment: file %s (image %sx%s) %s", fileIdent,
dimensions.width, dimensions.height, ret ? "attached" : "already exists"); dimensions.width, dimensions.height, ret ? "attached" : "already exists");
return ['AddRecord', '_grist_Attachments', null, { return ['AddRecord', '_grist_Attachments', null, {
fileIdent, fileIdent,
@ -1506,7 +1502,7 @@ export class ActiveDoc extends EventEmitter {
// If a new migration needs this flag, more work is needed. The current approach creates // If a new migration needs this flag, more work is needed. The current approach creates
// more memory pressure than usual since full data is present in memory at once both in node // more memory pressure than usual since full data is present in memory at once both in node
// and in Python; and it doesn't skip onDemand tables. This is liable to cause crashes. // and in Python; and it doesn't skip onDemand tables. This is liable to cause crashes.
this.logWarn(docSession, "_migrate: retrying with all tables"); this._log.warn(docSession, "_migrate: retrying with all tables");
for (const tableName of tableNames) { for (const tableName of tableNames) {
if (!tableData[tableName] && !tableName.startsWith('_gristsys_')) { if (!tableData[tableName] && !tableName.startsWith('_gristsys_')) {
tableData[tableName] = await this.docStorage.fetchTable(tableName); tableData[tableName] = await this.docStorage.fetchTable(tableName);
@ -1518,10 +1514,10 @@ export class ActiveDoc extends EventEmitter {
const processedTables = Object.keys(tableData); const processedTables = Object.keys(tableData);
const numSchema = countIf(processedTables, t => t.startsWith("_grist_")); const numSchema = countIf(processedTables, t => t.startsWith("_grist_"));
const numUser = countIf(processedTables, t => !t.startsWith("_grist_")); const numUser = countIf(processedTables, t => !t.startsWith("_grist_"));
this.logInfo(docSession, "_migrate: applying %d migration actions (processed %s schema, %s user tables)", this._log.info(docSession, "_migrate: applying %d migration actions (processed %s schema, %s user tables)",
docActions.length, numSchema, numUser); docActions.length, numSchema, numUser);
docActions.forEach((action, i) => this.logInfo(docSession, "_migrate: docAction %s: %s", i, shortDesc(action))); docActions.forEach((action, i) => this._log.info(docSession, "_migrate: docAction %s: %s", i, shortDesc(action)));
await this.docStorage.execTransaction(() => this.docStorage.applyStoredActions(docActions)); await this.docStorage.execTransaction(() => this.docStorage.applyStoredActions(docActions));
} }
@ -1529,7 +1525,7 @@ export class ActiveDoc extends EventEmitter {
* Load the specified tables into the data engine. * Load the specified tables into the data engine.
*/ */
private async _loadTables(docSession: OptDocSession, tableNames: string[]) { private async _loadTables(docSession: OptDocSession, tableNames: string[]) {
this.logDebug(docSession, "loading %s tables: %s", tableNames.length, this._log.debug(docSession, "loading %s tables: %s", tableNames.length,
tableNames.join(", ")); tableNames.join(", "));
// Pass the resulting array to `map`, which allows parallel processing of the tables. Database // Pass the resulting array to `map`, which allows parallel processing of the tables. Database
// and DataEngine may still do things serially, but it allows them to be busy simultaneously. // and DataEngine may still do things serially, but it allows them to be busy simultaneously.
@ -1545,7 +1541,7 @@ export class ActiveDoc extends EventEmitter {
* The loader can be directed to stream the tables on to the engine. * The loader can be directed to stream the tables on to the engine.
*/ */
private _startLoadingTables(docSession: OptDocSession, tableNames: string[]) { private _startLoadingTables(docSession: OptDocSession, tableNames: string[]) {
this.logDebug(docSession, "starting to load %s tables: %s", tableNames.length, this._log.debug(docSession, "starting to load %s tables: %s", tableNames.length,
tableNames.join(", ")); tableNames.join(", "));
for (const tableId of tableNames) { for (const tableId of tableNames) {
this._tableMetadataLoader.startFetchingTable(tableId); this._tableMetadataLoader.startFetchingTable(tableId);
@ -1597,11 +1593,11 @@ export class ActiveDoc extends EventEmitter {
// took longer, scale it up proportionately. // took longer, scale it up proportionately.
const closeTimeout = Math.max(loadMs, 1000) * Deps.ACTIVEDOC_TIMEOUT; const closeTimeout = Math.max(loadMs, 1000) * Deps.ACTIVEDOC_TIMEOUT;
this._inactivityTimer.setDelay(closeTimeout); this._inactivityTimer.setDelay(closeTimeout);
this.logDebug(docSession, `loaded in ${loadMs} ms, InactivityTimer set to ${closeTimeout} ms`); this._log.debug(docSession, `loaded in ${loadMs} ms, InactivityTimer set to ${closeTimeout} ms`);
return true; return true;
} catch (err) { } catch (err) {
if (!this._shuttingDown) { if (!this._shuttingDown) {
this.logWarn(docSession, "_finishInitialization stopped with %s", err); this._log.warn(docSession, "_finishInitialization stopped with %s", err);
} }
this._fullyLoaded = true; this._fullyLoaded = true;
return false; return false;
@ -1639,10 +1635,6 @@ export class ActiveDoc extends EventEmitter {
} }
} }
private _log(level: string, docSession: OptDocSession, msg: string, ...args: any[]) {
log.origLog(level, `ActiveDoc ` + msg, ...args, this.getLogMeta(docSession));
}
/** /**
* Called before a migration. Makes sure a back-up is made. * Called before a migration. Makes sure a back-up is made.
*/ */
@ -1652,7 +1644,7 @@ export class ActiveDoc extends EventEmitter {
const label = `migrate-${versionType}-last-v${currentVersion}-before-v${newVersion}`; const label = `migrate-${versionType}-last-v${currentVersion}-before-v${newVersion}`;
this._docManager.markAsChanged(this); // Give backup current time. this._docManager.markAsChanged(this); // Give backup current time.
const location = await this._docManager.makeBackup(this, label); const location = await this._docManager.makeBackup(this, label);
this.logInfo(docSession, "_beforeMigration: backup made with label %s at %s", label, location); this._log.info(docSession, "_beforeMigration: backup made with label %s at %s", label, location);
this.emit("backupMade", location); this.emit("backupMade", location);
} }

View File

@ -11,6 +11,7 @@ import {Authorizer} from 'app/server/lib/Authorizer';
import {ScopedSession} from 'app/server/lib/BrowserSession'; import {ScopedSession} from 'app/server/lib/BrowserSession';
import {DocSession} from 'app/server/lib/DocSession'; import {DocSession} from 'app/server/lib/DocSession';
import * as log from 'app/server/lib/log'; import * as log from 'app/server/lib/log';
import {LogMethods} from "app/server/lib/LogMethods";
import {shortDesc} from 'app/server/lib/shortDesc'; import {shortDesc} from 'app/server/lib/shortDesc';
import * as crypto from 'crypto'; import * as crypto from 'crypto';
import * as moment from 'moment'; import * as moment from 'moment';
@ -65,6 +66,8 @@ export class Client {
private _session: ScopedSession|null = null; private _session: ScopedSession|null = null;
private _log = new LogMethods('Client ', (s: null) => this.getLogMeta());
// Maps docFDs to DocSession objects. // Maps docFDs to DocSession objects.
private _docFDs: Array<DocSession|null> = []; private _docFDs: Array<DocSession|null> = [];
@ -163,12 +166,12 @@ export class Client {
if (docSession && docSession.activeDoc) { if (docSession && docSession.activeDoc) {
// Note that this indirectly calls to removeDocSession(docSession.fd) // Note that this indirectly calls to removeDocSession(docSession.fd)
docSession.activeDoc.closeDoc(docSession) docSession.activeDoc.closeDoc(docSession)
.catch((e) => { log.warn("%s: error closing docFD %d", this, fd); }); .catch((e) => { this._log.warn(null, "error closing docFD %d", fd); });
count++; count++;
} }
this._docFDs[fd] = null; this._docFDs[fd] = null;
} }
log.debug("%s: closeAllDocs() closed %d doc(s)", this, count); this._log.debug(null, "closeAllDocs() closed %d doc(s)", count);
} }
public interruptConnection() { public interruptConnection() {
@ -190,12 +193,8 @@ export class Client {
const message: string = JSON.stringify(messageObj); const message: string = JSON.stringify(messageObj);
// Log something useful about the message being sent. // Log something useful about the message being sent.
if (messageObj.type) { if (messageObj.error) {
log.info("%s: sending %s: %d bytes", this, messageObj.type, message.length); this._log.warn(null, "responding to #%d ERROR %s", messageObj.reqId, messageObj.error);
} else if (messageObj.error) {
log.warn("%s: responding to #%d ERROR %s", this, messageObj.reqId, messageObj.error);
} else {
log.info("%s: responding to #%d OK: %d bytes", this, messageObj.reqId, message.length);
} }
if (this._websocket) { if (this._websocket) {
@ -207,9 +206,9 @@ export class Client {
// NOTE: if this handler is run after onClose, we could have messages end up out of order. // NOTE: if this handler is run after onClose, we could have messages end up out of order.
// Let's check to make sure. If this can happen, we need to refactor for correct ordering. // Let's check to make sure. If this can happen, we need to refactor for correct ordering.
if (!this._websocket) { if (!this._websocket) {
log.error("%s sendMessage: UNEXPECTED ORDER OF CALLBACKS", this); this._log.error(null, "sendMessage: UNEXPECTED ORDER OF CALLBACKS");
} }
log.warn("%s sendMessage: queuing after send error: %s", this, err.toString()); this._log.warn(null, "sendMessage: queuing after send error: %s", err.toString());
this._missedMessages.push(message); this._missedMessages.push(message);
} }
} else if (this._missedMessages.length < clientMaxMissedMessages) { } else if (this._missedMessages.length < clientMaxMissedMessages) {
@ -217,7 +216,7 @@ export class Client {
this._missedMessages.push(message); this._missedMessages.push(message);
} else { } else {
// Too many messages queued. Boot the client now, to make it reset when/if it reconnects. // Too many messages queued. Boot the client now, to make it reset when/if it reconnects.
log.error("%s sendMessage: too many messages queued; booting client", this); this._log.error(null, "sendMessage: too many messages queued; booting client");
if (this._destroyTimer) { if (this._destroyTimer) {
clearTimeout(this._destroyTimer); clearTimeout(this._destroyTimer);
this._destroyTimer = null; this._destroyTimer = null;
@ -244,17 +243,17 @@ export class Client {
* indicate success or failure. * indicate success or failure.
*/ */
public async onMessage(message: string): Promise<void> { public async onMessage(message: string): Promise<void> {
const clientId = this.clientId;
const request = JSON.parse(message); const request = JSON.parse(message);
if (request.beat) { if (request.beat) {
const profile = this.getProfile();
// this is a heart beat, to keep the websocket alive. No need to reply. // this is a heart beat, to keep the websocket alive. No need to reply.
log.rawInfo('heartbeat', {clientId, counter: this._counter, url: request.url, log.rawInfo('heartbeat', {
docId: request.docId, // caution: trusting client for docId for this purpose. ...this.getLogMeta(),
email: profile?.email, userId: this.getCachedUserId()}); url: request.url,
docId: request.docId, // caution: trusting client for docId for this purpose.
});
return; return;
} else { } else {
log.info("%s: onMessage", this, shortDesc(message)); this._log.info(null, "onMessage", shortDesc(message));
} }
const response: any = {reqId: request.reqId}; const response: any = {reqId: request.reqId};
const method = this._methods[request.method]; const method = this._methods[request.method];
@ -274,7 +273,7 @@ export class Client {
(typeof code === 'string' && code.startsWith('AUTH_NO')) (typeof code === 'string' && code.startsWith('AUTH_NO'))
); );
log.warn("%s: Error %s %s", this, skipStack ? err : err.stack, code || ''); this._log.warn(null, "Error %s %s", skipStack ? err : err.stack, code || '');
response.error = err.message; response.error = err.message;
if (err.code) { if (err.code) {
response.errorCode = err.code; response.errorCode = err.code;

View File

@ -9,7 +9,7 @@ import {Authorizer} from 'app/server/lib/Authorizer';
import {Client} from 'app/server/lib/Client'; import {Client} from 'app/server/lib/Client';
import {sendDocMessage} from 'app/server/lib/Comm'; import {sendDocMessage} from 'app/server/lib/Comm';
import {DocSession, OptDocSession} from 'app/server/lib/DocSession'; import {DocSession, OptDocSession} from 'app/server/lib/DocSession';
import * as log from 'app/server/lib/log'; import {LogMethods} from "app/server/lib/LogMethods";
// Allow tests to impose a serial order for broadcasts if they need that for repeatability. // Allow tests to impose a serial order for broadcasts if they need that for repeatability.
export const Deps = { export const Deps = {
@ -18,6 +18,7 @@ export const Deps = {
export class DocClients { export class DocClients {
private _docSessions: DocSession[] = []; private _docSessions: DocSession[] = [];
private _log = new LogMethods('DocClients ', (s: DocSession|null) => this.activeDoc.getLogMeta(s));
constructor( constructor(
public readonly activeDoc: ActiveDoc public readonly activeDoc: ActiveDoc
@ -36,7 +37,7 @@ export class DocClients {
public addClient(client: Client, authorizer: Authorizer): DocSession { public addClient(client: Client, authorizer: Authorizer): DocSession {
const docSession = client.addDocSession(this.activeDoc, authorizer); const docSession = client.addDocSession(this.activeDoc, authorizer);
this._docSessions.push(docSession); this._docSessions.push(docSession);
log.debug("DocClients (%s) now has %d clients; new client is %s (fd %s)", this.activeDoc.docName, this._log.debug(docSession, "now %d clients; new client is %s (fd %s)",
this._docSessions.length, client.clientId, docSession.fd); this._docSessions.length, client.clientId, docSession.fd);
return docSession; return docSession;
} }
@ -46,11 +47,11 @@ export class DocClients {
* this DocSession. * this DocSession.
*/ */
public removeClient(docSession: DocSession): void { public removeClient(docSession: DocSession): void {
log.debug("DocClients.removeClient", docSession.client.clientId); this._log.debug(docSession, "removeClient", docSession.client.clientId);
docSession.client.removeDocSession(docSession.fd); docSession.client.removeDocSession(docSession.fd);
if (arrayRemove(this._docSessions, docSession)) { if (arrayRemove(this._docSessions, docSession)) {
log.debug("DocClients (%s) now has %d clients", this.activeDoc.docName, this._docSessions.length); this._log.debug(docSession, "now %d clients", this._docSessions.length);
} }
} }
@ -58,7 +59,7 @@ export class DocClients {
* Removes all active clients from this document, i.e. closes all DocSessions. * Removes all active clients from this document, i.e. closes all DocSessions.
*/ */
public removeAllClients(): void { public removeAllClients(): void {
log.debug("DocClients.removeAllClients() removing %s docSessions", this._docSessions.length); this._log.debug(null, "removeAllClients() removing %s docSessions", this._docSessions.length);
const docSessions = this._docSessions.splice(0); const docSessions = this._docSessions.splice(0);
for (const docSession of docSessions) { for (const docSession of docSessions) {
docSession.client.removeDocSession(docSession.fd); docSession.client.removeDocSession(docSession.fd);
@ -66,7 +67,7 @@ export class DocClients {
} }
public interruptAllClients() { public interruptAllClients() {
log.debug("DocClients.interruptAllClients() interrupting %s docSessions", this._docSessions.length); this._log.debug(null, "interruptAllClients() interrupting %s docSessions", this._docSessions.length);
for (const docSession of this._docSessions) { for (const docSession of this._docSessions) {
docSession.client.interruptConnection(); docSession.client.interruptConnection();
} }
@ -116,7 +117,7 @@ export class DocClients {
if (filteredMessageData) { if (filteredMessageData) {
sendDocMessage(target.client, target.fd, type, filteredMessageData, fromSelf); sendDocMessage(target.client, target.fd, type, filteredMessageData, fromSelf);
} else { } else {
this.activeDoc.logDebug(target, 'skip broadcastDocMessage because it is not allowed for this client'); this._log.debug(target, 'skip broadcastDocMessage because it is not allowed for this client');
} }
} catch (e) { } catch (e) {
if (e.code && e.code === 'NEED_RELOAD') { if (e.code && e.code === 'NEED_RELOAD') {
@ -129,10 +130,7 @@ export class DocClients {
} catch (e) { } catch (e) {
if (e.code === 'AUTH_NO_VIEW') { if (e.code === 'AUTH_NO_VIEW') {
// Skip sending data to this user, they have no view access. // Skip sending data to this user, they have no view access.
log.rawDebug('skip broadcastDocMessage because AUTH_NO_VIEW', { this._log.debug(target, 'skip broadcastDocMessage because AUTH_NO_VIEW');
docId: target.authorizer.getDocId(),
...target.client.getLogMeta()
});
// Go further and trigger a shutdown for this user, in case they are granted // Go further and trigger a shutdown for this user, in case they are granted
// access again later. // access again later.
sendDocMessage(target.client, target.fd, 'docShutdown', null, fromSelf); sendDocMessage(target.client, target.fd, 'docShutdown', null, fromSelf);

View File

@ -154,6 +154,14 @@ export function getDocSessionAccess(docSession: OptDocSession): Role {
throw new Error('getDocSessionAccess could not find access information in DocSession'); throw new Error('getDocSessionAccess could not find access information in DocSession');
} }
export function getDocSessionAccessOrNull(docSession: OptDocSession): Role|null {
try {
return getDocSessionAccess(docSession);
} catch (err) {
return null;
}
}
/** /**
* Get cached information about the document, if available. May be stale. * Get cached information about the document, if available. May be stale.
*/ */

View File

@ -14,7 +14,7 @@ import {ChecksummedExternalStorage, DELETED_TOKEN, ExternalStorage} from 'app/se
import {HostedMetadataManager} from 'app/server/lib/HostedMetadataManager'; import {HostedMetadataManager} from 'app/server/lib/HostedMetadataManager';
import {ICreate} from 'app/server/lib/ICreate'; import {ICreate} from 'app/server/lib/ICreate';
import {IDocStorageManager} from 'app/server/lib/IDocStorageManager'; import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
import * as log from 'app/server/lib/log'; import {LogMethods} from "app/server/lib/LogMethods";
import {fromCallback} from 'app/server/lib/serverUtils'; import {fromCallback} from 'app/server/lib/serverUtils';
import * as fse from 'fs-extra'; import * as fse from 'fs-extra';
import * as path from 'path'; import * as path from 'path';
@ -113,6 +113,8 @@ export class HostedStorageManager implements IDocStorageManager {
// Latest version ids of documents. // Latest version ids of documents.
private _latestVersions = new Map<string, string>(); private _latestVersions = new Map<string, string>();
private _log = new LogMethods('HostedStorageManager ', (docId: string|null) => ({docId}));
/** /**
* Initialize with the given root directory, which should be a fully-resolved path. * Initialize with the given root directory, which should be a fully-resolved path.
* If s3Bucket is blank, S3 storage will be disabled. * If s3Bucket is blank, S3 storage will be disabled.
@ -140,7 +142,7 @@ export class HostedStorageManager implements IDocStorageManager {
delayBeforeOperationMs: secondsBeforePush * 1000, delayBeforeOperationMs: secondsBeforePush * 1000,
retry: true, retry: true,
logError: (key, failureCount, err) => { logError: (key, failureCount, err) => {
log.error("HostedStorageManager: error pushing %s (%d): %s", key, failureCount, err); this._log.error(null, "error pushing %s (%d): %s", key, failureCount, err);
} }
}); });
@ -309,7 +311,7 @@ export class HostedStorageManager implements IDocStorageManager {
await fse.remove(this._getHashFile(this.getPath(docId))); await fse.remove(this._getHashFile(this.getPath(docId)));
this.markAsChanged(docId, 'edit'); this.markAsChanged(docId, 'edit');
} catch (err) { } catch (err) {
log.error("HostedStorageManager: problem replacing %s: %s", docId, err); this._log.error(docId, "problem replacing doc: %s", err);
await fse.move(tmpPath, docPath, {overwrite: true}); await fse.move(tmpPath, docPath, {overwrite: true});
throw err; throw err;
} finally { } finally {
@ -373,7 +375,7 @@ export class HostedStorageManager implements IDocStorageManager {
* Close the storage manager. Make sure any pending changes reach S3 first. * Close the storage manager. Make sure any pending changes reach S3 first.
*/ */
public async closeStorage(): Promise<void> { public async closeStorage(): Promise<void> {
await this._uploads.wait(() => log.info('HostedStorageManager: waiting for closeStorage to finish')); await this._uploads.wait(() => this._log.info(null, 'waiting for closeStorage to finish'));
// Close metadata manager. // Close metadata manager.
if (this._metadataManager) { await this._metadataManager.close(); } if (this._metadataManager) { await this._metadataManager.close(); }
@ -415,7 +417,7 @@ export class HostedStorageManager implements IDocStorageManager {
// pick up the pace of pushing to s3, from leisurely to urgent. // pick up the pace of pushing to s3, from leisurely to urgent.
public prepareToCloseStorage() { public prepareToCloseStorage() {
if (this._pruner) { if (this._pruner) {
this._pruner.close().catch(e => log.error("HostedStorageManager: pruning error %s", e)); this._pruner.close().catch(e => this._log.error(null, "pruning error %s", e));
} }
this._uploads.expediteOperations(); this._uploads.expediteOperations();
} }
@ -436,7 +438,7 @@ export class HostedStorageManager implements IDocStorageManager {
*/ */
public async flushDoc(docName: string): Promise<void> { public async flushDoc(docName: string): Promise<void> {
while (!this.isAllSaved(docName)) { while (!this.isAllSaved(docName)) {
log.info('HostedStorageManager: waiting for document to finish: %s', docName); this._log.info(docName, 'waiting for document to finish');
await this._uploads.expediteOperationAndWait(docName); await this._uploads.expediteOperationAndWait(docName);
await this._inventory?.flush(docName); await this._inventory?.flush(docName);
if (!this.isAllSaved(docName)) { if (!this.isAllSaved(docName)) {
@ -576,7 +578,7 @@ export class HostedStorageManager implements IDocStorageManager {
// Fine, accept the doc as existing on our file system. // Fine, accept the doc as existing on our file system.
return true; return true;
} else { } else {
log.info("Local hash does not match redis: %s vs %s", checksum, docStatus.docMD5); this._log.info(docName, "Local hash does not match redis: %s vs %s", checksum, docStatus.docMD5);
// The file that exists locally does not match S3. But S3 is the canonical version. // The file that exists locally does not match S3. But S3 is the canonical version.
// On the assumption that the local file is outdated, delete it. // On the assumption that the local file is outdated, delete it.
// TODO: may want to be more careful in case the local file has modifications that // TODO: may want to be more careful in case the local file has modifications that
@ -647,7 +649,7 @@ export class HostedStorageManager implements IDocStorageManager {
private async _prepareBackup(docId: string, postfix: string = 'backup'): Promise<string> { private async _prepareBackup(docId: string, postfix: string = 'backup'): Promise<string> {
const docPath = this.getPath(docId); const docPath = this.getPath(docId);
const tmpPath = `${docPath}-${postfix}`; const tmpPath = `${docPath}-${postfix}`;
return backupSqliteDatabase(docPath, tmpPath, undefined, postfix); return backupSqliteDatabase(docPath, tmpPath, undefined, postfix, {docId});
} }
/** /**
@ -775,10 +777,14 @@ export class HostedStorageManager implements IDocStorageManager {
*/ */
export async function backupSqliteDatabase(src: string, dest: string, export async function backupSqliteDatabase(src: string, dest: string,
testProgress?: (e: BackupEvent) => void, testProgress?: (e: BackupEvent) => void,
label?: string): Promise<string> { label?: string,
log.debug(`backupSqliteDatabase: starting copy of ${src} (${label})`); logMeta: object = {}): Promise<string> {
const _log = new LogMethods<null>('backupSqliteDatabase: ', () => logMeta);
_log.debug(null, `starting copy of ${src} (${label})`);
let db: sqlite3.DatabaseWithBackup|null = null; let db: sqlite3.DatabaseWithBackup|null = null;
let success: boolean = false; let success: boolean = false;
let maxStepTimeMs: number = 0;
let numSteps: number = 0;
try { try {
// NOTE: fse.remove succeeds also when the file does not exist. // NOTE: fse.remove succeeds also when the file does not exist.
await fse.remove(dest); // Just in case some previous process terminated very badly. await fse.remove(dest); // Just in case some previous process terminated very badly.
@ -808,9 +814,11 @@ export async function backupSqliteDatabase(src: string, dest: string,
// this message at most once a second. // this message at most once a second.
// See https://www.sqlite.org/c3ref/backup_finish.html and // See https://www.sqlite.org/c3ref/backup_finish.html and
// https://github.com/mapbox/node-sqlite3/pull/1116 for api details. // https://github.com/mapbox/node-sqlite3/pull/1116 for api details.
if (remaining >= 0 && backup.remaining > remaining && Date.now() - restartMsgTime > 1000) { numSteps++;
log.info(`backupSqliteDatabase: copy of ${src} (${label}) restarted`); const stepStart = Date.now();
restartMsgTime = Date.now(); if (remaining >= 0 && backup.remaining > remaining && stepStart - restartMsgTime > 1000) {
_log.info(null, `copy of ${src} (${label}) restarted`);
restartMsgTime = stepStart;
} }
remaining = backup.remaining; remaining = backup.remaining;
if (testProgress) { testProgress({action: 'step', phase: 'before'}); } if (testProgress) { testProgress({action: 'step', phase: 'before'}); }
@ -819,15 +827,18 @@ export async function backupSqliteDatabase(src: string, dest: string,
isCompleted = Boolean(await fromCallback(cb => backup.step(PAGES_TO_BACKUP_PER_STEP, cb))); isCompleted = Boolean(await fromCallback(cb => backup.step(PAGES_TO_BACKUP_PER_STEP, cb)));
} catch (err) { } catch (err) {
if (String(err) !== String(prevError) || Date.now() - errorMsgTime > 1000) { if (String(err) !== String(prevError) || Date.now() - errorMsgTime > 1000) {
log.info(`backupSqliteDatabase (${src} ${label}): ${err}`); _log.info(null, `error (${src} ${label}): ${err}`);
errorMsgTime = Date.now(); errorMsgTime = Date.now();
} }
prevError = err; prevError = err;
if (backup.failed) { throw new Error(`backupSqliteDatabase (${src} ${label}): internal copy failed`); } if (backup.failed) { throw new Error(`backupSqliteDatabase (${src} ${label}): internal copy failed`); }
} finally {
const stepTimeMs = Date.now() - stepStart;
if (stepTimeMs > maxStepTimeMs) { maxStepTimeMs = stepTimeMs; }
} }
if (testProgress) { testProgress({action: 'step', phase: 'after'}); } if (testProgress) { testProgress({action: 'step', phase: 'after'}); }
if (isCompleted) { if (isCompleted) {
log.info(`backupSqliteDatabase: copy of ${src} (${label}) completed successfully`); _log.info(null, `copy of ${src} (${label}) completed successfully`);
success = true; success = true;
break; break;
} }
@ -838,7 +849,7 @@ export async function backupSqliteDatabase(src: string, dest: string,
try { try {
if (db) { await fromCallback(cb => db!.close(cb)); } if (db) { await fromCallback(cb => db!.close(cb)); }
} catch (err) { } catch (err) {
log.debug(`backupSqliteDatabase: problem stopping copy of ${src} (${label}): ${err}`); _log.debug(null, `problem stopping copy of ${src} (${label}): ${err}`);
} }
if (!success) { if (!success) {
// Something went wrong, remove backup if it was started. // Something went wrong, remove backup if it was started.
@ -846,11 +857,11 @@ export async function backupSqliteDatabase(src: string, dest: string,
// NOTE: fse.remove succeeds also when the file does not exist. // NOTE: fse.remove succeeds also when the file does not exist.
await fse.remove(dest); await fse.remove(dest);
} catch (err) { } catch (err) {
log.debug(`backupSqliteDatabase: problem removing copy of ${src} (${label}): ${err}`); _log.debug(null, `problem removing copy of ${src} (${label}): ${err}`);
} }
} }
if (testProgress) { testProgress({action: 'close', phase: 'after'}); } if (testProgress) { testProgress({action: 'close', phase: 'after'}); }
log.debug(`backupSqliteDatabase: stopped copy of ${src} (${label})`); _log.rawLog('debug', null, `stopped copy of ${src} (${label})`, {maxStepTimeMs, numSteps});
} }
return dest; return dest;
} }

View File

@ -0,0 +1,36 @@
import * as log from 'app/server/lib/log';
export type ILogMeta = log.ILogMeta;
/**
* Helper for logging with metadata. The created object has methods similar to those of the `log`
* module, but with an extra required first argument. The produced messages get metadata produced
* by the constructor callback applied to that argument, and the specified prefix.
*
* Usage:
* _log = new LogMethods(prefix, (info) => ({...logMetadata...}))
* _log.info(info, "hello %", name);
* _log.warn(info, "hello %", name);
* etc.
*/
export class LogMethods<Info> {
constructor(
private _prefix: string,
private _getMeta: (info: Info) => log.ILogMeta,
) {}
public debug(info: Info, msg: string, ...args: any[]) { this.log('debug', info, msg, ...args); }
public info(info: Info, msg: string, ...args: any[]) { this.log('info', info, msg, ...args); }
public warn(info: Info, msg: string, ...args: any[]) { this.log('warn', info, msg, ...args); }
public error(info: Info, msg: string, ...args: any[]) { this.log('error', info, msg, ...args); }
public log(level: string, info: Info, msg: string, ...args: any[]): void {
log.origLog(level, this._prefix + msg, ...args, this._getMeta(info));
}
// Log with the given level, and include the provided log metadata in addition to that produced
// by _getMeta(info).
public rawLog(level: string, info: Info, msg: string, meta: ILogMeta): void {
log.origLog(level, this._prefix + msg, {...this._getMeta(info), ...meta});
}
}

View File

@ -6,10 +6,10 @@ import {
LocalActionBundle, LocalActionBundle,
UserActionBundle UserActionBundle
} from 'app/common/ActionBundle'; } from 'app/common/ActionBundle';
import {DocAction, UserAction} from 'app/common/DocActions'; import {DocAction, getNumRows, UserAction} from 'app/common/DocActions';
import {allToken} from 'app/common/sharing'; import {allToken} from 'app/common/sharing';
import {timeFormat} from 'app/common/timeFormat';
import * as log from 'app/server/lib/log'; import * as log from 'app/server/lib/log';
import {LogMethods} from "app/server/lib/LogMethods";
import {shortDesc} from 'app/server/lib/shortDesc'; import {shortDesc} from 'app/server/lib/shortDesc';
import * as assert from 'assert'; import * as assert from 'assert';
import {Mutex} from 'async-mutex'; import {Mutex} from 'async-mutex';
@ -42,6 +42,9 @@ interface UserResult {
// Internally-used enum to distinguish if applied actions should be logged as local or shared. // Internally-used enum to distinguish if applied actions should be logged as local or shared.
enum Branch { Local, Shared } enum Branch { Local, Shared }
// Don't log details of action bundles in production.
const LOG_ACTION_BUNDLE = (process.env.NODE_ENV !== 'production');
export class Sharing { export class Sharing {
protected _activeDoc: ActiveDoc; protected _activeDoc: ActiveDoc;
protected _actionHistory: ActionHistory; protected _actionHistory: ActionHistory;
@ -49,6 +52,8 @@ export class Sharing {
protected _pendingQueue: Deque<UserRequest> = new Deque(); protected _pendingQueue: Deque<UserRequest> = new Deque();
protected _workCoordinator: WorkCoordinator; protected _workCoordinator: WorkCoordinator;
private _log = new LogMethods('Sharing ', (s: OptDocSession|null) => this._activeDoc.getLogMeta(s));
constructor(activeDoc: ActiveDoc, actionHistory: ActionHistory, private _modificationLock: Mutex) { constructor(activeDoc: ActiveDoc, actionHistory: ActionHistory, private _modificationLock: Mutex) {
// TODO actionHistory is currently unused (we use activeDoc.actionLog). // TODO actionHistory is currently unused (we use activeDoc.actionLog).
assert(actionHistory.isInitialized()); assert(actionHistory.isInitialized());
@ -114,7 +119,7 @@ export class Sharing {
const ret = await this._doApplyUserActionBundle(userRequest.action, userRequest.docSession); const ret = await this._doApplyUserActionBundle(userRequest.action, userRequest.docSession);
userRequest.resolve(ret); userRequest.resolve(ret);
} catch (e) { } catch (e) {
log.warn("Unable to apply action...", e); this._log.warn(userRequest.docSession, "Unable to apply action...", e);
userRequest.reject(e); userRequest.reject(e);
} }
} }
@ -125,7 +130,7 @@ export class Sharing {
try { try {
await this._doApplySharedActionBundle(action); await this._doApplySharedActionBundle(action);
} catch (e) { } catch (e) {
log.error("Unable to apply hub action... skipping"); this._log.error(null, "Unable to apply hub action... skipping");
} }
} }
@ -141,7 +146,7 @@ export class Sharing {
await this._rebaseLocalActions(); await this._rebaseLocalActions();
} }
} catch (e) { } catch (e) {
log.error("Unable to apply hub action... skipping"); this._log.error(null, "Unable to apply hub action... skipping");
} }
} }
@ -155,7 +160,7 @@ export class Sharing {
rebaseQueue.push(...actions.map((a) => getUserActionBundle(a))); rebaseQueue.push(...actions.map((a) => getUserActionBundle(a)));
await this._actionHistory.clearLocalActions(); await this._actionHistory.clearLocalActions();
} catch (e) { } catch (e) {
log.error("Can't undo local actions; sharing is off"); this._log.error(null, "Can't undo local actions; sharing is off");
this._rollbackToCheckpoint(); this._rollbackToCheckpoint();
// TODO this.disconnect(); // TODO this.disconnect();
// TODO errorState = true; // TODO errorState = true;
@ -173,14 +178,14 @@ export class Sharing {
try { try {
await this._doApplyUserActionBundle(adjusted, null); await this._doApplyUserActionBundle(adjusted, null);
} catch (e) { } catch (e) {
log.warn("Unable to apply rebased action..."); this._log.warn(null, "Unable to apply rebased action...");
rebaseFailures.push([action, adjusted]); rebaseFailures.push([action, adjusted]);
} }
} }
if (rebaseFailures.length > 0) { if (rebaseFailures.length > 0) {
this._createBackupAtCheckpoint(); this._createBackupAtCheckpoint();
// TODO we should notify the user too. // TODO we should notify the user too.
log.error('Rebase failed to reapply some of your actions, backup of local at...'); this._log.error(null, 'Rebase failed to reapply some of your actions, backup of local at...');
} }
this._releaseCheckpoint(); this._releaseCheckpoint();
} }
@ -238,7 +243,19 @@ export class Sharing {
actionHash: null, // Gets set below by _actionHistory.recordNext... actionHash: null, // Gets set below by _actionHistory.recordNext...
parentActionHash: null, // Gets set below by _actionHistory.recordNext... parentActionHash: null, // Gets set below by _actionHistory.recordNext...
}; };
this._logActionBundle(`_doApplyUserActions (${Branch[branch]})`, localActionBundle);
const logMeta = {
actionNum,
linkId: info.linkId,
otherId: info.otherId,
numDocActions: localActionBundle.stored.length,
numRows: localActionBundle.stored.reduce((n, env) => n + getNumRows(env[1]), 0),
author: info.user,
};
this._log.rawLog('debug', docSession, '_doApplyUserActions', logMeta);
if (LOG_ACTION_BUNDLE) {
this._logActionBundle(`_doApplyUserActions (${Branch[branch]})`, localActionBundle);
}
// TODO Note that the sandbox may produce actions which are not addressed to us (e.g. when we // TODO Note that the sandbox may produce actions which are not addressed to us (e.g. when we
// have EDIT permission without VIEW). These are not sent to the browser or the database. But // have EDIT permission without VIEW). These are not sent to the browser or the database. But
@ -361,11 +378,6 @@ export class Sharing {
/** Log an action bundle to the debug log. */ /** Log an action bundle to the debug log. */
private _logActionBundle(prefix: string, actionBundle: ActionBundle) { private _logActionBundle(prefix: string, actionBundle: ActionBundle) {
const includeEnv = actionBundle.envelopes.map((e) => this.isOwnEnvelope(e.recipients)); const includeEnv = actionBundle.envelopes.map((e) => this.isOwnEnvelope(e.recipients));
log.debug("%s: ActionBundle #%s with #%s envelopes: %s",
prefix, actionBundle.actionNum, actionBundle.envelopes.length,
infoDesc(actionBundle.info[1]));
actionBundle.envelopes.forEach((env, i) =>
log.debug("%s: env #%s: %s", prefix, i, env.recipients.join(' ')));
actionBundle.stored.forEach((envAction, i) => actionBundle.stored.forEach((envAction, i) =>
log.debug("%s: stored #%s [%s%s]: %s", prefix, i, envAction[0], log.debug("%s: stored #%s [%s%s]: %s", prefix, i, envAction[0],
(includeEnv[envAction[0]] ? "" : " alien"), (includeEnv[envAction[0]] ? "" : " alien"),
@ -414,17 +426,6 @@ export function findOrAddAllEnvelope(envelopes: Envelope[]): number {
return envelopes.length - 1; return envelopes.length - 1;
} }
/**
* Convert actionInfo to a concise human-readable description, for debugging.
*/
function infoDesc(info: ActionInfo): string {
const timestamp = timeFormat('A', new Date(info.time));
const desc = info.desc ? ` desc=[${info.desc}]` : '';
const otherId = info.otherId ? ` [otherId=${info.otherId}]` : '';
const linkId = info.linkId ? ` [linkId=${info.linkId}]` : '';
return `${timestamp} on ${info.inst} by ${info.user}${desc}${otherId}${linkId}`;
}
/** /**
* Extract a UserActionBundle from a LocalActionBundle, which contains a superset of data. * Extract a UserActionBundle from a LocalActionBundle, which contains a superset of data.
*/ */

View File

@ -8,7 +8,7 @@ import * as uuidv4 from 'uuid/v4';
import {EngineCode} from 'app/common/DocumentSettings'; import {EngineCode} from 'app/common/DocumentSettings';
import * as log from 'app/server/lib/log'; import * as log from 'app/server/lib/log';
import { OpenMode, SQLiteDB } from 'app/server/lib/SQLiteDB'; import { OpenMode, SQLiteDB } from 'app/server/lib/SQLiteDB';
import { getDocSessionAccess, getDocSessionUser, OptDocSession } from './DocSession'; import { getDocSessionAccessOrNull, getDocSessionUser, OptDocSession } from './DocSession';
/** /**
* Promisify a node-style callback function. E.g. * Promisify a node-style callback function. E.g.
@ -142,7 +142,7 @@ export async function checkAllegedGristDoc(docSession: OptDocSession, fname: str
*/ */
export function getLogMetaFromDocSession(docSession: OptDocSession) { export function getLogMetaFromDocSession(docSession: OptDocSession) {
const client = docSession.client; const client = docSession.client;
const access = getDocSessionAccess(docSession); const access = getDocSessionAccessOrNull(docSession);
const user = getDocSessionUser(docSession); const user = getDocSessionUser(docSession);
return { return {
access, access,