(core) start applying defenses for untrusted document uploads

Summary:
This applies some of the mitigations suggested by the SQLite authors
for opening untrusted SQLite database files, which is what we do when
a user uploads a Grist doc. See:
  https://www.sqlite.org/security.html#untrusted_sqlite_database_files

Steps implemented in this diff are:
  * Setting `trusted_schema` to off
  * Running a SQLite-level integrity check on uploads

The remaining steps will require updates to our node-sqlite3 fork, since
they are not exposed through the node-sqlite3 API (one more reason to
migrate to better-sqlite3).
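For orientation, here is a minimal sketch of what the two implemented
mitigations amount to. It is written against better-sqlite3 rather than our
actual SQLiteDB wrapper over the node-sqlite3 fork, and
`checkUntrustedSqliteFile` is a made-up name for illustration; the real
implementation is `checkAllegedGristDoc` in the diff below.

```typescript
// Sketch only: not the code in this diff, just the shape of the mitigations.
import Database from 'better-sqlite3';

function checkUntrustedSqliteFile(fname: string): void {
  // Open read-only: an upload should not be written to before it passes checks.
  const db = new Database(fname, {readonly: true});
  try {
    // Mitigation 1: treat the file's own schema as untrusted, restricting
    // functions and virtual tables referenced from views, triggers, etc.
    db.pragma('trusted_schema = OFF');
    // Mitigation 2: run SQLite's own integrity check; expect a single row of "ok".
    const rows = db.pragma('integrity_check') as Array<{integrity_check: string}>;
    if (rows.length !== 1 || rows[0].integrity_check !== 'ok') {
      throw new Error('uploaded file failed PRAGMA integrity_check');
    }
  } finally {
    db.close();
  }
}
```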

I haven't yet managed to craft a file that triggers an integrity-check
failure without also being rejected by SQLite as corrupt at a more basic
level, so exercising that code path remains a TODO for testing.

Test Plan:
Existing tests pass. I still need to come up with exploit files that
actually exercise the new defenses; a rough skeleton of such a test is
sketched below.
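Once a suitable fixture exists, the missing test could take roughly this
shape. This is only a sketch: `BadIntegrity.grist` is a hypothetical fixture
that does not exist yet, and the import paths and the
`makeExceptionalDocSession('system')` call are assumptions about module
layout, not something verified here.

```typescript
import {assert} from 'chai';
import * as path from 'path';
import {makeExceptionalDocSession} from 'app/server/lib/DocSession';
import {checkAllegedGristDoc} from 'app/server/lib/serverUtils';
import * as testUtils from 'test/server/testUtils';

describe('checkAllegedGristDoc', function() {
  testUtils.setTmpLogLevel('error');

  it('rejects an upload that fails the SQLite integrity check', async function() {
    // Hypothetical fixture: opens as a valid SQLite file, but PRAGMA integrity_check fails.
    const fname = path.resolve(testUtils.fixturesRoot, 'docs', 'BadIntegrity.grist');
    const docSession = makeExceptionalDocSession('system');
    // chai-as-promised is wired up by testUtils, so isRejected is available.
    await assert.isRejected(checkAllegedGristDoc(docSession, fname),
                            /failed integrity checks/);
  });
});
```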

Reviewers: dsagal

Reviewed By: dsagal

Differential Revision: https://phab.getgrist.com/D2909
Paul Fitzpatrick 2021-07-14 07:17:12 -04:00
parent 625fce5f65
commit 6e15d44cf6
6 changed files with 155 additions and 115 deletions

View File

@@ -100,3 +100,7 @@ declare module '@gristlabs/pidusage' {
 }
 declare module "csv";
+
+declare module 'winston/lib/winston/common' {
+  export function serialize(meta: any): string;
+}

View File

@@ -64,7 +64,6 @@ import {DocClients} from './DocClients';
 import {DocPluginManager} from './DocPluginManager';
 import {
   DocSession,
-  getDocSessionAccess,
   getDocSessionUser,
   getDocSessionUserId,
   makeExceptionalDocSession,
@@ -74,6 +73,7 @@ import {DocStorage} from './DocStorage';
 import {expandQuery} from './ExpandedQuery';
 import {GranularAccess, GranularAccessForBundle} from './GranularAccess';
 import {OnDemandActions} from './OnDemandActions';
+import {getLogMetaFromDocSession} from './serverUtils';
 import {findOrAddAllEnvelope, Sharing} from './Sharing';
 import cloneDeep = require('lodash/cloneDeep');
 import flatten = require('lodash/flatten');
@@ -195,15 +195,10 @@ export class ActiveDoc extends EventEmitter {
   // Constructs metadata for logging, given a Client or an OptDocSession.
   public getLogMeta(docSession: OptDocSession, docMethod?: string): log.ILogMeta {
-    const client = docSession.client;
-    const access = getDocSessionAccess(docSession);
-    const user = getDocSessionUser(docSession);
     return {
+      ...getLogMetaFromDocSession(docSession),
       docId: this._docName,
-      access,
       ...(docMethod ? {docMethod} : {}),
-      ...(user ? {userId: user.id, email: user.email} : {}),
-      ...(client ? client.getLogMeta() : {}),  // Client if present will repeat and add to user info.
     };
   }

View File

@@ -23,6 +23,7 @@ import * as docUtils from 'app/server/lib/docUtils';
 import {GristServer} from 'app/server/lib/GristServer';
 import {IDocStorageManager} from 'app/server/lib/IDocStorageManager';
 import {makeForkIds, makeId} from 'app/server/lib/idUtils';
+import {checkAllegedGristDoc} from 'app/server/lib/serverUtils';
 import * as log from 'app/server/lib/log';
 import {ActiveDoc} from './ActiveDoc';
 import {PluginManager} from './PluginManager';
@@ -476,7 +477,9 @@ export class DocManager extends EventEmitter {
       // security vulnerability. See https://phab.getgrist.com/T457.
       const docName = await this._createNewDoc(id);
       const docPath: string = this.storageManager.getPath(docName);
-      await docUtils.copyFile(uploadInfo.files[0].absPath, docPath);
+      const srcDocPath = uploadInfo.files[0].absPath;
+      await checkAllegedGristDoc(docSession, srcDocPath);
+      await docUtils.copyFile(srcDocPath, docPath);
       await this.storageManager.addToStorage(docName);
       return {title: basename, id: docName};
     } else {

View File

@@ -677,7 +677,8 @@ export class DocStorage implements ISQLiteDB {
       // "PRAGMA journal_mode = WAL;" +
       // "PRAGMA auto_vacuum = 0;" +
       // "PRAGMA synchronous = NORMAL"
-      "PRAGMA synchronous = OFF;"
+      "PRAGMA synchronous = OFF;" +
+      "PRAGMA trusted_schema = OFF;"  // mitigation suggested by https://www.sqlite.org/security.html#untrusted_sqlite_database_files
     );
   }

View File

@@ -1,8 +1,13 @@
 import * as bluebird from 'bluebird';
-import {ChildProcess} from 'child_process';
+import { ChildProcess } from 'child_process';
 import * as net from 'net';
 import * as path from 'path';
-import {ConnectionOptions} from 'typeorm';
+import { ConnectionOptions } from 'typeorm';
+import * as uuidv4 from 'uuid/v4';
+import * as log from 'app/server/lib/log';
+import { OpenMode, SQLiteDB } from 'app/server/lib/SQLiteDB';
+import { getDocSessionAccess, getDocSessionUser, OptDocSession } from './DocSession';
 
 /**
  * Promisify a node-style callback function. E.g.
@@ -114,3 +119,33 @@ export function getDatabaseUrl(options: ConnectionOptions, includeCredentials: b
     return `${options.type}://?`;
   }
 }
+
+/**
+ * Collect checks to be applied to incoming documents that are alleged to be
+ * Grist documents. For now, the only check is a sqlite-level integrity check,
+ * as suggested by https://www.sqlite.org/security.html#untrusted_sqlite_database_files
+ */
+export async function checkAllegedGristDoc(docSession: OptDocSession, fname: string) {
+  const db = await SQLiteDB.openDBRaw(fname, OpenMode.OPEN_READONLY);
+  const integrityCheckResults = await db.all('PRAGMA integrity_check');
+  if (integrityCheckResults.length !== 1 || integrityCheckResults[0].integrity_check !== 'ok') {
+    const uuid = uuidv4();
+    log.info('Integrity check failure on import', {uuid, integrityCheckResults,
+                                                   ...getLogMetaFromDocSession(docSession)});
+    throw new Error(`Document failed integrity checks - is it corrupted? Event ID: ${uuid}`);
+  }
+}
+
+/**
+ * Extract access, userId, email, and client (if applicable) from session, for logging purposes.
+ */
+export function getLogMetaFromDocSession(docSession: OptDocSession) {
+  const client = docSession.client;
+  const access = getDocSessionAccess(docSession);
+  const user = getDocSessionUser(docSession);
+  return {
+    access,
+    ...(user ? {userId: user.id, email: user.email} : {}),
+    ...(client ? client.getLogMeta() : {}),   // Client if present will repeat and add to user info.
+  };
+}

View File

@@ -10,26 +10,21 @@
 /* global before, after */
-const _ = require('underscore');
-const chai = require('chai');
-const assert = chai.assert;
-const chaiAsPromised = require('chai-as-promised');
-const path = require('path');
-const util = require('util');
-const Promise = require('bluebird');
-const fs = Promise.promisifyAll(require('fs'));
-const tmp = Promise.promisifyAll(require('tmp'));
-const tmpFile = Promise.promisify(tmp.file, {multiArgs: true});
-const winston = require('winston');
-const {serialize} = require('winston/lib/winston/common');
-const docUtils = require('app/server/lib/docUtils');
-const log = require('app/server/lib/log');
-const {getAppRoot} = require('app/server/lib/places');
+import * as _ from 'underscore';
+import * as chai from 'chai';
+import { assert } from 'chai';
+import * as chaiAsPromised from 'chai-as-promised';
+import * as path from 'path';
+import * as fse from 'fs-extra';
+import * as tmp from 'tmp-promise';
+import * as winston from 'winston';
+import { serialize } from 'winston/lib/winston/common';
+import * as docUtils from 'app/server/lib/docUtils';
+import * as log from 'app/server/lib/log';
+import { getAppRoot } from 'app/server/lib/places';
 
 chai.use(chaiAsPromised);
-Promise.config({longStackTraces: true});
 
 /**
  * Creates a temporary file with the given contents.
@@ -38,17 +33,14 @@ Promise.config({longStackTraces: true});
  *   is useful to see the content while debugging a test.
  * @returns {Promise} A promise for the path of the new file.
  */
-function writeTmpFile(content, optKeep) {
+export async function writeTmpFile(content: any, optKeep?: boolean) {
   // discardDescriptor ensures tmp module closes it. It can lead to horrible bugs to close this
   // descriptor yourself, since tmp also closes it on exit, and if it's a different descriptor by
   // that time, it can lead to a crash. See https://github.com/raszi/node-tmp/issues/168
-  return tmpFile({keep: optKeep, discardDescriptor: true})
-    .spread(function(path) {
-      return fs.writeFileAsync(path, content)
-        .thenReturn(path);
-    });
+  const obj = await tmp.file({keep: optKeep, discardDescriptor: true});
+  await fse.writeFile(obj.path, content);
+  return obj.path;
 }
-exports.writeTmpFile = writeTmpFile;
 
 /**
  * Creates a temporary file with `numLines` of generated data, each line about 30 bytes long.
@@ -58,33 +50,35 @@ exports.writeTmpFile = writeTmpFile;
  *   is useful to see the content while debugging a test.
  * @returns {Promise} A promise for the path of the new file.
  */
-function generateTmpFile(numLines, optKeep) {
+export async function generateTmpFile(numLines: number, optKeep?: boolean) {
   // Generate a bigger data file.
-  var data = [];
-  for (var i = 0; i < numLines; i++) {
+  const data = [];
+  for (let i = 0; i < numLines; i++) {
     data.push(i + " abcdefghijklmnopqrstuvwxyz\n");
   }
   return writeTmpFile(data.join(""), optKeep);
 }
-exports.generateTmpFile = generateTmpFile;
 
 /**
  * Helper class to capture log output when we want to test it.
  */
-var CaptureTransport = function(options) {
-  this._captureFunc = options.captureFunc;
-  if (options.name) {
-    this.name = options.name;
-  }
-};
-util.inherits(CaptureTransport, winston.Transport);
-CaptureTransport.prototype.name = 'CaptureTransport';
-CaptureTransport.prototype.log = function(level, msg, meta, callback) {
-  this._captureFunc(level, msg, meta);
-  callback(null);
-};
+class CaptureTransport extends winston.Transport {
+  private _captureFunc: (level: string, msg: string, meta: any) => void;
+
+  public constructor(options: any) {
+    super();
+    this._captureFunc = options.captureFunc;
+    if (options.name) {
+      this.name = options.name;
+    }
+  }
+
+  public log(level: string, msg: string, meta: any, callback: () => void) {
+    this._captureFunc(level, msg, meta);
+  }
+}
 
 /**
@@ -94,7 +88,7 @@ CaptureTransport.prototype.log = function(level, msg, meta, callback) {
  *
  * This should be called at the suite level (i.e. inside describe()).
  */
-function setTmpLogLevel(level, optCaptureFunc) {
+export function setTmpLogLevel(level: string, optCaptureFunc?: (level: string, msg: string, meta: any) => void) {
   // If verbose is set in the environment, sabotage all reductions in logging level.
   // Handier than modifying the setTmpLogLevel line and then remembering to set it back
   // before committing.
@@ -102,28 +96,28 @@ function setTmpLogLevel(level, optCaptureFunc) {
     level = 'debug';
   }
 
-  var prevLogLevel = null;
+  let prevLogLevel: string|undefined = undefined;
+  const name = _.uniqueId('CaptureLog');
 
   before(function() {
-    if (this.runnable().parent.root) {
+    if (this.runnable().parent?.root) {
       throw new Error("setTmpLogLevel should be called at suite level, not at root level");
     }
     prevLogLevel = log.transports.file.level;
     log.transports.file.level = level;
     if (optCaptureFunc) {
-      log.add(CaptureTransport, { captureFunc: optCaptureFunc });
+      log.add(CaptureTransport as any, { captureFunc: optCaptureFunc, name });  // typing is off.
     }
   });
 
   after(function() {
     if (optCaptureFunc) {
-      log.remove(CaptureTransport);
+      log.remove(name);
     }
     log.transports.file.level = prevLogLevel;
   });
 }
-exports.setTmpLogLevel = setTmpLogLevel;
 
 /**
@@ -131,33 +125,33 @@ exports.setTmpLogLevel = setTmpLogLevel;
  * captures those at minLevel and higher. Returns a promise for the array of "level: message"
  * strings. These may be tested using testUtils.assertMatchArray(). Callback may return a promise.
  */
-function captureLog(minLevel, callback) {
-  const messages = [];
+export async function captureLog(minLevel: string, callback: any) {
+  const messages: string[] = [];
   const prevLogLevel = log.transports.file.level;
   const name = _.uniqueId('CaptureLog');
-  function capture(level, msg, meta) {
-    if (log.levels[level] <= log.levels[minLevel]) {
+  function capture(level: string, msg: string, meta: any) {
+    if ((log as any).levels[level] <= (log as any).levels[minLevel]) {  // winston types are off?
       messages.push(level + ': ' + msg + (meta ? ' ' + serialize(meta) : ''));
     }
   }
-  log.transports.file.level = -1;   // Suppress all log output.
-  log.add(CaptureTransport, { captureFunc: capture, name: name });
-  return Promise.try(() => callback())
-    .finally(() => {
-      log.remove(name);
-      log.transports.file.level = prevLogLevel;
-    })
-    .return(messages);
+  log.transports.file.level = -1 as any;   // Suppress all log output.
+  log.add(CaptureTransport as any, { captureFunc: capture, name });  // types are off.
+  try {
+    await callback();
+  } finally {
+    log.remove(name);
+    log.transports.file.level = prevLogLevel;
+  }
+  return messages;
 }
-exports.captureLog = captureLog;
 
 /**
  * Asserts that each string of stringArray matches the corresponding regex in regexArray.
  */
-function assertMatchArray(stringArray, regexArray) {
+export function assertMatchArray(stringArray: string[], regexArray: RegExp[]) {
   for (let i = 0; i < Math.min(stringArray.length, regexArray.length); i++) {
     assert.match(stringArray[i], regexArray[i]);
   }
@@ -166,7 +160,6 @@ function assertMatchArray(stringArray, regexArray) {
   assert.isAtLeast(stringArray.length, regexArray.length,
                    'Not all expected strings were seen');
 }
-exports.assertMatchArray = assertMatchArray;
 
 /**
  * Helper method for handling expected Promise rejections.
@@ -175,7 +168,7 @@ exports.assertMatchArray = assertMatchArray;
  * @param {String} errCode - Error code to check against `err.code` from the caller.
  * @param {RegExp} errRegexp - Regular expression to check against `err.message` from the caller.
  */
-function expectRejection(promise, errCode, errRegexp) {
+export function expectRejection(promise: Promise<any>, errCode: number, errRegexp: RegExp) {
   return promise
     .then(function() {
       assert(false, "Expected promise to return an error: " + errCode);
@@ -192,7 +185,6 @@ function expectRejection(promise, errCode, errRegexp) {
     }
   });
 }
-exports.expectRejection = expectRejection;
 
 /**
  * Reads in doc actions from a test script. Used in DocStorage_Script.js and DocData.js.
@@ -203,10 +195,9 @@ exports.expectRejection = expectRejection;
  * @param {String} file - Input test script
  * @returns {Promise:Object} - Parsed test script object
  */
-exports.readTestScript = function(file) {
-  return fs.readFileAsync(file, {encoding: 'utf8'})
-    .then(function(fullText) {
-      var allLines = [];
+export async function readTestScript(file: string) {
+  const fullText = await fse.readFile(file, {encoding: 'utf8'});
+  const allLines: string[] = [];
   fullText.split("\n").forEach(function(line, i) {
     if (line.match(/^\s*\/\//)) {
       allLines.push('');
@@ -216,33 +207,34 @@ exports.readTestScript = function(file) {
     }
   });
   return JSON.parse(allLines.join("\n"));
-  });
-};
+}
 
 /**
  * For a test case step, such as ["APPLY", {actions}], checks if the step name has an encoded line
  * number, strips it, runs the callback with the step data, and inserts the line number into any
  * errors thrown by the callback.
  */
-exports.processTestScriptSteps = function(body, stepCallback) {
-  return Promise.each(body, function(step) {
-    var stepName = step[0];
-    var lineNoPos = stepName.indexOf('@');
-    var lineNum = (lineNoPos === -1) ? null : stepName.slice(lineNoPos + 1);
+export async function processTestScriptSteps<T>(body: Promise<[string, T]>[],
+                                                stepCallback: (step: [string, T]) => Promise<void>) {
+  for (const promise of body) {
+    const step = await promise;
+    const stepName = step[0];
+    const lineNoPos = stepName.indexOf('@');
+    const lineNum = (lineNoPos === -1) ? null : stepName.slice(lineNoPos + 1);
     step[0] = (lineNoPos === -1) ? stepName : stepName.slice(0, lineNoPos);
-    return Promise.try(() => stepCallback(step))
-      .catch(function(e) {
+    try {
+      await stepCallback(step);
+    } catch (e) {
       e.message = "LINE " + lineNum + ": " + e.message;
       throw e;
-      });
-  });
-};
+    }
+  }
+}
 
 /**
  * Helper that substitutes every instance of `from` value to `to` value. Iterates down the object.
  */
-function deepSubstitute(obj, from, to) {
-  assert.lengthOf(arguments, 3, 'Must specify obj, from, and to params');
+export function deepSubstitute(obj: any, from: any, to: any): any {
   from = _.isArray(from) ? from : [from];
   if (_.isArray(obj)) {
     return obj.map(el => deepSubstitute(el, from, to));
@@ -252,37 +244,47 @@ function deepSubstitute(obj, from, to) {
     return from.indexOf(obj) !== -1 ? to : obj;
   }
 }
-exports.deepSubstitute = deepSubstitute;
 
-const fixturesRoot = path.resolve(getAppRoot(), 'test', 'fixtures');
-exports.fixturesRoot = fixturesRoot;
+export const fixturesRoot = path.resolve(getAppRoot(), 'test', 'fixtures');
 
-exports.appRoot = getAppRoot();
+export const appRoot = getAppRoot();
 
 /**
  * Copy the given filename from the fixtures directory (test/fixtures)
  * to the storage manager root.
  * @param {string} alias - Optional alias that lets you rename the document on disk.
  */
-function useFixtureDoc(fileName, storageManager, alias = fileName) {
-  var srcPath = path.resolve(fixturesRoot, "docs", fileName);
-  return useLocalDoc(srcPath, storageManager, alias)
-    .tap(docName => log.info("Using fixture %s as %s", fileName, docName + ".grist"));
+export async function useFixtureDoc(fileName: string, storageManager: any, alias: string = fileName) {
+  const srcPath = path.resolve(fixturesRoot, "docs", fileName);
+  const docName = await useLocalDoc(srcPath, storageManager, alias);
+  log.info("Using fixture %s as %s", fileName, docName + ".grist");
+  return docName;
 }
-exports.useFixtureDoc = useFixtureDoc;
 
 /**
  * Copy the given filename from srcPath to the storage manager root.
  * @param {string} alias - Optional alias that lets you rename the document on disk.
  */
-function useLocalDoc(srcPath, storageManager, alias = srcPath) {
-  var docName = path.basename(alias || srcPath, ".grist");
-  return docUtils.createNumbered(docName, "-",
-    name => docUtils.createExclusive(storageManager.getPath(name))
-  )
-  .tap(docName => docUtils.copyFile(srcPath, storageManager.getPath(docName)))
-  .tap(docName => storageManager.markAsChanged(docName));
+export async function useLocalDoc(srcPath: string, storageManager: any, alias: string = srcPath) {
+  let docName = path.basename(alias || srcPath, ".grist");
+  docName = await docUtils.createNumbered(
+    docName, "-",
+    (name: string) => docUtils.createExclusive(storageManager.getPath(name)));
+  await docUtils.copyFile(srcPath, storageManager.getPath(docName));
+  await storageManager.markAsChanged(docName);
+  return docName;
 }
-exports.useLocalDoc = useLocalDoc;
 
-exports.assert = assert;
+// a helper to copy a fixtures document to destPath
+export async function copyFixtureDoc(docName: string, destPath: string) {
+  const srcPath = path.resolve(fixturesRoot, 'docs', docName);
+  await docUtils.copyFile(srcPath, destPath);
+}
+
+// a helper to read a fixtures document into memory
+export async function readFixtureDoc(docName: string) {
+  const srcPath = path.resolve(fixturesRoot, 'docs', docName);
+  return fse.readFile(srcPath);
+}
+
+export { assert };