run grist-core test batches in parallel (#444)

This sets up a framework for running tests in parallel.
It increases the total time taken (since some steps are
repeated) but reduces the turn-around time significantly
overall.

The main objective is to make it possible to release more
test batches to grist-core without bringing CI to a crawl.

The clever little test/split-tests.js script is from the
Grist Labs mono-repo and is Dmitry's work.

I considered doing the build in one job, and copying
it to test jobs, since it feels wasteful to repeat it.
That may be worth trying, especially if we start getting
jobs backing up (total concurrent Linux jobs on free plan
is quoted at 20).

It might also be worth looking at doing some tests in
parallel on the same worker, perhaps using the relatively
new MOCHA_WORKER_ID feature, since the tests are often not
actually CPU or I/O bound.
pull/451/head
Paul Fitzpatrick 1 year ago committed by GitHub
parent 8b7aa12a0e
commit 4be4acca6e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -10,24 +10,35 @@ on:
workflow_dispatch:
jobs:
build:
build_and_test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.9]
node-version: [14.x]
tests:
- ':lint:python:client:common:smoke:'
- ':server-1-of-2:'
- ':server-2-of-2:'
- ':nbrowser-1-of-5:'
- ':nbrowser-2-of-5:'
- ':nbrowser-3-of-5:'
- ':nbrowser-4-of-5:'
- ':nbrowser-5-of-5:'
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: 'yarn'
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
- name: Install Python packages
run: |
@ -38,9 +49,11 @@ jobs:
run: yarn install
- name: Run eslint
if: contains(matrix.tests, ':lint:')
run: yarn run lint:ci
- name: Make sure bucket is versioned
if: contains(matrix.tests, ':server-')
env:
AWS_ACCESS_KEY_ID: administrator
AWS_SECRET_ACCESS_KEY: administrator
@ -49,15 +62,27 @@ jobs:
- name: Build Node.js code
run: yarn run build:prod
- name: Run smoke test
if: contains(matrix.tests, ':smoke:')
run: VERBOSE=1 DEBUG=1 MOCHA_WEBDRIVER_HEADLESS=1 yarn run test:smoke
- name: Run python tests
if: contains(matrix.tests, ':python:')
run: yarn run test:python
- name: Run client tests
if: contains(matrix.tests, ':client:')
run: yarn run test:client
- name: Run common tests
if: contains(matrix.tests, ':common:')
run: yarn run test:common
- name: Run server tests with minio and redis
run: MOCHA_WEBDRIVER_HEADLESS=1 yarn run test:server
if: contains(matrix.tests, ':server-')
run: |
export TEST_SPLITS=$(echo $TESTS | sed "s/.*:server-\([^:]*\).*/\1/")
MOCHA_WEBDRIVER_HEADLESS=1 yarn run test:server
env:
GRIST_DOCS_MINIO_ACCESS_KEY: administrator
GRIST_DOCS_MINIO_SECRET_KEY: administrator
@ -68,15 +93,12 @@ jobs:
GRIST_DOCS_MINIO_BUCKET: grist-docs-test
- name: Run main tests without minio and redis
run: MOCHA_WEBDRIVER_HEADLESS=1 yarn run test --exclude '_build/test/server/**/*'
- name: Update candidate branch
if: ${{ github.event_name == 'push' }}
uses: ad-m/github-push-action@8407731efefc0d8f72af254c74276b7a90be36e1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: latest_candidate
force: true
if: contains(matrix.tests, ':nbrowser-')
run: |
export TEST_SPLITS=$(echo $TESTS | sed "s/.*:nbrowser-\([^:]*\).*/\1/")
MOCHA_WEBDRIVER_HEADLESS=1 yarn run test:nbrowser
env:
TESTS: ${{ matrix.tests }}
services:
# https://github.com/bitnami/bitnami-docker-minio/issues/16
@ -103,3 +125,18 @@ jobs:
--health-interval 10s
--health-timeout 5s
--health-retries 5
candidate:
needs: build_and_test
if: ${{ success() && github.event_name == 'push' }}
runs-on: ubuntu-latest
steps:
- name: Fetch new candidate branch
uses: actions/checkout@v3
- name: Update candidate branch
uses: ad-m/github-push-action@8407731efefc0d8f72af254c74276b7a90be36e1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: latest_candidate
force: true

@ -13,10 +13,10 @@
"build:prod": "buildtools/build.sh",
"start:prod": "sandbox/run.sh",
"test": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support TEST_CLEAN_DATABASE=true NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+-b --no-exit} --slow 8000 ${DEBUG:---forbid-only} -g ${GREP_TESTS:-''} '_build/test/common/*.js' '_build/test/client/*.js' '_build/test/nbrowser/*.js' '_build/test/server/**/*.js' '_build/test/gen-server/**/*.js'",
"test:nbrowser": "GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support TEST_CLEAN_DATABASE=true NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+-b --no-exit} ${DEBUG:---forbid-only} -g ${GREP_TESTS:-''} --slow 8000 '_build/test/nbrowser/**/*.js'",
"test:nbrowser": "TEST_SUITE=nbrowser TEST_SUITE_FOR_TIMINGS=nbrowser TIMINGS_FILE=test/timings/nbrowser.txt GRIST_SESSION_COOKIE=grist_test_cookie GRIST_TEST_LOGIN=1 TEST_SUPPORT_API_KEY=api_key_for_support TEST_CLEAN_DATABASE=true NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+-b --no-exit} ${DEBUG:---forbid-only} -g ${GREP_TESTS:-''} --slow 8000 -R test/xunit-file '_build/test/nbrowser/**/*.js'",
"test:client": "GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+'-b'} '_build/test/client/**/*.js'",
"test:common": "GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+'-b'} '_build/test/common/**/*.js'",
"test:server": "GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+'-b'} '_build/test/server/**/*.js' '_build/test/gen-server/**/*.js'",
"test:server": "TEST_SUITE=server TEST_SUITE_FOR_TIMINGS=server TIMINGS_FILE=test/timings/server.txt GRIST_SESSION_COOKIE=grist_test_cookie NODE_PATH=_build:_build/stubs:_build/ext mocha ${DEBUG:+'-b'} -R test/xunit-file '_build/test/server/**/*.js' '_build/test/gen-server/**/*.js'",
"test:smoke": "NODE_PATH=_build:_build/stubs:_build/ext mocha _build/test/nbrowser/Smoke.js",
"test:docker": "./test/test_under_docker.sh",
"test:python": "sandbox_venv3/bin/python sandbox/grist/runtests.py ${GREP_TESTS:+discover -p \"test*${GREP_TESTS}*.py\"}",

@ -1,4 +1,5 @@
--require source-map-support/register
test/report-why-tests-hang
test/init-mocha-webdriver
test/split-tests
test/chai-as-promised

@ -0,0 +1,121 @@
/**
 * This module handles splitting tests for parallelizing them. It is imported by any run
 * of mocha, by virtue of being listed in test/mocha.opts.
*
* It only does anything if TEST_SPLITS is set, which must have the form "3-of-8".
*
* If TEST_SPLITS is set to M-of-N, it is used to divide up all test suites in this mocha run into
* N groups, and runs the Mth of them. Note that M is 1-based, i.e. in [1..N] range. To have all
* tests run, each of the groups 1-of-N through N-of-N must run on the same total set of tests.
*
* The actual breaking into groups is informed by a timings file, defaulting to
* test/timings-all.txt. This has the format "<top-suite> <file-suite-title> <duration-in-ms>".
* Only those lines whose <top-suite> matches process.env.TEST_SUITE_FOR_TIMINGS will be used.
*
* The timings for test/timings-all.txt are prepared by our test reporter and written during
 * a Jenkins run as the timings/timings-all.txt artifact. After tests are added or changed, if
* timings may have changed significantly, it's good to update test/timings-all.txt, so that the
* parallel groups can be evened out as much as possible.
*/
/* global before */
const fs = require('fs');
const { assert } = require('chai');

// Which <top-suite> rows to read from the timings file, and where that file lives.
const testSuite = process.env.TEST_SUITE_FOR_TIMINGS || "unset_suite";
const timingsFile = process.env.TIMINGS_FILE || "test/timings-all.txt";

/**
 * Root-level mocha hook. If TEST_SPLITS is set to "M-of-N", partitions the
 * top-level suites of this mocha run into N groups (guided by recorded
 * timings) and keeps only the Mth group (M is 1-based). No-op when
 * TEST_SPLITS is unset. Fails the run on a malformed or out-of-range spec.
 */
before(function() {
  const testSplits = process.env.TEST_SPLITS;
  if (!testSplits) {
    return;
  }
  const match = testSplits.match(/^(\d+)-of-(\d+)$/);
  if (!match) {
    assert.fail(`Invalid test split spec '${testSplits}': use format 'N-of-M'`);
  }
  const group = Number(match[1]);
  const groupCount = Number(match[2]);
  if (!(group >= 1 && group <= groupCount)) {
    // Bug fix: interpolate groupCount (the message previously printed the
    // literal text "{groupCount}" because the "$" was missing).
    assert.fail(`Invalid test split spec '${testSplits}': index must be in range 1..${groupCount}`);
  }
  // this.test.parent is mocha's root suite; replace its child suites with
  // just the suites belonging to our group.
  const testParent = this.test.parent;
  const timings = getTimings();
  const groups = groupSuites(testParent.suites, timings, groupCount);
  testParent.suites = groups[group - 1]; // Convert to a 0-based index.
  console.log(`Split tests groups; will run group ${group} of ${groupCount}`);
});
/**
 * Load recorded durations from timingsFile for the current test suite.
 * Returns a Map from file-suite-title to duration in ms. A missing timings
 * file yields an empty Map (with a warning); any other read error is rethrown.
 */
function getTimings() {
  const result = new Map();
  try {
    const text = fs.readFileSync(timingsFile, {encoding: 'utf8'});
    for (const row of text.split(/\r?\n/)) {
      // Each useful row has the form: <top-suite> <file-suite-title> <duration-ms>
      const fields = row.split(/\s+/);
      const topSuite = fields[0];
      const title = fields[1];
      const ms = Number(fields[2]);
      if (topSuite === testSuite && !isNaN(ms)) {
        result.set(title, ms);
      }
    }
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err;
    }
    console.warn(`No timings found in ${timingsFile}; proceeding without timings`);
  }
  return result;
}
/**
 * Distribute the given suites into groupCount groups of roughly equal total
 * duration, and return the list of groups (each sorted by title).
 *
 * Greedy partition: walk suites from longest to shortest (per timings),
 * assigning each one to whichever group currently has the smallest total.
 */
function groupSuites(suites, timings, groupCount) {
  // Fallback duration for suites absent from timings: the mean of the known
  // durations, or an arbitrary constant when there are no timings at all.
  let knownTotal = 0;
  for (const dur of timings.values()) {
    knownTotal += dur;
  }
  if (!knownTotal) {
    console.warn("No timings; assuming all tests are equally long");
  }
  const defaultDuration = knownTotal ? knownTotal / timings.size : 1000;

  // Duplicate titles would make timing lookups ambiguous; reject them up front.
  const seenTitles = new Set();
  for (const suite of suites) {
    if (seenTitles.has(suite.title)) {
      assert.fail(`Please fix duplicate suite title: ${suite.title}`);
    }
    seenTitles.add(suite.title);
  }

  // Duration of a suite, falling back to defaultDuration for unknown/bad values.
  const durationOf = (suite) => {
    const t = timings.get(suite.title);
    return (typeof t === 'number' && !isNaN(t)) ? t : defaultDuration;
  };

  const groups = Array.from({length: groupCount}, () => []);
  const totals = new Array(groupCount).fill(0);

  // Longest-first greedy assignment to the currently-lightest group.
  const byDurationDesc = suites.slice().sort((a, b) => durationOf(b) - durationOf(a));
  for (const suite of byDurationDesc) {
    const lightest = totals.indexOf(Math.min(...totals));
    groups[lightest].push(suite);
    totals[lightest] += durationOf(suite);
  }

  // Present each group in alphabetical order by title.
  for (const group of groups) {
    group.sort((a, b) => (a.title < b.title ? -1 : 1));
  }
  return groups;
}

@ -0,0 +1,28 @@
nbrowser ActionLog 14737
nbrowser ChoiceList 33037
nbrowser CustomView 22055
nbrowser CustomWidgets 14958
nbrowser CustomWidgetsConfig 48287
nbrowser DescriptionColumn 4649
nbrowser DuplicateDocument 14042
nbrowser Fork 112089
nbrowser HomeIntro 44706
nbrowser LanguageSettings 25427
nbrowser Localization 10069
nbrowser MultiColumn 455648
nbrowser Pages 24986
nbrowser ReferenceColumns 27590
nbrowser ReferenceList 34333
nbrowser RefTransforms 9072
nbrowser RightPanel 10530
nbrowser RightPanelSelectBy 6255
nbrowser RowMenu 3702
nbrowser saveViewSection 7596
nbrowser SelectBy 5846
nbrowser SelectByRefList 15186
nbrowser SelectByRightPanel 3531
nbrowser SelectBySummary 17516
nbrowser SelectBySummaryRef 5382
nbrowser SelectionSummary 6833
nbrowser Smoke 1800
nbrowser ToggleColumns 6530

@ -0,0 +1,7 @@
server Comm 9557
server generateInitialDocSql 1304
server Authorizer 2375
server DocApi 94358
server DocApi2 730
server HostedStorageManager 220307
server backupSqliteDatabase 4348
Loading…
Cancel
Save