diff --git a/.github/workflows/bookkeeping.yml b/.github/workflows/bookkeeping.yml index 11192c97b9..8a7dee986e 100644 --- a/.github/workflows/bookkeeping.yml +++ b/.github/workflows/bookkeeping.yml @@ -46,21 +46,24 @@ jobs: - name: Build and Start Test Database run: | docker-compose \ - -f docker-compose.test-parallel.yml \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-ci.yml \ up --detach test_db env: TEST_TYPE: ${{ matrix.test_type }} - name: Build Test Container run: | docker-compose \ - -f docker-compose.test-parallel.yml \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-ci.yml \ build test_app env: TEST_TYPE: ${{ matrix.test_type }} - name: Run Tests run: | docker-compose \ - -f docker-compose.test-parallel.yml \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-ci.yml \ run test_app env: TEST_TYPE: ${{ matrix.test_type }} @@ -74,7 +77,8 @@ jobs: - name: Clean Up run: | docker-compose \ - -f docker-compose.test-parallel.yml \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-ci.yml \ down env: TEST_TYPE: ${{ matrix.test_type }} diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 419a6cdd00..591b67673c 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -47,8 +47,20 @@ jobs: -f docker-compose.yml \ -f docker-compose.coverage.yml \ config --quiet - - name: Validate docker-compose.test-parallel.yml + - name: Validate docker-compose.test-parallel-base.yml run: | docker compose \ - -f docker-compose.test-parallel.yml \ + -f docker-compose.test-parallel-base.yml \ + config --quiet + - name: Validate docker-compose.test-parallel-ci.yml + run: | + docker compose \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-ci.yml \ + config --quiet + - name: Validate docker-compose.test-parallel-local.yml + run: | + docker compose \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-local.yml \ config --quiet diff --git a/Dockerfile b/Dockerfile index 1f7f3ffff5..c51dde9944 100644 --- a/Dockerfile +++ b/Dockerfile @@ -57,14 +57,16 @@ CMD [ "/opt/wait-for-it.sh", "-t", "0", "database:3306", "--", "npm", "run", "te # # ---- Test parallel for CI ---- -FROM developmentdependencies as test_parallel - -# Set ARG and ENV for TEST_TYPE -ARG TEST_TYPE -ENV TEST_TYPE=${TEST_TYPE} +FROM developmentdependencies as test_parallel_ci CMD [ "sh", "-c", "/opt/wait-for-it.sh -t 0 test_db:3306 -- npm run test:subset" ] +# +# ---- Test parallel local ---- +FROM developmentdependencies as test_parallel_local + +CMD [ "sh", "-c", "/opt/wait-for-it.sh -t 0 test_db:3306 -- npm run test:subset-local" ] + # # ---- Coverage ---- FROM developmentdependencies as coverage diff --git a/docker-compose.test-parallel.yml b/docker-compose.test-parallel-base.yml similarity index 71% rename from docker-compose.test-parallel.yml rename to docker-compose.test-parallel-base.yml index bd5c38b91b..73eff90355 100644 --- a/docker-compose.test-parallel.yml +++ b/docker-compose.test-parallel-base.yml @@ -5,9 +5,6 @@ services: build: context: . 
dockerfile: Dockerfile - target: test_parallel - args: - TEST_TYPE: ${TEST_TYPE} environment: TEST_TYPE: ${TEST_TYPE} NODE_ENV: test @@ -16,9 +13,9 @@ services: PAGE_ITEMS_LIMIT: 100 DATABASE_HOST: test_db ALI_ECS_GUI_URL: "${ALI_ECS_GUI_URL:-http://localhost:8080}" - FLP_INFOLOGGER_URL: "${ALI_ECS_GUI_URL:-http://localhost:8081}" - QC_GUI_URL: "${ALI_ECS_GUI_URL:-http://localhost:8082}" - ALI_FLP_INDEX_URL: "${ALI_ECS_GUI_URL:-http://localhost:80}" + FLP_INFOLOGGER_URL: "${FLP_INFOLOGGER_URL:-http://localhost:8081}" + QC_GUI_URL: "${QC_GUI_URL:-http://localhost:8082}" + ALI_FLP_INDEX_URL: "${ALI_FLP_INDEX_URL:-http://localhost:80}" links: - test_db restart: "no" @@ -30,12 +27,8 @@ services: source: ./scripts target: /opt - type: bind - read_only: false source: ./database/storage target: /var/storage - - type: bind - source: ${GITHUB_WORKSPACE}/coverage - target: /usr/src/app/coverage/${TEST_TYPE} read_only: false test_db: @@ -45,7 +38,6 @@ services: restart: unless-stopped volumes: - type: volume - read_only: false source: database-data target: /var/lib/mysql - type: bind @@ -56,8 +48,6 @@ services: read_only: true source: ./database/populate target: /docker-entrypoint-initdb.d - ports: - - "3306:3306" volumes: database-data: diff --git a/docker-compose.test-parallel-ci.yml b/docker-compose.test-parallel-ci.yml new file mode 100644 index 0000000000..e6853a58d6 --- /dev/null +++ b/docker-compose.test-parallel-ci.yml @@ -0,0 +1,11 @@ +version: '3.7' + +services: + test_app: + build: + target: test_parallel_ci + volumes: + - type: bind + source: ${GITHUB_WORKSPACE}/coverage + target: /usr/src/app/coverage/${TEST_TYPE} + read_only: false diff --git a/docker-compose.test-parallel-local.yml b/docker-compose.test-parallel-local.yml new file mode 100644 index 0000000000..483af32caf --- /dev/null +++ b/docker-compose.test-parallel-local.yml @@ -0,0 +1,7 @@ +version: '3.7' + +services: + test_app: + image: test-parallel-application:latest + build: + target: test_parallel_local diff --git a/docs/parallel-testing.md b/docs/parallel-testing.md new file mode 100644 index 0000000000..beb0ee50f5 --- /dev/null +++ b/docs/parallel-testing.md @@ -0,0 +1,77 @@ +# Parallel testing + +## Prerequisites + +Parallel testing requires the following programs to run: +- docker ([documentation](https://docs.docker.com/engine/install/)) +- npm, which is bundled with nodejs ([download](https://nodejs.org/en/download/)) + +# How it works +Parallel testing is implemented to decrease the time it takes to run all test suites by distributing them across multiple Docker containers. +the `child_process` module of Node.js is used to spawn worker processes. Each worker process is responsible for running a portion of the test suites. +The main.js script begins by building a Docker image using the Docker Compose files. This image is then tagged for use by the worker containers. +Before starting the workers, any existing test logs are cleaned up to ensure a fresh environment for the new test runs. + +Workers are spawned based on the predefined number set in the variable `amountOfWorkers` in main.js. Each worker is a Node.js process created using the fork() method from the child_process module. +These workers are isolated in that they can execute independently and in parallel, each in their own Docker container. + +Each worker initially requests a test suite to execute from the testSuites stack. Once a worker completes a test suite, it requests the next available suite from the stack. 
+This continues until there are no more test suites left to assign in the stack, at which point the worker shuts down.
+
+Inside each worker, the test-runner.js script handles the execution of the test suites. This script starts the Docker Compose target defined in the Dockerfile, which initiates a test run based on the `TEST_TYPE` given by main.js.
+Custom logging and results handling are implemented using a custom Mocha reporter (custom-mocha-reporter.js). This reporter logs the results of the test executions into separate files.
+These log files are then used to create the final result output.
+
+# How to configure
+All configuration can be done in main.js:
+1. Set `amountOfWorkers` to as many workers as your system can handle (the default is 3)
+2. Make sure the `testSuites` stack in main.js is populated with all required suites
+3. Run the tests in parallel
+
+# How to run
+After configuring everything, simply run:
+```sh
+npm run docker-test:parallel
+```
+The number of workers can also be set using the `WORKERS` environment variable:
+```sh
+WORKERS=5 npm run docker-test:parallel
+```
+The path where the result logs are stored can be set as well:
+```sh
+STORAGE_PATH='./path/of/choice' npm run docker-test:parallel
+```
+
+## Docker configuration files
+
+### Docker Compose files
+
+Two Docker Compose files are used for local parallel testing:
+
+1. `docker-compose.test-parallel-base.yml` - Sets up the base testing environment, including the application and database services.
+2. `docker-compose.test-parallel-local.yml` - Configures the test-specific settings such as the image and build target.
+
+### Dockerfile
+
+The `Dockerfile` is structured in multiple stages:
+
+- **Base:** Sets up the Node.js environment.
+- **Development Dependencies:** Installs tools and libraries required for testing.
+- **Test Parallel Local:** Runs the command to execute a subset of tests.
+
+## Testing scripts
+
+### npm command
+
+- **test:subset-local**: Executes the Mocha test runner for the subset of tests specified by the `TEST_TYPE` environment variable, and uses custom-mocha-reporter.js to create the logs consumed by the Node.js scripts.
+
+### Node.js scripts
+
+- **main.js**: Handles the initialization and distribution of tests across multiple worker processes.
+- **test-runner.js**: Manages the execution of tests for a given subset, handling the Docker commands and environment setup.
+- **custom-mocha-reporter.js**: A custom Mocha reporter that logs test results into separate files, which main.js uses to create the final result output.
+
+### Test suites
+For every test suite that needs to be run by one of the workers, a script named `test-<suiteName>.js` is provided in the test/scripts directory. The suite name is supplied by the Node.js scripts and used in the npm command.
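To make the stack-plus-workers model described in docs/parallel-testing.md concrete, here is a minimal, self-contained sketch of the distribution pattern. It is not the repository's main.js or test-runner.js — the suite list is shortened, only a single worker is spawned, and the "work" is merely logged — but the request_next_test / no_more_tests handshake mirrors the MessageKey values used by the scripts further down in this diff.

```js
// Minimal sketch of stack-based work distribution over child_process.fork().
// Hypothetical example: one worker, three suite names, no Docker involved.
const { fork } = require('child_process');

if (process.argv[2] === 'worker') {
    // Worker side: run whatever suite the main process hands over,
    // then ask for the next one until told there is nothing left.
    process.on('message', (message) => {
        if (message === 'no_more_tests') {
            process.exit(0);
        }
        console.log(`worker ${process.pid} running suite: ${message.test}`);
        process.send('request_next_test');
    });
    process.send('request_next_test');
} else {
    // Main side: pop a suite off the shared stack every time a worker asks.
    const remainingTests = ['unit', 'api', 'tags'];
    const worker = fork(__filename, ['worker']);
    worker.on('message', () => {
        if (remainingTests.length > 0) {
            worker.send({ test: remainingTests.pop() });
        } else {
            worker.send('no_more_tests');
        }
    });
}
```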
diff --git a/package-lock.json b/package-lock.json index f0bf6e34a2..f4a8d3ade8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -28,8 +28,10 @@ "cls-hooked": "4.2.2", "d3": "7.8.5", "deepmerge": "4.3.0", + "dotenv": "16.4.5", "joi": "17.13.1", "mariadb": "3.0.0", + "mkdirp": "3.0.1", "multer": "1.4.5-lts.1", "node-fetch": "3.3.1", "sequelize": "6.37.0", @@ -3164,6 +3166,18 @@ "node": ">=6.0.0" } }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/dottie": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.6.tgz", @@ -5443,10 +5457,14 @@ } }, "node_modules/minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", - "inBundle": true + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "inBundle": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/mithril": { "version": "1.1.7", @@ -5465,15 +5483,18 @@ "dev": true }, "node_modules/mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "inBundle": true, - "dependencies": { - "minimist": "^1.2.5" - }, + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "license": "MIT", "bin": { - "mkdirp": "bin/cmd.js" + "mkdirp": "dist/cjs/src/bin.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/mocha": { @@ -5661,6 +5682,19 @@ "node": ">= 6.0.0" } }, + "node_modules/multer/node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "inBundle": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, "node_modules/mysql": { "version": "2.18.1", "resolved": "https://registry.npmjs.org/mysql/-/mysql-2.18.1.tgz", @@ -10816,6 +10850,11 @@ "esutils": "^2.0.2" } }, + "dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==" + }, "dottie": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.6.tgz", @@ -12507,9 +12546,9 @@ } }, "minimist": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", - "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" }, "mithril": { "version": "1.1.7", @@ -12523,12 +12562,9 @@ "dev": true }, "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", - "requires": { - "minimist": "^1.2.5" - } + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" }, "mocha": { "version": "10.6.0", @@ -12665,6 +12701,16 @@ "object-assign": "^4.1.1", "type-is": "^1.6.4", "xtend": "^4.0.0" + }, + "dependencies": { + "mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "requires": { + "minimist": "^1.2.6" + } + } } }, "mysql": { diff --git a/package.json b/package.json index 2af30443e7..a46bd3f0f7 100644 --- a/package.json +++ b/package.json @@ -12,9 +12,10 @@ "start:prod": "node lib/main.js", "test": "mocha --exit --timeout 0", "test:subset": "nyc -- mocha --exit --timeout 0 test/scripts/test-${TEST_TYPE}.js && nyc report --report-dir=/usr/src/app/coverage/${TEST_TYPE} --reporter=json", + "test:subset-local": "mocha --exit --timeout 0 --reporter test/scripts/parallel-local/custom-mocha-reporter.js test/scripts/test-${TEST_TYPE}.js", "docker-run": "docker compose -f docker-compose.yml -f docker-compose.dev.yml up --build", "docker-test": "docker compose -p test -f docker-compose.yml -f docker-compose.test.yml up --build --abort-on-container-exit", - "docker-test:parallel": "docker compose -p test-parallel -f docker-compose.test-parallel.yml up --build", + "docker-test:parallel": "node test/scripts/parallel-local/main.js", "docker-update": "node scripts/update-dockerfile.js" }, "engines": { @@ -27,8 +28,10 @@ "cls-hooked": "4.2.2", "d3": "7.8.5", "deepmerge": "4.3.0", + "dotenv": "16.4.5", "joi": "17.13.1", "mariadb": "3.0.0", + "mkdirp": "3.0.1", "multer": "1.4.5-lts.1", "node-fetch": "3.3.1", "sequelize": "6.37.0", diff --git a/test/scripts/parallel-local/custom-mocha-reporter.js b/test/scripts/parallel-local/custom-mocha-reporter.js new file mode 100644 index 0000000000..d6fbec7dd9 --- /dev/null +++ b/test/scripts/parallel-local/custom-mocha-reporter.js @@ -0,0 +1,129 @@ +const Mocha = require('mocha'); + +const { Base } = Mocha.reporters; +const fs = require('fs'); +const mkdirp = require('mkdirp'); + +/** + * CustomReporter for Mocha that logs test results into separate files. + * + * @param {Mocha.Runner} runner - The Mocha runner instance. + * @returns {CustomReporter} An instance of CustomReporter, set up with the given Mocha runner. + */ +function CustomReporter(runner) { + Base.call(this, runner); + + const testType = process.env.TEST_TYPE || 'default'; + const basePath = process.env.STORAGE_PATH ? `${process.env.STORAGE_PATH}/${testType}` : `/var/storage/${testType}`; + mkdirp.sync(basePath); + + const testsBuffer = []; + const failsBuffer = []; + const resultsBuffer = []; + + const suiteStack = []; + const indent = ' '; + let failCount = 0; + + /** + * Event handler for 'suite' event, triggered when a suite begins. 
+ * Logs the suite title with indentation corresponding to its level in the suite hierarchy. + * + * @param {Mocha.Suite} suite - The Mocha suite instance. + */ + runner.on('suite', (suite) => { + if (suite.title && suite.title !== 'Bookkeeping') { + suiteStack.push(suite.title); + testsBuffer.push(`${indent.repeat(suiteStack.length)}${suite.title}`); + } + }); + + /** + * Event handler for 'suite end' event, triggered when a suite ends. + * Manages the current suite context by popping the suite from the stack. + * + * @param {Mocha.Suite} suite - The Mocha suite instance. + */ + runner.on('suite end', (suite) => { + if (suite.title) { + suiteStack.pop(); + } + }); + + /** + * Event handler for 'pass' event, triggered when a test passes. + * Logs a passing test message with indentation indicating its level within the suite. + * + * @param {Mocha.Test} test - The Mocha test instance that passed. + */ + runner.on('pass', (test) => { + testsBuffer.push(`${indent.repeat(suiteStack.length + 1)}✔ ${test.title} (${test.duration}ms)`); + }); + + /** + * Event handler for 'fail' event, triggered when a test fails. + * Logs a failing test message in the tests buffer and detailed error information in the fails buffer. + * + * @param {Mocha.Test} test - The Mocha test instance that failed. + * @param {Error} err - The error that caused the test to fail. + */ + runner.on('fail', (test, err) => { + failCount++; + testsBuffer.push(`${indent.repeat(suiteStack.length + 1)}${failCount}) ${test.title}`); + + suiteStack.forEach((title, idx) => { + // Add the failCount only next to the first level line and an extra space to all subsequent lines. + if (idx === 0) { + // First level line with failCount prefixed. + failsBuffer.push(`${indent.repeat(idx + 1)}${failCount}) ${title}`); + } else { + // Subsequent lines with one extra space. + failsBuffer.push(`${indent.repeat(idx + 2)} ${title}`); + } + }); + + // Add one additional indent level to all subsequent details. + const detailIndentDouble = indent.repeat(suiteStack.length + 2); + const detailIndentSingle = indent.repeat(suiteStack.length); + failsBuffer.push(`${detailIndentDouble} ${test.title}:`); + failsBuffer.push(`\n${detailIndentSingle} AssertionError: ${err.message}`); + failsBuffer.push(`${detailIndentSingle} + expected - actual`); + failsBuffer.push(`\n${detailIndentSingle} -${err.actual}`); + failsBuffer.push(`${detailIndentSingle} +${err.expected}`); + failsBuffer.push(`\n${detailIndentSingle} at ${err.stack}`); + + // Add a blank line after each failure for better readability + failsBuffer.push('\n'); + }); + + /** + * Event handler for 'pending' event, triggered when a test is pending. + * Logs a pending test message with indentation indicating its level within the suite. + * + * @param {Mocha.Test} test - The Mocha test instance that is pending. + */ + runner.on('pending', (test) => { + testsBuffer.push(`${indent.repeat(suiteStack.length + 1)}- ${test.title}`); + }); + + /** + * Event handler for 'end' event, triggered when all tests have completed. + * Logs the final passing, failing, and pending counts to the results buffer and writes all buffers to their respective files. 
+ */ + runner.on('end', () => { + if (testsBuffer.length > 0) { + fs.writeFileSync(`${basePath}/tests.log`, testsBuffer.join('\n')); + } + if (failsBuffer.length > 0) { + fs.writeFileSync(`${basePath}/fails.log`, failsBuffer.join('\n')); + } + resultsBuffer.push(`${runner.stats.passes} Passing`); + resultsBuffer.push(`${runner.stats.failures} Failing`); + resultsBuffer.push(`${runner.stats.pending} Pending`); + if (resultsBuffer.length > 0) { + fs.writeFileSync(`${basePath}/results.log`, resultsBuffer.join('\n')); + } + }); +} + +module.exports = CustomReporter; diff --git a/test/scripts/parallel-local/main.js b/test/scripts/parallel-local/main.js new file mode 100644 index 0000000000..6929e4737c --- /dev/null +++ b/test/scripts/parallel-local/main.js @@ -0,0 +1,287 @@ +const dotenv = require('dotenv'); +const { fork, execSync } = require('child_process'); +const path = require('path'); +const fs = require('fs'); +const { BASE_STORAGE_PATH, MessageKey } = require('./test-runner'); + +dotenv.config(); + +const fsPromises = fs.promises; +const startTime = new Date(); +const testSuites = [ + 'unit', + 'api', + 'lhcPeriods', + 'lhcFills', + 'logs', + 'envs', + 'runs', + 'subsystems', + 'tags', + 'flps', + 'home', + 'about', + 'eosReport', + 'dataPasses', + 'simulationPasses', + 'qcFlagTypes', + 'qcFlags', +]; +const remainingTests = [...testSuites]; + +const amountOfWorkers = parseInt(process.env?.WORKERS, 10) || 3; +const workersExited = new Set(); + +const imageTag = 'test-parallel-application:latest'; + +/** + * Builds the Docker image used by all the workers. + * + * @returns {void} + */ +const buildDockerImage = () => { + const command = 'docker-compose -f docker-compose.test-parallel-base.yml -f docker-compose.test-parallel-local.yml build'; + execSync(`${command} && docker tag test-parallel-application ${imageTag}`, { stdio: 'inherit' }); +}; + +/** + * Initializes and starts worker processes to handle tests in parallel. + * @returns {void} + */ +const initializeWorkers = () => { + cleanupTestDirectories(testSuites); + + const workers = []; + + for (let i = 0; i < amountOfWorkers; i++) { + const workerName = `worker-${i}`; + const worker = fork(path.resolve(__dirname, 'test-runner.js'), [workerName], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'], + }); + + setupWorkerListeners(worker, workerName); + assignTestToWorker(worker, workerName); + workers.push(worker); + } +}; + +/** + * Sets up event listeners for a worker process. + * @param {ChildProcess} worker - The worker process. + * @param {string} workerName - The worker name assigned to the worker. + * @returns {void} + */ +const setupWorkerListeners = (worker, workerName) => { + // eslint-disable-next-line no-console + worker.stdout.on('data', (data) => console.log(`${workerName}: ${data.toString()}`)); + // eslint-disable-next-line no-console + worker.stderr.on('data', (data) => console.error(`${workerName} Error: ${data.toString()}`)); + + worker.on('message', (msg) => handleWorkerMessage(msg, worker, workerName)); + worker.on('exit', (code) => handleWorkerExit(code, workerName)); +}; + +/** + * Handles messages from worker processes, potentially assigning new tests. + * @param {string} msg - The message from the worker. + * @param {ChildProcess} worker - The worker process. + * @param {string} workerName - The worker name. 
+ * @returns {void} + */ +const handleWorkerMessage = (msg, worker, workerName) => { + if (msg === MessageKey.RequestNextTest) { + assignTestToWorker(worker, workerName); + } +}; + +/** + * Assigns a test to a worker and logs the activity. + * @param {ChildProcess} worker - The worker process. + * @param {string} workerName - The worker name. + * @returns {void} + */ +const assignTestToWorker = (worker, workerName) => { + if (remainingTests.length > 0) { + const test = remainingTests.pop(); + // eslint-disable-next-line no-console + console.log(`${workerName} starting new suite: ${test} (${testSuites.length - remainingTests.length}/${testSuites.length})`); + worker.send({ test, workerName }); + } else { + // eslint-disable-next-line no-console + console.log(`${workerName} found no more tests...`); + worker.send(MessageKey.NoMoreTests); + } +}; + +/** + * Logs the exit of worker processes and checks for completion. + * @param {number} code - The exit code of the worker process. + * @param {string} workerName - The worker name. + * @returns {void} + */ +const handleWorkerExit = (code, workerName) => { + if (code !== 0) { + // eslint-disable-next-line no-console + console.error(`Worker ${workerName} exited with code ${code}`); + } else { + // eslint-disable-next-line no-console + console.log(`Worker ${workerName} completed successfully`); + } + workersExited.add(workerName); + handleAllWorkersExited(); +}; + +/** + * Handles actions to be taken once all workers have exited. + * This function checks if all worker processes have exited. If so, it proceeds to display the results + * of the testing process. This function shows results only once all workers have stopped running, regardless of + * whether they completed all assigned tests in case of a crash. + * @returns {void} + */ +const handleAllWorkersExited = () => { + if (workersExited.size === amountOfWorkers) { + displayResults(); + } +}; + +/** + * Displays the results of all tests by reading specific log files and aggregating results. + * It first prints the contents of 'tests.log', calculates totals from 'results.log', + * and finally reads 'fails.log' for any failures. + * @returns {void} + */ +const displayResults = async () => { + // eslint-disable-next-line no-console + console.log('\nResults:\n'); + await readAllLogFiles('tests.log'); + + // eslint-disable-next-line no-console + console.log('\n'); + + let totalPassing = 0; + let totalFailing = 0; + let totalPending = 0; + + for (const testSuiteName of testSuites) { + totalPassing += aggregateResults(testSuiteName, 'results.log', 'Passing'); + totalFailing += aggregateResults(testSuiteName, 'results.log', 'Failing'); + totalPending += aggregateResults(testSuiteName, 'results.log', 'Pending'); + } + + // Calculate elapsed time + const endTime = new Date(); + const elapsed = new Date(endTime - startTime); + const minutes = elapsed.getUTCMinutes(); + + // Display total passing with elapsed time in minutes and seconds + // eslint-disable-next-line no-console + console.log(' ', totalPassing, 'Passing', `(${minutes}m)`); + // eslint-disable-next-line no-console + console.log(' ', totalFailing, 'Failing'); + if (totalPending > 0) { + // eslint-disable-next-line no-console + console.log(' ', totalPending, 'Pending'); + } + // eslint-disable-next-line no-console + console.log('\n'); + + await readAllLogFiles('fails.log'); +}; + +/** + * Asynchronously reads and logs the contents of a specified log file from the directory of each test suite. 
+ * This function finds the log file in the test suite's directory, reads it if present, and logs the content. + * If the file is not found or an error occurs during reading, an error is logged. + * + * @param {string} testSuiteName - The name of the test suite directory. + * @param {string} logFileName - The name of the log file to read. + * @returns {Promise} A Promise that resolves when the file has been read and logged, or an error has occurred. + */ +const dumpLogFileContentToConsole = async (testSuiteName, logFileName) => { + const filePath = path.join(BASE_STORAGE_PATH, testSuiteName, logFileName); + try { + const data = await fsPromises.readFile(filePath, 'utf8'); + // eslint-disable-next-line no-console + console.log(data); + } catch (err) { + // Don't log error when no directories are found, because directories are managed by the custom mocha reporter. + if (!err.message.includes('ENOENT')) { + // eslint-disable-next-line no-console + console.error(`Error reading log file at ${filePath}: ${err}`); + } + } +}; + +/** + * Initiates the concurrent reading of a specified log file for all test suites. Uses dumpLogFileContentToConsole to perform + * concurrent asynchronous read operations on each test suite's specified log file. Uses Promise.all to manage the concurrency, + * ensuring all read operations are initiated at the same time and handling their failure. + * + * @param {string} logFileName - The name of the log file to read for each test suite (e.g., 'tests.log' or 'fails.log'). + * @returns {Promise} A Promise that resolves when all log files have been read and logged, or rejects if any read operation fails. + */ +const readAllLogFiles = async (logFileName) => { + const readOperations = testSuites.map((testSuiteName) => dumpLogFileContentToConsole(testSuiteName, logFileName)); + await Promise.all(readOperations) + .catch((err) => { + // eslint-disable-next-line no-console + console.error('An error occurred while reading log files:', err); + }); +}; + +/** + * Aggregates results by counting amount of occurrences of passed, failing, and pending tests. + * @param {string} testSuiteName - The name of the test suite. + * @param {string} logFileName - The log file from which to read and count results. + * @param {string} searchKeyword - The keyword to search for in the log entries (e.g., "Passing"). + * @returns {number} The total count of occurrences of the search keyword. + */ +const aggregateResults = (testSuiteName, logFileName, searchKeyword) => { + const testDirectoryPath = path.join(BASE_STORAGE_PATH, testSuiteName); + try { + const files = fs.readdirSync(testDirectoryPath); + const file = files.find((file) => file === logFileName); + if (file) { + const filePath = path.join(testDirectoryPath, file); + const data = fs.readFileSync(filePath, 'utf8'); + return data.split('\n') + .map((line) => line.trim()) // Remove any leading or trailing whitespace + .filter((line) => line.endsWith(searchKeyword)) // Ensure the line ends with the keyword + .reduce((acc, line) => { + const parts = line.split(' '); // Split the line into parts + const count = parseInt(parts[0], 10); // The number is the first part of the line + return acc + (isNaN(count) ? 0 : count); // Add it to the accumulator ensuring it's a number + }, 0); + } + } catch (err) { + // eslint-disable-next-line no-console + console.error(`Error reading directory for ${testSuiteName}: ${err}`); + } + return 0; +}; + +/** + * Cleans up directories for each test suite before tests are run. 
+ * This function iterates through each test suite and removes its corresponding directory, + * ensuring the testing environment is clean for the run. + * + * @param {Array} testSuites - Array of test suite names whose directories are to be cleaned up. + * @returns {void} + */ +const cleanupTestDirectories = (testSuites) => { + testSuites.forEach((testSuiteName) => { + const testDirectoryPath = path.join(BASE_STORAGE_PATH, testSuiteName); + try { + fs.rmSync(testDirectoryPath, { recursive: true }); + } catch (err) { + // Don't log error when no directories are found, because directories are managed by the custom mocha reporter. + if (!err.message.includes('ENOENT')) { + // eslint-disable-next-line no-console + console.error(`Error removing directory ${testDirectoryPath}: ${err}`); + } + } + }); +}; + +buildDockerImage(); +initializeWorkers(); diff --git a/test/scripts/parallel-local/test-runner.js b/test/scripts/parallel-local/test-runner.js new file mode 100644 index 0000000000..d11315b35a --- /dev/null +++ b/test/scripts/parallel-local/test-runner.js @@ -0,0 +1,100 @@ +const { exec } = require('child_process'); +const fs = require('fs'); +const path = require('path'); + +const BASE_STORAGE_PATH = process.env.STORAGE_PATH || './database/storage'; +const MessageKey = Object.freeze({ + NoMoreTests: 'no_more_tests', + RequestNextTest: 'request_next_test', +}); + +/** + * Ensures the directory exists before executing tests. + * @param {string} testType The type of the test. + * @returns {void} + */ +const createTestDirectoryIfNotExist = (testType) => { + const dirPath = path.join(BASE_STORAGE_PATH, testType); + if (!fs.existsSync(dirPath)) { + fs.mkdirSync(dirPath, { recursive: true }); + } +}; + +/** + * Handles incoming messages to determine test execution workflow. + * @param {string|Object} message - Message received from the main process. + * @returns {void} + */ +const processMessage = (message) => { + if (message === MessageKey.NoMoreTests) { + process.exit(); + } else { + manageTestExecution(message); + } +}; + +process.on('message', processMessage); + +/** + * Executes a set of tests based on received settings. + * @param {Object} testConfiguration - Configuration for the test. + * @returns {void} + */ +const manageTestExecution = (testConfiguration) => { + createTestDirectoryIfNotExist(testConfiguration.test); + executeTest(testConfiguration) + .then(() => process.send(MessageKey.RequestNextTest)) + .catch((error) => { + // eslint-disable-next-line no-console + console.error('Test execution error:', error); + process.send(MessageKey.RequestNextTest); + }); +}; + +/** + * Executes a specified test using Docker Compose. + * @param {Object} testConfiguration - Test details including type and worker name. + * @returns {Promise} Resolves on successful test execution. + */ +const executeTest = ({ test, workerName }) => { + const dockerCommand = buildDockerCommand(workerName); + return new Promise((resolve) => { + executeDockerCommand(dockerCommand, test, workerName, resolve); + }); +}; + +/** + * Constructs the Docker Compose command based on test parameters. + * @param {string} workerName - Name of the worker under test. + * @returns {string} Docker command string. + */ +const buildDockerCommand = (workerName) => + `COMPOSE_PROJECT_NAME=${workerName} \ + docker-compose \ + -f docker-compose.test-parallel-base.yml \ + -f docker-compose.test-parallel-local.yml \ + up --abort-on-container-exit`; + +/** + * Executes the Docker command and manages the process's output and lifecycle. 
+ * @param {string} command - Docker command to be executed. + * @param {string} testType - Type of test being executed. + * @param {string} workerName - Name of the worker. + * @param {Function} resolve - Function to resolve the promise once execution completes. + * @returns {void} + */ +const executeDockerCommand = (command, testType, workerName, resolve) => { + const environment = { + ...process.env, + TEST_TYPE: testType, + }; + + exec(command, { env: environment }, () => { + resolve(); + }); +}; + +module.exports = { + BASE_STORAGE_PATH, + MessageKey, +};
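For reference, this is roughly what a single worker ends up executing for one suite once buildDockerCommand and executeDockerCommand are combined, flattened into a standalone sketch. The worker name and suite name are hypothetical, and it assumes the compose files introduced in this PR are present in the working directory.

```js
// Sketch of one worker's per-suite invocation: an isolated compose project
// (named after the worker) running the "local" target for a single TEST_TYPE.
const { execSync } = require('child_process');

const workerName = 'worker-0'; // hypothetical; becomes the compose project name
const testType = 'tags';       // hypothetical; selects test/scripts/test-tags.js

const command = [
    `COMPOSE_PROJECT_NAME=${workerName}`,
    'docker-compose',
    '-f docker-compose.test-parallel-base.yml',
    '-f docker-compose.test-parallel-local.yml',
    'up --abort-on-container-exit',
].join(' ');

execSync(command, {
    stdio: 'inherit',
    env: { ...process.env, TEST_TYPE: testType },
});
```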