From 0ca754bd130e8f6271882d331902ae724d4e4fde Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Thu, 6 Jan 2022 12:10:25 -0500 Subject: [PATCH 01/11] chore: improve test filtering to use mocha hooks --- .mocharc.json | 1 + test/tools/sdam_viz => etc/sdam_viz.js | 8 +- test/tools/deprecate_warning_test_program.js | 28 ----- test/tools/docker-mongodb/Dockerfile | 78 ------------ .../tools/docker-mongodb/docker-entrypoint.sh | 37 ------ test/tools/docker_cluster.sh | 59 --------- test/tools/run_each_test.sh | 9 -- test/tools/runner/index.js | 119 ++++++++---------- .../runner/plugins/session_leak_checker.js | 6 +- test/tools/unified-spec-runner/schema.ts | 2 + 10 files changed, 65 insertions(+), 282 deletions(-) rename test/tools/sdam_viz => etc/sdam_viz.js (93%) mode change 100755 => 100644 delete mode 100644 test/tools/deprecate_warning_test_program.js delete mode 100644 test/tools/docker-mongodb/Dockerfile delete mode 100644 test/tools/docker-mongodb/docker-entrypoint.sh delete mode 100755 test/tools/docker_cluster.sh delete mode 100755 test/tools/run_each_test.sh diff --git a/.mocharc.json b/.mocharc.json index 36ef3d057f..c7c394bfd2 100644 --- a/.mocharc.json +++ b/.mocharc.json @@ -13,5 +13,6 @@ "recursive": true, "timeout": 60000, "reporter": "test/tools/reporter/mongodb_reporter.js", + "sort": true, "color": true } diff --git a/test/tools/sdam_viz b/etc/sdam_viz.js old mode 100755 new mode 100644 similarity index 93% rename from test/tools/sdam_viz rename to etc/sdam_viz.js index 2ac76a670f..e9b6215f47 --- a/test/tools/sdam_viz +++ b/etc/sdam_viz.js @@ -1,9 +1,9 @@ -#!/usr/bin/env node -'use strict'; +/* eslint-disable no-console */ +/* eslint-disable @typescript-eslint/no-var-requires */ const { MongoClient } = require('../../src'); const visualizeMonitoringEvents = require('./utils').visualizeMonitoringEvents; -const { now, calculateDurationInMs } = require('../../lib/utils'); +const { now, calculateDurationInMs } = require('../../src/utils'); const chalk = require('chalk'); const argv = require('yargs') .usage('Usage: $0 [options] ') @@ -84,7 +84,7 @@ async function scheduleWriteWorkload(client) { try { const start = now(); - const result = await client.db('test').collection('test').insertOne({ a: 42 }); + await client.db('test').collection('test').insertOne({ a: 42 }); averageWriteMS = 0.2 * calculateDurationInMs(start) + 0.8 * averageWriteMS; completedWriteWorkloads++; diff --git a/test/tools/deprecate_warning_test_program.js b/test/tools/deprecate_warning_test_program.js deleted file mode 100644 index 14f75572d6..0000000000 --- a/test/tools/deprecate_warning_test_program.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -// prevent this file from being imported; it is only for use in functional/deprecate_warning_tests.js -if (require.main !== module) { - throw new Error('This file is not meant to be imported'); -} - -const deprecateOptions = require('../../src/utils').deprecateOptions; - -const testDeprecationFlags = deprecateOptions( - { - name: 'testDeprecationFlags', - deprecatedOptions: ['maxScan', 'snapshot', 'fields'], - optionsIndex: 0 - }, - options => { - if (options) options = null; - } -); - -testDeprecationFlags({ maxScan: 0 }); - -// for tests that throw error on calling deprecated fn - this should never happen; stdout should be empty -if (process.argv[2]) { - console.log(process.argv[2]); -} - -process.nextTick(() => process.exit()); diff --git a/test/tools/docker-mongodb/Dockerfile b/test/tools/docker-mongodb/Dockerfile deleted file mode 100644 index 
bd54ecdf62..0000000000 --- a/test/tools/docker-mongodb/Dockerfile +++ /dev/null @@ -1,78 +0,0 @@ -# Stage 1: install general dependencies -# Separating the build into multiple stages lets Docker skip rebuilding previous stages, improving build times e.g. when only updating the entrypoint script -# `docker build --no-cache` to force a full rebuild, e.g. when new server versions are released - -FROM ubuntu:bionic as mtools - -RUN apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y \ - nodejs-dev node-gyp npm curl \ - python3-pip python3-dev python3-setuptools python3-wheel \ - build-essential libssl1.0-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN pip3 install wheel -RUN pip3 install psutil pymongo mtools - -# Stage 2: install m via npm and preload "hot" versions of MongoDB -# This allows a fresh container to skip downloading common versions of MongoDB, at the cost of increased image size - -FROM mtools as mongo_preloaded - -ENV MONGO_VERSION latest -ARG HOSTNAME - -RUN npm install -g m - -RUN mkdir /data - -# preload mongo binaries -RUN m 3.6 -RUN m 4.0 -RUN m 4.2 -RUN m 4.4 -RUN m latest - -# Stage 3: add entrypoint script - -FROM mongo_preloaded - -COPY docker-entrypoint.sh /entrypoint.sh -RUN chmod +x /entrypoint.sh - -ENTRYPOINT ["/entrypoint.sh"] - -# Example usage: - -# 1. Build the docker image and tag it as e.g. `docker-mongodb` -# Then in the same folder as this Dockerfile, run -# -# > docker build -t [--no-cache] docker-mongodb . -# Note: passing --no-cache will force a full rebuild i.e. if a new version of the server is released; otherwise it should be omitted to reduce build time - -# 2. Run the appropriate topology -# -# a) single -# -# > docker run --rm -d -p 27017:27017 -e MONGO_VERSION=4.2 -e HOSTNAME=$(hostname) docker-mongodb single -# Note: passing the hostname is required for the networking to work for a single server, but not for replica/sharded -# -# b) replica set -# -# > docker run --rm -d -p 31000-31003:31000-31003 -e MONGO_VERSION=3.6 docker-mongodb replica -# -# c) sharded cluster -# -# > docker run --rm -d -p 51000-51006:51000-51006 -e MONGO_VERSION=latest docker-mongodb sharded - -# 3. See what's running -# -# > docker ps - -# 4. Follow output -# -# > docker logs -f - -# 5. Run arbitrary mlaunch commands -# -# > docker run --rm -d -p 27017:27017 -e MONGO_VERSION=4.2 docker-mongodb mlaunch init --dir /data --bind_ip 0.0.0.0 --hostname $(hostname) --single --setParameter enableTestCommands=1 diff --git a/test/tools/docker-mongodb/docker-entrypoint.sh b/test/tools/docker-mongodb/docker-entrypoint.sh deleted file mode 100644 index b845b693b6..0000000000 --- a/test/tools/docker-mongodb/docker-entrypoint.sh +++ /dev/null @@ -1,37 +0,0 @@ -#! /bin/bash -set -e - -if [ "${1:0:1}" = '-' ]; then - set -- mlaunch "$@" -fi - -if [ "$1" = 'mlaunch' ]; then - if [ -f /data/.mlaunch_startup ] ; then - echo 'Already initialized. Ignoring provided command!' 
- mlaunch start - else - m $MONGO_VERSION - $@ - fi -elif [ "$1" = 'single' ]; then - m $MONGO_VERSION - mlaunch init --dir /data --bind_ip 0.0.0.0 --hostname $HOSTNAME --single --setParameter enableTestCommands=1 -elif [ "$1" = 'replica' ]; then - m $MONGO_VERSION - mlaunch init --dir /data --bind_ip 0.0.0.0 --replicaset --nodes 3 --arbiter --name rs --port 31000 --enableMajorityReadConcern --setParameter enableTestCommands=1 -elif [ "$1" = 'sharded' ]; then - m $MONGO_VERSION - mlaunch init --dir /data --bind_ip 0.0.0.0 --replicaset --nodes 3 --arbiter --name rs --port 51000 --enableMajorityReadConcern --setParameter enableTestCommands=1 --sharded 1 --mongos 2 -else - echo "Invalid syntax" -fi - -sleep 2 - -if [ -d /data/rs ]; then - tail -f /data/rs/*/mongod.log -elif [ -d /data/configRepl ]; then - tail -f /data/mongos/mongos_*.log /data/**/**/mongod.log -else - tail -f /data/mongod.log -fi diff --git a/test/tools/docker_cluster.sh b/test/tools/docker_cluster.sh deleted file mode 100755 index 6499edb126..0000000000 --- a/test/tools/docker_cluster.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash -set -e - -DOCKER_IMAGE=node-mongodb-native/docker-mongodb - -function die_with_usage { - printf "usage:\tdocker_cluster \n\tdocker_cluster killall\n" - exit -} - -function docker_mongodb { - if [[ $1 == "replica_set" ]]; then - docker run --name "mongo_${1}_${2}" --rm -d -p 31000-31003:31000-31003 -e MONGO_VERSION=$2 ${DOCKER_IMAGE} replica - echo "mongodb://localhost:31000/?replicaSet=rs" - elif [[ $1 == "sharded_cluster" ]]; then - docker run --name "mongo_${1}_${2}" --rm -d -p 51000-51006:51000-51006 -e MONGO_VERSION=$2 ${DOCKER_IMAGE} sharded - echo "mongodb://localhost:51000,localhost:51001/" - elif [[ $1 == "server" ]]; then - docker run --name "mongo_${1}_${2}" --rm -d -p 27017:27017 -e MONGO_VERSION=$2 -e HOSTNAME=$(hostname) ${DOCKER_IMAGE} single - echo "mongodb://localhost:27017" - elif [[ $1 == "all" ]]; then - docker_mongodb server $2 & - docker_mongodb replica_set $2 & - docker_mongodb sharded_cluster $2 & - wait - return - else - echo "unsupported topology: $1" - die_with_usage - fi - - docker ps -f name=mongo_${1}_${2} - - printf "\n[ Tailing container logs, Ctrl+C to exit; the container is detached and will continue running until stopped with 'docker kill' ]\n\n" - docker logs -f $(docker ps -f name=mongo_${1}_${2} -q) -} - -if [ "$#" -ne 2 ] && [ ${1:-''} != "killall" ]; then - die_with_usage -fi - -if [[ $1 == "killall" ]]; then - RUNNING=$(docker ps -f ancestor=${DOCKER_IMAGE} -q) - if [[ $RUNNING ]]; then - docker kill $RUNNING - echo "Killed all running mongo containers" - else - echo "No running mongo containers" - fi - exit -else - if [[ $(docker image ls -q ${DOCKER_IMAGE}) ]]; then - echo "Image already cached, skipping build; force a rebuild with 'docker build --no-cache'" - else - cd "${0%/*}/docker-mongodb" - docker build -t ${DOCKER_IMAGE} . - fi - docker_mongodb $1 $2 -fi \ No newline at end of file diff --git a/test/tools/run_each_test.sh b/test/tools/run_each_test.sh deleted file mode 100755 index 78921c8b33..0000000000 --- a/test/tools/run_each_test.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -if [ "$#" -ne 1 ]; then - echo "usage: run_each_test " - exit -fi - -TEST_PATH=$1 -find $TEST_PATH -type f \( -iname "*.test.js" ! -iname "*atlas*" ! 
-path "*node-next*" \) -exec npx mocha {} \; diff --git a/test/tools/runner/index.js b/test/tools/runner/index.js index 56ba128e6d..0356bfc7fd 100644 --- a/test/tools/runner/index.js +++ b/test/tools/runner/index.js @@ -9,7 +9,6 @@ const fs = require('fs'); const { MongoClient } = require('../../../src'); const { TestConfiguration } = require('./config'); const { getEnvironmentalOptions } = require('../utils'); -const { eachAsync } = require('../../../src/utils'); const mock = require('../mongodb-mock/index'); const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017'; @@ -18,92 +17,84 @@ const MONGODB_API_VERSION = process.env.MONGODB_API_VERSION; const SINGLE_MONGOS_LB_URI = process.env.SINGLE_MONGOS_LB_URI; // Load balancer fronting 2 mongoses. const MULTI_MONGOS_LB_URI = process.env.MULTI_MONGOS_LB_URI; +const loadBalanced = SINGLE_MONGOS_LB_URI && MULTI_MONGOS_LB_URI; const filters = []; -function initializeFilters(client, callback) { +const LOG_FILTER_REASON = false; + +let initializedFilters = false; +async function initializeFilters(client) { + if (initializedFilters) { + return; + } + initializedFilters = true; + const context = {}; + const filterFiles = fs .readdirSync(path.join(__dirname, 'filters')) .filter(x => x.indexOf('js') !== -1); - // context object that can be appended to as part of filter initialization - const context = {}; + for (const filterName of filterFiles) { + const FilterModule = require(path.join(__dirname, 'filters', filterName)); + const filter = new FilterModule(); - eachAsync( - filterFiles, - (filterName, cb) => { - const FilterModule = require(path.join(__dirname, 'filters', filterName)); - const filter = new FilterModule(); + console.assert(typeof filter === 'object'); + console.assert(filter.filter && typeof filter.filter === 'function'); - if (typeof filter !== 'object') { - cb(new TypeError('Type of filter must be an object')); - return; - } + filters.push(filter); - if (!filter.filter || typeof filter.filter !== 'function') { - cb(new TypeError('Object filters must have a function named filter')); - return; - } + if (typeof filter.initializeFilter === 'function') { + await new Promise((resolve, reject) => + filter.initializeFilter(client, context, e => (e ? reject(e) : resolve())) + ); + } + } - filters.push(filter); - if (typeof filter.initializeFilter === 'function') { - filter.initializeFilter(client, context, cb); - } else { - cb(); - } - }, - err => callback(err, context) - ); + return context; } -function filterOutTests(suite) { - suite.tests = suite.tests.filter(test => filters.every(f => f.filter(test))); - suite.suites.forEach(suite => filterOutTests(suite)); -} +beforeEach(async function () { + if (Object.keys(this.currentTest.metadata).length > 0) { + let ok = true; + for (const filter of filters) { + ok = ok && filter.filter(this.currentTest); + if (!ok) { + if (LOG_FILTER_REASON) { + this.currentTest.title += ` ## filtered by ${filter.constructor.name} - ${JSON.stringify( + this.currentTest.metadata + )}`; + } + break; + } + } -before(function (_done) { - // NOTE: if we first parse the connection string and redact auth, then we can reenable this - // const usingUnifiedTopology = !!process.env.MONGODB_UNIFIED_TOPOLOGY; - // console.log( - // `connecting to: ${chalk.bold(MONGODB_URI)} using ${chalk.bold( - // usingUnifiedTopology ? 
'unified' : 'legacy' - // )} topology` - // ); + if (!ok) { + this.skip(); + } + } +}); - const loadBalanced = SINGLE_MONGOS_LB_URI && MULTI_MONGOS_LB_URI; +before(async function () { const client = new MongoClient( loadBalanced ? SINGLE_MONGOS_LB_URI : MONGODB_URI, getEnvironmentalOptions() ); - const done = err => client.close(err2 => _done(err || err2)); - client.connect(err => { - if (err) { - done(err); - return; - } + await client.connect(); - initializeFilters(client, (err, context) => { - if (err) { - done(err); - return; - } + const context = await initializeFilters(client); - // Ensure test MongoClients set a serverApi parameter when required - if (MONGODB_API_VERSION) { - context.serverApi = MONGODB_API_VERSION; - } + if (MONGODB_API_VERSION) { + context.serverApi = MONGODB_API_VERSION; + } - if (SINGLE_MONGOS_LB_URI && MULTI_MONGOS_LB_URI) { - context.singleMongosLoadBalancerUri = SINGLE_MONGOS_LB_URI; - context.multiMongosLoadBalancerUri = MULTI_MONGOS_LB_URI; - } + if (SINGLE_MONGOS_LB_URI && MULTI_MONGOS_LB_URI) { + context.singleMongosLoadBalancerUri = SINGLE_MONGOS_LB_URI; + context.multiMongosLoadBalancerUri = MULTI_MONGOS_LB_URI; + } - // replace this when mocha supports dynamic skipping with `afterEach` - filterOutTests(this._runnable.parent); - this.configuration = new TestConfiguration(MONGODB_URI, context); - done(); - }); - }); + this.configuration = new TestConfiguration(MONGODB_URI, context); + await client.close(); }); // ensure all mock connections are closed after the suite is run diff --git a/test/tools/runner/plugins/session_leak_checker.js b/test/tools/runner/plugins/session_leak_checker.js index 1852b85f72..b702f0f17f 100644 --- a/test/tools/runner/plugins/session_leak_checker.js +++ b/test/tools/runner/plugins/session_leak_checker.js @@ -131,9 +131,9 @@ afterEach('Session Leak After Each - ensure no leaks', function () { `client close failed to clean up ${pooledSessions.size} pooled sessions` ).to.equal(0); } catch (e) { - activeSessions.clear(); - pooledSessions.clear(); - activeSessionsBeforeClose.clear(); + if (activeSessions) activeSessions.clear(); + if (pooledSessions) pooledSessions.clear(); + if (activeSessionsBeforeClose) activeSessionsBeforeClose.clear(); this.test.error(e); } }); diff --git a/test/tools/unified-spec-runner/schema.ts b/test/tools/unified-spec-runner/schema.ts index a8e1733533..2bad64a6b9 100644 --- a/test/tools/unified-spec-runner/schema.ts +++ b/test/tools/unified-spec-runner/schema.ts @@ -34,6 +34,8 @@ export const TopologyType = Object.freeze({ } as const); export type TopologyId = typeof TopologyType[keyof typeof TopologyType]; export interface RunOnRequirement { + serverless: 'forbid' | 'allow' | 'require'; + auth: boolean; maxServerVersion?: string; minServerVersion?: string; topologies?: TopologyId[]; From b6d90901984a2aab2b36a5c838064530c07ce0fd Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 10:37:59 -0500 Subject: [PATCH 02/11] fix: before all hooks run regardless of test skip, new hazzard? 
--- .../client_side_encryption.prose.test.js | 20 +++++++++---------- .../filters/client_encryption_filter.js | 7 +------ 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.test.js b/test/integration/client-side-encryption/client_side_encryption.prose.test.js index 0dfe958a2c..d0752f11ed 100644 --- a/test/integration/client-side-encryption/client_side_encryption.prose.test.js +++ b/test/integration/client-side-encryption/client_side_encryption.prose.test.js @@ -552,14 +552,16 @@ describe('Client Side Encryption Prose Tests', function () { const limitsKey = loadLimits('limits-key.json'); const limitsDoc = loadLimits('limits-doc.json'); - before(function () { - // First, perform the setup. + let firstTimeSetup = true; + beforeEach(async function () { + if (firstTimeSetup) { + firstTimeSetup = false; + // First, perform the setup. - // #. Create a MongoClient without encryption enabled (referred to as ``client``). - this.client = this.configuration.newClient(); + // #. Create a MongoClient without encryption enabled (referred to as ``client``). + this.client = this.configuration.newClient(); - return ( - this.client + await this.client .connect() // #. Using ``client``, drop and create the collection ``db.coll`` configured with the included JSON schema `limits/limits-schema.json <../limits/limits-schema.json>`_. .then(() => dropCollection(this.client.db(dataDbName), dataCollName)) @@ -575,11 +577,9 @@ describe('Client Side Encryption Prose Tests', function () { .db(keyVaultDbName) .collection(keyVaultCollName) .insertOne(limitsKey, { writeConcern: { w: 'majority' } }); - }) - ); - }); + }); + } - beforeEach(function () { // #. Create a MongoClient configured with auto encryption (referred to as ``client_encrypted``) // Configure with the ``local`` KMS provider as follows: // .. 
code:: javascript diff --git a/test/tools/runner/filters/client_encryption_filter.js b/test/tools/runner/filters/client_encryption_filter.js index 5aa48d1fb4..56f25bfe08 100644 --- a/test/tools/runner/filters/client_encryption_filter.js +++ b/test/tools/runner/filters/client_encryption_filter.js @@ -1,7 +1,6 @@ 'use strict'; const mongodb = require('../../../../src'); -const semver = require('semver'); /** * Filter for whether or not a test needs / doesn't need Client Side Encryption @@ -40,11 +39,7 @@ class ClientSideEncryptionFilter { const clientSideEncryption = test.metadata && test.metadata.requires && test.metadata.requires.clientSideEncryption; - // CSFLE is only supported on LTS versions of node - const nodeSupportsCSFLE = semver.satisfies(process.version, '>4'); - - const ret = typeof clientSideEncryption !== 'boolean' || clientSideEncryption === this.enabled; - return ret && nodeSupportsCSFLE; + return typeof clientSideEncryption !== 'boolean' || clientSideEncryption === this.enabled; } } From 21ad026fc7f1ed311231798f1cf670e49af07a81 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 11:27:27 -0500 Subject: [PATCH 03/11] fix: docs and sdam_viz move --- etc/sdam_viz.js | 130 +++++++++++++++++++++++++++++++++++++++++++- test/readme.md | 13 +++++ test/tools/utils.js | 124 +----------------------------------------- 3 files changed, 141 insertions(+), 126 deletions(-) diff --git a/etc/sdam_viz.js b/etc/sdam_viz.js index e9b6215f47..9bfa8418dd 100644 --- a/etc/sdam_viz.js +++ b/etc/sdam_viz.js @@ -1,9 +1,13 @@ /* eslint-disable no-console */ /* eslint-disable @typescript-eslint/no-var-requires */ -const { MongoClient } = require('../../src'); -const visualizeMonitoringEvents = require('./utils').visualizeMonitoringEvents; -const { now, calculateDurationInMs } = require('../../src/utils'); +// run this file with ts-node: +// npx ts-node etc/sdam_viz.js -h + +const { MongoClient } = require('../src'); +const { now, calculateDurationInMs, arrayStrictEqual, errorStrictEqual } = require('../src/utils'); + +const util = require('util'); const chalk = require('chalk'); const argv = require('yargs') .usage('Usage: $0 [options] ') @@ -25,6 +29,126 @@ function print(msg) { const uri = argv._[0]; const client = new MongoClient(uri); +function diff(lhs, rhs, fields, comparator) { + return fields.reduce((diff, field) => { + if ((lhs[field] == null || rhs[field] == null) && field !== 'error') { + return diff; + } + + if (!comparator(lhs[field], rhs[field])) { + diff.push( + ` ${field}: ${chalk.green(`${util.inspect(lhs[field])}`)} => ${chalk.green( + `${util.inspect(rhs[field])}` + )}` + ); + } + + return diff; + }, []); +} + +function serverDescriptionDiff(lhs, rhs) { + const objectIdFields = ['electionId']; + const arrayFields = ['hosts', 'tags']; + const simpleFields = [ + 'type', + 'minWireVersion', + 'me', + 'setName', + 'setVersion', + 'electionId', + 'primary', + 'logicalSessionTimeoutMinutes' + ]; + + return diff(lhs, rhs, simpleFields, (x, y) => x === y) + .concat(diff(lhs, rhs, ['error'], (x, y) => errorStrictEqual(x, y))) + .concat(diff(lhs, rhs, arrayFields, (x, y) => arrayStrictEqual(x, y))) + .concat(diff(lhs, rhs, objectIdFields, (x, y) => x.equals(y))) + .join(',\n'); +} + +function topologyDescriptionDiff(lhs, rhs) { + const simpleFields = [ + 'type', + 'setName', + 'maxSetVersion', + 'stale', + 'compatible', + 'compatibilityError', + 'logicalSessionTimeoutMinutes', + 'error', + 'commonWireVersion' + ]; + + return diff(lhs, rhs, simpleFields, (x, y) => x === 
y).join(',\n'); +} + +function visualizeMonitoringEvents(client) { + function print(msg) { + console.error(`${chalk.white(new Date().toISOString())} ${msg}`); + } + + client.on('serverHeartbeatStarted', event => + print(`${chalk.yellow('heartbeat')} ${chalk.bold('started')} host: '${event.connectionId}`) + ); + + client.on('serverHeartbeatSucceeded', event => + print( + `${chalk.yellow('heartbeat')} ${chalk.green('succeeded')} host: '${ + event.connectionId + }' ${chalk.gray(`(${event.duration} ms)`)}` + ) + ); + + client.on('serverHeartbeatFailed', event => + print( + `${chalk.yellow('heartbeat')} ${chalk.red('failed')} host: '${ + event.connectionId + }' ${chalk.gray(`(${event.duration} ms)`)}` + ) + ); + + // server information + client.on('serverOpening', event => { + print( + `${chalk.cyan('server')} [${event.address}] ${chalk.bold('opening')} in topology#${ + event.topologyId + }` + ); + }); + + client.on('serverClosed', event => { + print( + `${chalk.cyan('server')} [${event.address}] ${chalk.bold('closed')} in topology#${ + event.topologyId + }` + ); + }); + + client.on('serverDescriptionChanged', event => { + print(`${chalk.cyan('server')} [${event.address}] changed:`); + console.error(serverDescriptionDiff(event.previousDescription, event.newDescription)); + }); + + // topology information + client.on('topologyOpening', event => { + print(`${chalk.magenta('topology')} adding topology#${event.topologyId}`); + }); + + client.on('topologyClosed', event => { + print(`${chalk.magenta('topology')} removing topology#${event.topologyId}`); + }); + + client.on('topologyDescriptionChanged', event => { + const diff = topologyDescriptionDiff(event.previousDescription, event.newDescription); + if (diff !== '') { + print(`${chalk.magenta('topology')} [topology#${event.topologyId}] changed:`); + console.error(diff); + } + }); +} + async function run() { print(`connecting to: ${chalk.bold(uri)}`); diff --git a/test/readme.md b/test/readme.md index f3ff916eba..5715f60331 100644 --- a/test/readme.md +++ b/test/readme.md @@ -10,6 +10,7 @@ about the types of tests and how to run them. - [Running the Tests in Evergreen](#running-the-tests-in-evergreen) - [Using a Pre-Release Version of a Dependent Library](#using-a-pre-release-version-of-a-dependent-library) - [Manually Testing the Driver](#manually-testing-the-driver) +- [Writing Tests](#writing-tests) - [Testing with Special Environments](#testing-with-special-environments) ## About the Tests @@ -143,6 +144,18 @@ modify the steps to work with existing Node projects. > **Note:** When making driver changes, you will need to run `npm run build:ts` with each change in order for it to take effect. +## Writing Tests + +> TODO: flesh this section out more + +We use mocha to construct our test suites and chai to assert expectations. + +Some special notes on how mocha works with our testing setup: + +- `before` hooks will run even if a test is skipped by the environment it runs on. + - So, for example, if your before hook does logic that can only run on a certain server version you can't depend on your test block metadata to filter for that. +- `after` hooks cannot be used to clean up clients because the session leak checker currently runs its afterEach hook first. + ## Testing with Special Environments In order to test some features, you will need to generate and set a specialized group of environment variables. The subsections below will walk you through how to generate and set the environment variables for these features. 
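A minimal sketch of the hook guidance in the readme notes above — one-time setup inside a `beforeEach` so the metadata-driven skip (which also runs in a global `beforeEach`) still applies, and client cleanup in `afterEach` rather than `after`. This is illustrative only and not part of the patch; it assumes the runner from `test/tools/runner` is loaded via `.mocharc.json`, and the database/collection names are placeholders.

```js
describe('example: hook usage under the metadata filters', function () {
  let hasRunFirstTimeSetup = false;

  beforeEach(async function () {
    // One-time fixture setup lives in a beforeEach (not before) so that when every
    // test in this block is skipped by the environment, the setup never runs.
    if (hasRunFirstTimeSetup) return;
    hasRunFirstTimeSetup = true;

    const setupClient = this.configuration.newClient();
    await setupClient.connect();
    await setupClient.db('test').collection('fixtures').insertOne({ ready: true });
    await setupClient.close();
  });

  beforeEach(async function () {
    // Per-test client, created only after the filters have had a chance to skip the test.
    this.client = this.configuration.newClient();
    await this.client.connect();
  });

  afterEach(async function () {
    // Close clients in afterEach: the session leak checker's afterEach would run
    // before any after() hook, so an after()-based cleanup comes too late.
    if (this.client) await this.client.close();
  });

  it('uses the per-test client', { requires: { mongodb: '>=4.0.0' } }, async function () {
    await this.client.db('test').collection('fixtures').findOne({});
  });
});
```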
diff --git a/test/tools/utils.js b/test/tools/utils.js index 6cb4689394..fef8c5d9f5 100644 --- a/test/tools/utils.js +++ b/test/tools/utils.js @@ -1,8 +1,7 @@ 'use strict'; const { Logger } = require('../../src/logger'); -const { deprecateOptions, arrayStrictEqual, errorStrictEqual } = require('../../src/utils'); -const chalk = require('chalk'); +const { deprecateOptions } = require('../../src/utils'); const util = require('util'); const chai = require('chai'); @@ -60,126 +59,6 @@ ClassWithUndefinedLogger.prototype.getLogger = function () { return undefined; }; -function diff(lhs, rhs, fields, comparator) { - return fields.reduce((diff, field) => { - if ((lhs[field] == null || rhs[field] == null) && field !== 'error') { - return diff; - } - - if (!comparator(lhs[field], rhs[field])) { - diff.push( - ` ${field}: ${chalk.green(`${util.inspect(lhs[field])}`)} => ${chalk.green( - `${util.inspect(rhs[field])}` - )}` - ); - } - - return diff; - }, []); -} - -function serverDescriptionDiff(lhs, rhs) { - const objectIdFields = ['electionId']; - const arrayFields = ['hosts', 'tags']; - const simpleFields = [ - 'type', - 'minWireVersion', - 'me', - 'setName', - 'setVersion', - 'electionId', - 'primary', - 'logicalSessionTimeoutMinutes' - ]; - - return diff(lhs, rhs, simpleFields, (x, y) => x === y) - .concat(diff(lhs, rhs, ['error'], (x, y) => errorStrictEqual(x, y))) - .concat(diff(lhs, rhs, arrayFields, (x, y) => arrayStrictEqual(x, y))) - .concat(diff(lhs, rhs, objectIdFields, (x, y) => x.equals(y))) - .join(',\n'); -} - -function topologyDescriptionDiff(lhs, rhs) { - const simpleFields = [ - 'type', - 'setName', - 'maxSetVersion', - 'stale', - 'compatible', - 'compatibilityError', - 'logicalSessionTimeoutMinutes', - 'error', - 'commonWireVersion' - ]; - - return diff(lhs, rhs, simpleFields, (x, y) => x === y).join(',\n'); -} - -function visualizeMonitoringEvents(client) { - function print(msg) { - console.error(`${chalk.white(new Date().toISOString())} ${msg}`); - } - - client.on('serverHeartbeatStarted', event => - print(`${chalk.yellow('heartbeat')} ${chalk.bold('started')} host: '${event.connectionId}`) - ); - - client.on('serverHeartbeatSucceeded', event => - print( - `${chalk.yellow('heartbeat')} ${chalk.green('succeeded')} host: '${ - event.connectionId - }' ${chalk.gray(`(${event.duration} ms)`)}` - ) - ); - - client.on('serverHeartbeatFailed', event => - print( - `${chalk.yellow('heartbeat')} ${chalk.red('failed')} host: '${ - event.connectionId - }' ${chalk.gray(`(${event.duration} ms)`)}` - ) - ); - - // server information - client.on('serverOpening', event => { - print( - `${chalk.cyan('server')} [${event.address}] ${chalk.bold('opening')} in topology#${ - event.topologyId - }` - ); - }); - - client.on('serverClosed', event => { - print( - `${chalk.cyan('server')} [${event.address}] ${chalk.bold('closed')} in topology#${ - event.topologyId - }` - ); - }); - - client.on('serverDescriptionChanged', event => { - print(`${chalk.cyan('server')} [${event.address}] changed:`); - console.error(serverDescriptionDiff(event.previousDescription, event.newDescription)); - }); - - // topology information - client.on('topologyOpening', event => { - print(`${chalk.magenta('topology')} adding topology#${event.topologyId}`); - }); - - client.on('topologyClosed', event => { - print(`${chalk.magenta('topology')} removing topology#${event.topologyId}`); - }); - - client.on('topologyDescriptionChanged', event => { - const diff = topologyDescriptionDiff(event.previousDescription, event.newDescription); - if 
(diff !== '') { - print(`${chalk.magenta('topology')} [topology#${event.topologyId}] changed:`); - console.error(diff); - } - }); -} - class EventCollector { constructor(obj, events, options) { this._events = Object.create(null); @@ -358,7 +237,6 @@ module.exports = { ClassWithLogger, ClassWithoutLogger, ClassWithUndefinedLogger, - visualizeMonitoringEvents, getSymbolFrom, getEnvironmentalOptions, shouldRunServerlessTest From dc80d12e2d6e47946a808934a42f1b2550cc3ebf Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 17:46:50 -0500 Subject: [PATCH 04/11] fix: wording on hook warning Co-authored-by: Daria Pardue --- test/readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/readme.md b/test/readme.md index 5715f60331..c3e97c4866 100644 --- a/test/readme.md +++ b/test/readme.md @@ -154,7 +154,7 @@ Some special notes on how mocha works with our testing setup: - `before` hooks will run even if a test is skipped by the environment it runs on. - So, for example, if your before hook does logic that can only run on a certain server version you can't depend on your test block metadata to filter for that. -- `after` hooks cannot be used to clean up clients because the session leak checker currently runs its afterEach hook first. +- `after` hooks cannot be used to clean up clients because the session leak checker currently runs in an `afterEach` hook, which would be executed before any `after` hook has a chance to run ## Testing with Special Environments From ea10612408eeda07d3e05af39e6839d8991d7185 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 17:53:10 -0500 Subject: [PATCH 05/11] fix: use two beforeEach hooks for clarity --- .../client_side_encryption.prose.test.js | 52 +++++++++++-------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.test.js b/test/integration/client-side-encryption/client_side_encryption.prose.test.js index d0752f11ed..64f052b84f 100644 --- a/test/integration/client-side-encryption/client_side_encryption.prose.test.js +++ b/test/integration/client-side-encryption/client_side_encryption.prose.test.js @@ -552,34 +552,40 @@ describe('Client Side Encryption Prose Tests', function () { const limitsKey = loadLimits('limits-key.json'); const limitsDoc = loadLimits('limits-doc.json'); - let firstTimeSetup = true; + let hasRunFirstTimeSetup = false; beforeEach(async function () { - if (firstTimeSetup) { - firstTimeSetup = false; - // First, perform the setup. + if (hasRunFirstTimeSetup) { + // Even though we have to use a beforeEach here + // We still only want the following code to be run *once* + // before all the tests that follow + return; + } + hasRunFirstTimeSetup = true; + // First, perform the setup. - // #. Create a MongoClient without encryption enabled (referred to as ``client``). - this.client = this.configuration.newClient(); + // #. Create a MongoClient without encryption enabled (referred to as ``client``). + this.client = this.configuration.newClient(); - await this.client - .connect() - // #. Using ``client``, drop and create the collection ``db.coll`` configured with the included JSON schema `limits/limits-schema.json <../limits/limits-schema.json>`_. - .then(() => dropCollection(this.client.db(dataDbName), dataCollName)) - .then(() => { - return this.client.db(dataDbName).createCollection(dataCollName, { - validator: { $jsonSchema: limitsSchema } - }); - }) - // #. Using ``client``, drop the collection ``keyvault.datakeys``. 
Insert the document `limits/limits-key.json <../limits/limits-key.json>`_ - .then(() => dropCollection(this.client.db(keyVaultDbName), keyVaultCollName)) - .then(() => { - return this.client - .db(keyVaultDbName) - .collection(keyVaultCollName) - .insertOne(limitsKey, { writeConcern: { w: 'majority' } }); + await this.client + .connect() + // #. Using ``client``, drop and create the collection ``db.coll`` configured with the included JSON schema `limits/limits-schema.json <../limits/limits-schema.json>`_. + .then(() => dropCollection(this.client.db(dataDbName), dataCollName)) + .then(() => { + return this.client.db(dataDbName).createCollection(dataCollName, { + validator: { $jsonSchema: limitsSchema } }); - } + }) + // #. Using ``client``, drop the collection ``keyvault.datakeys``. Insert the document `limits/limits-key.json <../limits/limits-key.json>`_ + .then(() => dropCollection(this.client.db(keyVaultDbName), keyVaultCollName)) + .then(() => { + return this.client + .db(keyVaultDbName) + .collection(keyVaultCollName) + .insertOne(limitsKey, { writeConcern: { w: 'majority' } }); + }); + }); + beforeEach(function () { // #. Create a MongoClient configured with auto encryption (referred to as ``client_encrypted``) // Configure with the ``local`` KMS provider as follows: // .. code:: javascript From e50e4e42ee3ecd833b51be21de1bb074d41f47aa Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 18:26:18 -0500 Subject: [PATCH 06/11] fix: clean up filter logic add skipReason to logs --- test/functional/unit-sdam/monitoring.test.js | 6 ++--- test/tools/reporter/mongodb_reporter.js | 5 +++- test/tools/runner/index.js | 28 +++++++++----------- test/tools/runner/metadata_ui.js | 2 +- 4 files changed, 21 insertions(+), 20 deletions(-) diff --git a/test/functional/unit-sdam/monitoring.test.js b/test/functional/unit-sdam/monitoring.test.js index 51197b2abb..6554ecbd67 100644 --- a/test/functional/unit-sdam/monitoring.test.js +++ b/test/functional/unit-sdam/monitoring.test.js @@ -24,7 +24,7 @@ describe('monitoring', function () { return mock.createServer().then(server => (mockServer = server)); }); - // TODO: NODE-3819: Unskip flaky tests. + // TODO(NODE-3819): Unskip flaky tests. 
it.skip('should record roundTripTime', function (done) { mockServer.setMessageHandler(request => { const doc = request.document; @@ -49,7 +49,7 @@ describe('monitoring', function () { topology.close(done); }, 500); }); - }); + }).skipReason = 'TODO(NODE-3819): Unskip flaky tests'; // TODO(NODE-3600): Unskip flaky test it.skip('should recover on error during initial connect', function (done) { @@ -88,7 +88,7 @@ describe('monitoring', function () { topology.close(done); }); - }); + }).skipReason = 'TODO(NODE-3600): Unskip flaky tests'; describe('Monitor', function () { it('should connect and issue an initial server check', function (done) { diff --git a/test/tools/reporter/mongodb_reporter.js b/test/tools/reporter/mongodb_reporter.js index 896cebc196..5f18352c27 100644 --- a/test/tools/reporter/mongodb_reporter.js +++ b/test/tools/reporter/mongodb_reporter.js @@ -230,10 +230,13 @@ class MongoDBMochaReporter extends mocha.reporters.Spec { } /** - * @param {MongoMochaTest} test + * @param {MongoMochaTest & {skipReason?: string}} test */ pending(test) { if (REPORT_TO_STDIO) console.log(chalk.cyan(`↬ ${test.fullTitle()}`)); + if (typeof test.skipReason === 'string') { + console.log(chalk.cyan(`${' '.repeat(test.titlePath().length + 1)}↬ ${test.skipReason}`)); + } test.skipped = true; } } diff --git a/test/tools/runner/index.js b/test/tools/runner/index.js index 0356bfc7fd..13c0e6ac49 100644 --- a/test/tools/runner/index.js +++ b/test/tools/runner/index.js @@ -10,6 +10,7 @@ const { MongoClient } = require('../../../src'); const { TestConfiguration } = require('./config'); const { getEnvironmentalOptions } = require('../utils'); const mock = require('../mongodb-mock/index'); +const { inspect } = require('util'); const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017'; const MONGODB_API_VERSION = process.env.MONGODB_API_VERSION; @@ -20,8 +21,6 @@ const MULTI_MONGOS_LB_URI = process.env.MULTI_MONGOS_LB_URI; const loadBalanced = SINGLE_MONGOS_LB_URI && MULTI_MONGOS_LB_URI; const filters = []; -const LOG_FILTER_REASON = false; - let initializedFilters = false; async function initializeFilters(client) { if (initializedFilters) { @@ -55,20 +54,19 @@ async function initializeFilters(client) { beforeEach(async function () { if (Object.keys(this.currentTest.metadata).length > 0) { - let ok = true; - for (const filter of filters) { - ok = ok && filter.filter(this.currentTest); - if (!ok) { - if (LOG_FILTER_REASON) { - this.currentTest.title += ` ## filtered by ${filter.constructor.name} - ${JSON.stringify( - this.currentTest.metadata - )}`; - } - break; - } - } + const failedFilter = filters.find(filter => !filter.filter(this.currentTest)); + + if (failedFilter) { + const filterName = failedFilter.constructor.name; + const metadataString = inspect(this.currentTest.metadata, { + colors: true, + compact: true, + depth: 10, + breakLength: Infinity + }); + + this.currentTest.skipReason = `filtered by ${filterName} - ${metadataString}`; - if (!ok) { this.skip(); } } diff --git a/test/tools/runner/metadata_ui.js b/test/tools/runner/metadata_ui.js index 442b982957..4c945d56c1 100644 --- a/test/tools/runner/metadata_ui.js +++ b/test/tools/runner/metadata_ui.js @@ -198,7 +198,7 @@ module.exports = Mocha.interfaces.metadata_ui = function (suite) { context.xspecify = context.it.skip = function (title) { - context.it(title); + return context.it(title); }; /** From 5a07b3b4d3e96e344d18bb957e57ad6dede318d6 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Fri, 7 Jan 2022 18:58:24 -0500 Subject: [PATCH 
07/11] fix: colors --- test/tools/reporter/mongodb_reporter.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/tools/reporter/mongodb_reporter.js b/test/tools/reporter/mongodb_reporter.js index 5f18352c27..22dee62655 100644 --- a/test/tools/reporter/mongodb_reporter.js +++ b/test/tools/reporter/mongodb_reporter.js @@ -3,6 +3,8 @@ const mocha = require('mocha'); const chalk = require('chalk'); +chalk.level = 3; + const { EVENT_RUN_BEGIN, EVENT_RUN_END, From 460642e8d94ccdab76c746bbd8fc91bdf9b073ac Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Sun, 9 Jan 2022 15:22:43 -0500 Subject: [PATCH 08/11] fix: beforeEach hook now extracts just the requires property - Added type defintions that describe our metadata overloads, new tsconfig imports this, it makes it so vscode will auto discover the extensions - Added skipReason to the unified runner --- .gitignore | 1 + test/mongodb-mocha.d.ts | 65 +++++++++++++++++++ .../runner/filters/mongodb_topology_filter.js | 2 +- test/tools/runner/index.js | 8 ++- test/tools/unified-spec-runner/runner.ts | 9 +-- .../unified-spec-runner/unified-utils.ts | 32 ++++++--- test/tsconfig.json | 9 +++ 7 files changed, 108 insertions(+), 18 deletions(-) create mode 100644 test/mongodb-mocha.d.ts create mode 100644 test/tsconfig.json diff --git a/.gitignore b/.gitignore index 8a63988681..6ff665b7ec 100644 --- a/.gitignore +++ b/.gitignore @@ -57,6 +57,7 @@ lib/ *.d.ts # type definition tests !test/types +!test/mongodb-mocha.d.ts .vscode output diff --git a/test/mongodb-mocha.d.ts b/test/mongodb-mocha.d.ts new file mode 100644 index 0000000000..97aaebfd91 --- /dev/null +++ b/test/mongodb-mocha.d.ts @@ -0,0 +1,65 @@ +import type { TestConfiguration } from "./tools/unified-spec-runner/runner"; + +/** Defined in test/tools/runner/filters/mongodb_topology_filter.js (topologyTypeToString) */ +type TopologyTypes = 'single' | 'replicaset' | 'sharded' | 'load-balanced'; + +interface MongoDBMetadataUI { + requires?: { + topology?: TopologyTypes | TopologyTypes[]; + mongodb?: string; + os?: NodeJS.Platform | `!${NodeJS.Platform}`; + apiVersion?: '1'; + clientSideEncryption?: boolean; + serverless?: 'forbid' | 'allow' | 'require'; + }; + + sessions?: { + skipLeakTests?: boolean; + }; +} + +interface MetadataAndTest { + metadata: MongoDBMetadataUI; + test: Fn; +} + +declare global { + namespace Mocha { + interface TestFunction { + (title: string, metadata: MongoDBMetadataUI, fn: Mocha.Func): Mocha.Test; + (title: string, metadata: MongoDBMetadataUI, fn: Mocha.AsyncFunc): Mocha.Test; + + (title: string, testAndMetadata: MetadataAndTest): Mocha.Test; + (title: string, testAndMetadata: MetadataAndTest): Mocha.Test; + } + + interface Context { + configuration: TestConfiguration; + } + + interface Test { + metadata: MongoDBMetadataUI; + } + + interface Runnable { + /** + * An optional string the test author can attach to print out why a test is skipped + * + * @example + * ``` + * it.skip('my test', () => { + * //... + * }).skipReason = 'TODO(NODE-XXXX): Feature implementation impending!'; + * ``` + * + * The reporter (`test/tools/reporter/mongodb_reporter.js`) will print out the skipReason + * indented directly below the test name. + * ``` + * - my test + * - TODO(NODE-XXXX): Feature implementation impending! 
+ * ``` + */ + skipReason?: string; + } + } +} diff --git a/test/tools/runner/filters/mongodb_topology_filter.js b/test/tools/runner/filters/mongodb_topology_filter.js index dafbed52d8..6be1fb94e2 100755 --- a/test/tools/runner/filters/mongodb_topology_filter.js +++ b/test/tools/runner/filters/mongodb_topology_filter.js @@ -2,7 +2,7 @@ const { TopologyType } = require('../../../../src/sdam/common'); /** - * Filter for the MongoDB toopology required for the test + * Filter for the MongoDB topology required for the test * * example: * metadata: { diff --git a/test/tools/runner/index.js b/test/tools/runner/index.js index 13c0e6ac49..bfe1f26fd2 100644 --- a/test/tools/runner/index.js +++ b/test/tools/runner/index.js @@ -53,19 +53,21 @@ async function initializeFilters(client) { } beforeEach(async function () { - if (Object.keys(this.currentTest.metadata).length > 0) { + // `metadata` always exists, `requires` is optional + const requires = this.currentTest.metadata.requires; + if (requires && Object.keys(requires).length > 0) { const failedFilter = filters.find(filter => !filter.filter(this.currentTest)); if (failedFilter) { const filterName = failedFilter.constructor.name; - const metadataString = inspect(this.currentTest.metadata, { + const metadataString = inspect(requires, { colors: true, compact: true, depth: 10, breakLength: Infinity }); - this.currentTest.skipReason = `filtered by ${filterName} - ${metadataString}`; + this.currentTest.skipReason = `filtered by ${filterName} requires ${metadataString}`; this.skip(); } diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts index 56fd98f9dc..eff46d7711 100644 --- a/test/tools/unified-spec-runner/runner.ts +++ b/test/tools/unified-spec-runner/runner.ts @@ -1,7 +1,7 @@ import { expect } from 'chai'; import { satisfies as semverSatisfies } from 'semver'; -import { MongoClient } from '../../../src/mongo_client'; +import type { MongoClient } from '../../../src/mongo_client'; import { ReadPreference } from '../../../src/read_preference'; import { TopologyType } from '../../../src/sdam/common'; import { ns } from '../../../src/utils'; @@ -15,9 +15,6 @@ import { patchVersion, topologySatisfies, zip } from './unified-utils'; export type TestConfiguration = InstanceType< typeof import('../../tools/runner/config')['TestConfiguration'] >; -interface MongoDBMochaTestContext extends Mocha.Context { - configuration: TestConfiguration; -} export function trace(message: string): void { if (process.env.UTR_TRACE) { @@ -43,7 +40,7 @@ async function terminateOpenTransactions(client: MongoClient) { } export async function runUnifiedTest( - ctx: MongoDBMochaTestContext, + ctx: Mocha.Context, unifiedSuite: uni.UnifiedSuite, test: uni.Test, testsToSkip?: string[] @@ -103,7 +100,7 @@ export async function runUnifiedTest( trace('satisfiesRequirements'); for (const requirement of allRequirements) { - const met = await topologySatisfies(ctx.configuration, requirement, utilClient); + const met = await topologySatisfies(ctx, requirement, utilClient); if (!met) { return ctx.skip(); } diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts index 3ce2ae8538..21b3d5837f 100644 --- a/test/tools/unified-spec-runner/unified-utils.ts +++ b/test/tools/unified-spec-runner/unified-utils.ts @@ -3,10 +3,8 @@ import ConnectionString from 'mongodb-connection-string-url'; import { gte as semverGte, lte as semverLte } from 'semver'; import { isDeepStrictEqual } from 'util'; -import type { 
Document } from '../../../src'; -import { CollectionOptions, DbOptions, MongoClient } from '../../../src'; +import type { CollectionOptions, DbOptions, Document, MongoClient } from '../../../src'; import { shouldRunServerlessTest } from '../../tools/utils'; -import { TestConfiguration } from './runner'; import type { CollectionOrDatabaseOptions, RunOnRequirement } from './schema'; const ENABLE_UNIFIED_TEST_LOGGING = false; @@ -15,18 +13,26 @@ export function log(message: unknown, ...optionalParameters: unknown[]): void { } export async function topologySatisfies( - config: TestConfiguration, + ctx: Mocha.Context, r: RunOnRequirement, utilClient: MongoClient ): Promise { + const config = ctx.configuration; let ok = true; + + let skipReason; + if (r.minServerVersion) { const minVersion = patchVersion(r.minServerVersion); ok &&= semverGte(config.version, minVersion); + if (!ok && skipReason == null) { + skipReason = `requires mongodb version greater than ${minVersion}`; + } } if (r.maxServerVersion) { const maxVersion = patchVersion(r.maxServerVersion); ok &&= semverLte(config.version, maxVersion); + if (!ok && skipReason == null) skipReason = `requires mongodb version less than ${maxVersion}`; } if (r.topologies) { @@ -41,9 +47,15 @@ export async function topologySatisfies( if (r.topologies.includes('sharded-replicaset') && topologyType === 'sharded') { const shards = await utilClient.db('config').collection('shards').find({}).toArray(); ok &&= shards.length > 0 && shards.every(shard => shard.host.split(',').length > 1); + if (!ok && skipReason == null) { + skipReason = `requires sharded-replicaset but shards.length=${shards.length}`; + } } else { if (!topologyType) throw new Error(`Topology undiscovered: ${config.topologyType}`); ok &&= r.topologies.includes(topologyType); + if (!ok && skipReason == null) { + skipReason = `requires ${r.topologies} but against a ${topologyType} topology`; + } } } @@ -52,21 +64,25 @@ export async function topologySatisfies( for (const [name, value] of Object.entries(r.serverParameters)) { if (name in config.parameters) { ok &&= isDeepStrictEqual(config.parameters[name], value); + if (!ok && skipReason == null) { + skipReason = `requires serverParameter ${name} to be ${value} but found ${config.parameters[name]}`; + } } } } if (r.auth) { - ok &&= - !!utilClient.options.auth || - !!utilClient.options.authSource || - !!utilClient.options.authMechanism; + ok &&= process.env.AUTH === 'auth'; + if (!ok && skipReason == null) skipReason = `requires auth but auth is not enabled`; } if (r.serverless) { ok &&= shouldRunServerlessTest(r.serverless, config.isServerless); + if (!ok && skipReason == null) skipReason = `has serverless set to ${r.serverless}`; } + if (!ok && skipReason != null && ctx.test) ctx.test.skipReason = skipReason; + return ok; } diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 0000000000..798e84ac0a --- /dev/null +++ b/test/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "strict": false, + "allowJs": true, + "checkJs": false + }, + "include": ["../node_modules/@types/mocha/index.d.ts", "./mongodb-mocha.d.ts",] +} From 36f05de6ce7f0d91d3c147ed2caaad116affd711 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Sun, 9 Jan 2022 15:26:15 -0500 Subject: [PATCH 09/11] fix: lint --- test/mongodb-mocha.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/mongodb-mocha.d.ts b/test/mongodb-mocha.d.ts index 97aaebfd91..fb2ffc7648 100644 --- a/test/mongodb-mocha.d.ts 
+++ b/test/mongodb-mocha.d.ts @@ -1,4 +1,4 @@ -import type { TestConfiguration } from "./tools/unified-spec-runner/runner"; +import type { TestConfiguration } from './tools/unified-spec-runner/runner'; /** Defined in test/tools/runner/filters/mongodb_topology_filter.js (topologyTypeToString) */ type TopologyTypes = 'single' | 'replicaset' | 'sharded' | 'load-balanced'; From 220247e835edae21f7a53bbf2c0981d63ea0c609 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Sun, 9 Jan 2022 17:47:48 -0500 Subject: [PATCH 10/11] fix: unfied runner auth filter --- .../tools/unified-spec-runner/unified-utils.ts | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts index 21b3d5837f..fa418bb80b 100644 --- a/test/tools/unified-spec-runner/unified-utils.ts +++ b/test/tools/unified-spec-runner/unified-utils.ts @@ -71,9 +71,21 @@ export async function topologySatisfies( } } - if (r.auth) { - ok &&= process.env.AUTH === 'auth'; - if (!ok && skipReason == null) skipReason = `requires auth but auth is not enabled`; + if (typeof r.auth === 'boolean') { + if (r.auth === true) { + // TODO(NODE-2471): Currently when there are credentials our driver will send a ping command + // All other drivers connect implicitly upon the first operation + // but in node you'll run into auth errors / successes at client.connect() time. + // so we cannot run into saslContinue failPoints that get configured for an operation to fail with + // Ex. 'errors during authentication are processed' in test/spec/load-balancers/sdam-error-handling.yml + ok &&= false; // process.env.AUTH === 'auth'; + if (!ok && skipReason == null) { + skipReason = `requires auth but auth cannot be tested in the unified format - TODO(NODE-2471)`; + } + } else if (r.auth === false) { + ok &&= process.env.AUTH === 'noauth' || process.env.AUTH == null; + if (!ok && skipReason == null) skipReason = `requires no auth but auth is enabled`; + } } if (r.serverless) { From e8c30da7c63eacc6708a83a06d88ee5022f8a102 Mon Sep 17 00:00:00 2001 From: Neal Beeken Date: Mon, 10 Jan 2022 14:01:43 -0500 Subject: [PATCH 11/11] fix: move mocha overrides to global.d.ts --- .gitignore | 2 +- test/mongodb-mocha.d.ts => global.d.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename test/mongodb-mocha.d.ts => global.d.ts (95%) diff --git a/.gitignore b/.gitignore index 6ff665b7ec..72999c8b07 100644 --- a/.gitignore +++ b/.gitignore @@ -57,7 +57,7 @@ lib/ *.d.ts # type definition tests !test/types -!test/mongodb-mocha.d.ts +!global.d.ts .vscode output diff --git a/test/mongodb-mocha.d.ts b/global.d.ts similarity index 95% rename from test/mongodb-mocha.d.ts rename to global.d.ts index fb2ffc7648..f5b1064e64 100644 --- a/test/mongodb-mocha.d.ts +++ b/global.d.ts @@ -1,4 +1,4 @@ -import type { TestConfiguration } from './tools/unified-spec-runner/runner'; +import type { TestConfiguration } from './test/tools/unified-spec-runner/runner'; /** Defined in test/tools/runner/filters/mongodb_topology_filter.js (topologyTypeToString) */ type TopologyTypes = 'single' | 'replicaset' | 'sharded' | 'load-balanced';
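With `global.d.ts` in place, test files get typed access to the metadata overloads on `it` and to the `skipReason` property the reporter prints. The sketch below illustrates how a test might use them; it is not taken from the patches, and the ticket number, version range, topology, and collection names are placeholders.

```js
describe('example: declaring test requirements', function () {
  // Overload 1: it(title, metadata, fn) — `requires` is what the global beforeEach
  // in test/tools/runner/index.js hands to the filters; a failing filter skips the
  // test and records a skipReason.
  it(
    'runs only against a 4.4+ replica set',
    { requires: { mongodb: '>=4.4.0', topology: 'replicaset' } },
    async function () {
      const client = this.configuration.newClient();
      await client.connect();
      await client.db('test').collection('example').findOne({});
      await client.close();
    }
  );

  // Overload 2: it(title, { metadata, test })
  it('needs client-side encryption', {
    metadata: { requires: { clientSideEncryption: true } },
    test: async function () {
      // ... test body ...
    }
  });

  // Manually skipped tests can attach a reason, mirroring the jsdoc example in global.d.ts.
  it.skip('pending an upstream fix', () => {
    // ...
  }).skipReason = 'TODO(NODE-XXXX): placeholder ticket number';
});
```

When a filter rejects the `requires` block, or when `skipReason` is set on a manually skipped test, the reporter change in `mongodb_reporter.js` above prints the reason indented beneath the pending test's name.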