diff --git a/test/integration/collection-management/collection.test.ts b/test/integration/collection-management/collection.test.ts index 78ec3f86f9..b9180a8958 100644 --- a/test/integration/collection-management/collection.test.ts +++ b/test/integration/collection-management/collection.test.ts @@ -547,7 +547,7 @@ describe('Collection', function () { const capped = await collection.isCapped(); expect(capped).to.be.false; } finally { - client.close(); + await client.close(); } } diff --git a/test/integration/connection-monitoring-and-pooling/connection.test.ts b/test/integration/connection-monitoring-and-pooling/connection.test.ts index 0e5fd45323..e68c405f1b 100644 --- a/test/integration/connection-monitoring-and-pooling/connection.test.ts +++ b/test/integration/connection-monitoring-and-pooling/connection.test.ts @@ -268,7 +268,15 @@ describe('Connection', function () { // This test exists to prevent regression of processing many messages inside one chunk. it( 'processes all of them and emits heartbeats', - { requires: { topology: 'replicaset', mongodb: '>=4.4' } }, + { + requires: { + topology: 'replicaset', + mongodb: '>=4.4', + // When compression is enabled, processing heartbeat events is asynchronous. + predicate: () => + process.env.COMPRESSOR ? 'test requires that compression is disabled' : true + } + }, async function () { let hbSuccess = 0; client.on('serverHeartbeatSucceeded', () => (hbSuccess += 1)); @@ -291,6 +299,7 @@ describe('Connection', function () { // All of the hb will be emitted synchronously in the next tick as the entire chunk is processed. 
await processTick(); + expect(hbSuccess).to.be.greaterThan(1000); } ); diff --git a/test/integration/crud/insert.test.js b/test/integration/crud/insert.test.js index c7c212d91d..8fa676574a 100644 --- a/test/integration/crud/insert.test.js +++ b/test/integration/crud/insert.test.js @@ -1825,7 +1825,7 @@ describe('crud - insert', function () { .toArray(); const doc = docs.pop(); expect(doc.a._bsontype).to.equal('Long'); - client.close(); + await client.close(); } }); diff --git a/test/integration/crud/misc_cursors.test.js b/test/integration/crud/misc_cursors.test.js index c558ec5797..ca35bdaef9 100644 --- a/test/integration/crud/misc_cursors.test.js +++ b/test/integration/crud/misc_cursors.test.js @@ -1887,52 +1887,6 @@ describe('Cursor', function () { } ); - it('shouldAwaitData', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded'] } - }, - - test: function (done) { - // www.mongodb.com/docs/display/DOCS/Tailable+Cursors - - const configuration = this.configuration; - client.connect((err, client) => { - expect(err).to.not.exist; - this.defer(() => client.close()); - - const db = client.db(configuration.db); - const options = { capped: true, size: 8 }; - db.createCollection( - 'should_await_data_retry_tailable_cursor', - options, - (err, collection) => { - expect(err).to.not.exist; - - collection.insert({ a: 1 }, configuration.writeConcernMax(), err => { - expect(err).to.not.exist; - - // Create cursor with awaitData, and timeout after the period specified - const cursor = collection.find({}, { tailable: true, awaitData: true }); - this.defer(() => cursor.close()); - - // Execute each - cursor.forEach( - () => cursor.close(), - () => { - // Even though cursor is exhausted, should not close session - // unless cursor is manually closed, due to awaitData / tailable - done(); - } - ); - }); - } - ); - }); - } - }); - 
it('shouldAwaitDataWithDocumentsAvailable', function (done) { // www.mongodb.com/docs/display/DOCS/Tailable+Cursors diff --git a/test/integration/mongodb-handshake/mongodb-handshake.test.ts b/test/integration/mongodb-handshake/mongodb-handshake.test.ts index 85698af946..da5f0a9896 100644 --- a/test/integration/mongodb-handshake/mongodb-handshake.test.ts +++ b/test/integration/mongodb-handshake/mongodb-handshake.test.ts @@ -55,7 +55,7 @@ describe('MongoDB Handshake', () => { after(() => sinon.restore()); it('constructs a handshake with the specified compressors', async function () { - client = this.configuration.newClient({ compressors: ['snappy'] }); + client = this.configuration.newClient({}, { compressors: ['snappy'] }); // The load-balanced mode doesn’t perform SDAM, // so `connect` doesn’t do anything unless authentication is enabled. // Force the driver to send a command to the server in the noauth mode. diff --git a/test/integration/node-specific/bson-options/utf8_validation.test.ts b/test/integration/node-specific/bson-options/utf8_validation.test.ts index 85f128b533..2586b28ed2 100644 --- a/test/integration/node-specific/bson-options/utf8_validation.test.ts +++ b/test/integration/node-specific/bson-options/utf8_validation.test.ts @@ -79,6 +79,8 @@ describe('class MongoDBResponse', () => { describe('parsing of utf8-invalid documents with cursors', function () { let client: MongoClient; let collection: Collection; + const compressionPredicate = () => + process.env.COMPRESSOR ? 'Test requires that compression is disabled' : true; /** * Inserts a document with malformed utf8 bytes. 
This method spies on socket.write, and then waits @@ -117,6 +119,10 @@ describe('parsing of utf8-invalid documents with cursors', function () { } beforeEach(async function () { + if (typeof compressionPredicate() === 'string') { + this.currentTest.skipReason = compressionPredicate() as string; + this.skip(); + } client = this.configuration.newClient(); await client.connect(); const db = client.db('test'); @@ -128,7 +134,7 @@ describe('parsing of utf8-invalid documents with cursors', function () { afterEach(async function () { sinon.restore(); - await client.close(); + await client?.close(); }); context('when utf-8 validation is explicitly disabled', function () { diff --git a/test/integration/node-specific/compression.test.ts b/test/integration/node-specific/compression.test.ts new file mode 100644 index 0000000000..1e3bada815 --- /dev/null +++ b/test/integration/node-specific/compression.test.ts @@ -0,0 +1,60 @@ +import { expect } from 'chai'; + +describe('compression configuration tests', function () { + describe('process.env.COMPRESSOR is set', function () { + it( + 'enables compression when set in the environment', + { + requires: { + predicate: () => !!process.env.COMPRESSOR || 'compression must be enabled.' + } + }, + function () { + const client = this.configuration.newClient(); + expect(client.s.options.compressors).to.deep.equal([process.env.COMPRESSOR]); + } + ); + + it( + 'includes the compressor in the connection string when set in the environment', + { + requires: { + predicate: () => !!process.env.COMPRESSOR || 'compression must be enabled.' + } + }, + function () { + const url = this.configuration.url(); + expect(url).to.include(`compressors=${process.env.COMPRESSOR}`); + } + ); + }); + + describe('process.env.COMPRESSOR is unset', function () { + it( + 'does not enable compression when unset in the environment', + { + requires: { + predicate: () => !process.env.COMPRESSOR || 'compression cannot be enabled.' 
+ } + }, + function () { + const client = this.configuration.newClient(); + + expect(client.s.options.compressors).to.deep.equal(['none']); + } + ); + + it( + 'does not add compressors to the connection string when unset in the environment', + { + requires: { + predicate: () => !process.env.COMPRESSOR || 'compression cannot be enabled.' + } + }, + function () { + const url = this.configuration.url(); + expect(url).to.not.include(`compressors=none`); + } + ); + }); +}); diff --git a/test/integration/read-write-concern/write_concern.test.ts b/test/integration/read-write-concern/write_concern.test.ts index 58afc2d04e..2610288b6f 100644 --- a/test/integration/read-write-concern/write_concern.test.ts +++ b/test/integration/read-write-concern/write_concern.test.ts @@ -1,6 +1,5 @@ import { expect } from 'chai'; import { on, once } from 'events'; -import { gte } from 'semver'; import * as sinon from 'sinon'; import { @@ -174,6 +173,8 @@ describe('Write Concern', function () { }); describe('fire-and-forget protocol', function () { + const compressionPredicate = () => + process.env.COMPRESSOR ? 
'Test requires that compression is disabled' : true; context('when writeConcern = 0 and OP_MSG is used', function () { const writeOperations: { name: string; command: any; expectedReturnVal: any }[] = [ { @@ -270,10 +271,6 @@ describe('Write Concern', function () { let spy; beforeEach(async function () { - if (gte('3.6.0', this.configuration.version)) { - this.currentTest.skipReason = 'Test requires OP_MSG, needs to be on MongoDB 3.6+'; - this.skip(); - } spy = sinon.spy(OpMsgRequest.prototype, 'toBin'); client = this.configuration.newClient({ monitorCommands: true, w: 0 }); await client.connect(); @@ -284,22 +281,34 @@ describe('Write Concern', function () { client.close(); }); - it('the request should have moreToCome bit set', async function () { - await op.command(client); - expect(spy.returnValues[spy.returnValues.length - 1][0][16]).to.equal(2); - }); + it( + 'the request should have moreToCome bit set', + { requires: { predicate: compressionPredicate } }, + async function () { + await op.command(client); + expect(spy.returnValues[spy.returnValues.length - 1][0][16]).to.equal(2); + } + ); - it('the return value of the command should be nullish', async function () { - const result = await op.command(client); - expect(result).to.containSubset(op.expectedReturnVal); - }); + it( + 'the return value of the command should be nullish', + { requires: { predicate: compressionPredicate } }, + async function () { + const result = await op.command(client); + expect(result).to.containSubset(op.expectedReturnVal); + } + ); - it('commandSucceededEvent should have reply with only {ok: 1}', async function () { - const events: CommandSucceededEvent[] = []; - client.on('commandSucceeded', event => events.push(event)); - await op.command(client); - expect(events[0]).to.containSubset({ reply: { ok: 1 } }); - }); + it( + 'commandSucceededEvent should have reply with only {ok: 1}', + { requires: { predicate: compressionPredicate } }, + async function () { + const events: 
CommandSucceededEvent[] = []; + client.on('commandSucceeded', event => events.push(event)); + await op.command(client); + expect(events[0]).to.containSubset({ reply: { ok: 1 } }); + } + ); }); } }); diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index 14a28a5214..b7f9951d57 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -4,6 +4,7 @@ import * as types from 'node:util/types'; import { expect } from 'chai'; import { type Context } from 'mocha'; import ConnectionString from 'mongodb-connection-string-url'; +import { type CompressorName } from 'mongodb-legacy'; import * as qs from 'querystring'; import * as url from 'url'; @@ -64,6 +65,21 @@ function convertToConnStringMap(obj: Record) { return result.join(','); } +function getCompressor(compressor: string | undefined): CompressorName | null { + if (!compressor) return null; + + switch (compressor) { + case 'zstd': + return 'zstd'; + case 'zlib': + return 'zlib'; + case 'snappy': + return 'snappy'; + default: + throw new Error('unsupported test runner compressor, would default to no compression'); + } +} + export class TestConfiguration { version: string; clientSideEncryption: { @@ -94,6 +110,7 @@ export class TestConfiguration { activeResources: number; isSrv: boolean; filters: Record; + compressor: CompressorName | null; constructor( private uri: string, @@ -111,6 +128,7 @@ export class TestConfiguration { this.buildInfo = context.buildInfo; this.serverApi = context.serverApi; this.isSrv = uri.indexOf('mongodb+srv') > -1; + this.compressor = getCompressor(process.env.COMPRESSOR); this.options = { hosts, hostAddresses, @@ -200,7 +218,13 @@ export class TestConfiguration { } newClient(urlOrQueryOptions?: string | Record, serverOptions?: MongoClientOptions) { - serverOptions = Object.assign({}, getEnvironmentalOptions(), serverOptions); + const baseOptions: MongoClientOptions = this.compressor + ? 
{ + compressors: this.compressor + } + : {}; + + serverOptions = Object.assign(baseOptions, getEnvironmentalOptions(), serverOptions); if (this.loggingEnabled && !Object.hasOwn(serverOptions, 'mongodbLogPath')) { serverOptions = this.setupLogging(serverOptions); @@ -399,6 +423,8 @@ export class TestConfiguration { url.searchParams.append('authSource', 'admin'); } + this.compressor && url.searchParams.append('compressors', this.compressor); + // Secrets setup for OIDC always sets the workload URI as MONGODB_URI_SINGLE. if (process.env.MONGODB_URI_SINGLE?.includes('MONGODB-OIDC')) { return process.env.MONGODB_URI_SINGLE; diff --git a/test/tools/unified-spec-runner/entities.ts b/test/tools/unified-spec-runner/entities.ts index bc43bac25b..74f6242731 100644 --- a/test/tools/unified-spec-runner/entities.ts +++ b/test/tools/unified-spec-runner/entities.ts @@ -212,7 +212,7 @@ export class UnifiedMongoClient extends MongoClient { ...getEnvironmentalOptions(), ...(description.serverApi ? { serverApi: description.serverApi } : {}), // TODO(NODE-5785): We need to increase the truncation length because signature.hash is a Buffer making hellos too long - mongodbLogMaxDocumentLength: 1250 + mongodbLogMaxDocumentLength: 1500 }; let logCollector: { buffer: LogMessage[]; write: (log: Log) => void } | undefined;