diff --git a/.mocharc.json b/.mocharc.json
index 50daefc5160..64b96180d28 100644
--- a/.mocharc.json
+++ b/.mocharc.json
@@ -3,9 +3,13 @@
   "require": [
     "source-map-support/register",
     "ts-node/register",
-    "test/tools/runner/chai-addons.js"
+    "test/tools/runner/chai-addons.js",
+    "test/tools/runner/hooks/unhandled_checker.ts"
+  ],
+  "extension": [
+    "js",
+    "ts"
   ],
-  "extension": ["js", "ts"],
   "recursive": true,
   "timeout": 60000,
   "failZero": true,
diff --git a/src/cursor/abstract_cursor.ts b/src/cursor/abstract_cursor.ts
index 2d9ec01a3dd..fe0b6ffaef7 100644
--- a/src/cursor/abstract_cursor.ts
+++ b/src/cursor/abstract_cursor.ts
@@ -828,13 +828,16 @@ function cleanupCursor(
   cursor[kKilled] = true;
 
+  if (session.hasEnded) {
+    return completeCleanup();
+  }
+
   executeOperation(
     cursor[kClient],
     new KillCursorsOperation(cursorId, cursorNs, server, { session })
-  ).finally(() => {
-    completeCleanup();
-  });
-  return;
+  )
+    .catch(() => null)
+    .finally(completeCleanup);
 }
 
 /** @internal */
diff --git a/test/integration/change-streams/change_stream.test.ts b/test/integration/change-streams/change_stream.test.ts
index 213a5bd628d..a0533023bed 100644
--- a/test/integration/change-streams/change_stream.test.ts
+++ b/test/integration/change-streams/change_stream.test.ts
@@ -1019,6 +1019,30 @@ describe('Change Streams', function () {
         }
       }
     );
+
+    it(
+      'when closed throws "ChangeStream is closed"',
+      { requires: { topology: '!single' } },
+      async function () {
+        changeStream = collection.watch();
+
+        const loop = (async function () {
+          // eslint-disable-next-line @typescript-eslint/no-unused-vars
+          for await (const _change of changeStream) {
+            return 'loop entered'; // loop should never be entered
+          }
+          return 'loop ended without error'; // loop should not finish without error
+        })();
+
+        await sleep(1);
+        // close() must be awaited: without `await` closeResult is a pending
+        // Promise and the instanceOf(Error) assertion is vacuously true
+        const closeResult = await changeStream.close().catch(error => error);
+        expect(closeResult).to.not.be.instanceOf(Error);
+
+        const result = await loop.catch(error => error);
expect(result).to.be.instanceOf(MongoAPIError); + expect(result.message).to.match(/ChangeStream is closed/i); + } + ); }); describe('#return', function () { diff --git a/test/integration/crud/crud_api.test.ts b/test/integration/crud/crud_api.test.ts index 797f6118473..8460e76c0a7 100644 --- a/test/integration/crud/crud_api.test.ts +++ b/test/integration/crud/crud_api.test.ts @@ -1,4 +1,5 @@ import { expect } from 'chai'; +import { on } from 'events'; import { MongoClient, MongoError, ObjectId, ReturnDocument } from '../../mongodb'; import { assert as test } from '../shared'; @@ -60,130 +61,92 @@ describe('CRUD API', function () { await client.close(); }); - it('should correctly execute find method using crud api', function (done) { - const db = client.db(); - - db.collection('t').insertMany([{ a: 1 }, { a: 1 }, { a: 1 }, { a: 1 }], function (err) { - expect(err).to.not.exist; - - // - // Cursor - // -------------------------------------------------- - const makeCursor = () => { - // Possible methods on the the cursor instance - return db - .collection('t') - .find({}) - .filter({ a: 1 }) - .addCursorFlag('noCursorTimeout', true) - .addQueryModifier('$comment', 'some comment') - .batchSize(2) - .comment('some comment 2') - .limit(2) - .maxTimeMS(50) - .project({ a: 1 }) - .skip(0) - .sort({ a: 1 }); - }; + context('when creating a cursor with find', () => { + let collection; - // - // Exercise count method - // ------------------------------------------------- - const countMethod = function () { - // Execute the different methods supported by the cursor - const cursor = makeCursor(); - cursor.count(function (err, count) { - expect(err).to.not.exist; - test.equal(2, count); - eachMethod(); - }); - }; + beforeEach(async () => { + collection = client.db().collection('t'); + await collection.drop().catch(() => null); + await collection.insertMany([{ a: 1 }, { a: 1 }, { a: 1 }, { a: 1 }]); + }); - // - // Exercise legacy method each - // 
------------------------------------------------- - const eachMethod = function () { - let count = 0; + afterEach(async () => { + await collection?.drop().catch(() => null); + }); + const makeCursor = () => { + // Possible methods on the the cursor instance + return collection + .find({}) + .filter({ a: 1 }) + .addCursorFlag('noCursorTimeout', true) + .addQueryModifier('$comment', 'some comment') + .batchSize(1) + .comment('some comment 2') + .limit(2) + .maxTimeMS(50) + .project({ a: 1 }) + .skip(0) + .sort({ a: 1 }); + }; + + describe('#count()', () => { + it('returns the number of documents', async () => { const cursor = makeCursor(); - cursor.forEach( - () => { - count = count + 1; - }, - err => { - expect(err).to.not.exist; - test.equal(2, count); - toArrayMethod(); - } - ); - }; + const res = await cursor.count(); + expect(res).to.equal(2); + }); + }); - // - // Exercise toArray - // ------------------------------------------------- - const toArrayMethod = function () { + describe('#forEach()', () => { + it('iterates all the documents', async () => { const cursor = makeCursor(); - cursor.toArray(function (err, docs) { - expect(err).to.not.exist; - test.equal(2, docs.length); - nextMethod(); + let count = 0; + await cursor.forEach(() => { + count += 1; }); - }; + expect(count).to.equal(2); + }); + }); - // - // Exercise next method - // ------------------------------------------------- - const nextMethod = function () { + describe('#toArray()', () => { + it('returns an array with all documents', async () => { const cursor = makeCursor(); - cursor.next(function (err, doc) { - expect(err).to.not.exist; - test.ok(doc != null); - - cursor.next(function (err, doc) { - expect(err).to.not.exist; - test.ok(doc != null); + const res = await cursor.toArray(); + expect(res).to.have.lengthOf(2); + }); + }); - cursor.next(function (err, doc) { - expect(err).to.not.exist; - expect(doc).to.not.exist; - streamMethod(); - }); - }); - }); - }; + describe('#next()', () => { + 
it('is callable without blocking', async () => {
+        const cursor = makeCursor();
+        const doc0 = await cursor.next();
+        expect(doc0).to.exist;
+        const doc1 = await cursor.next();
+        expect(doc1).to.exist;
+        const doc2 = await cursor.next();
+        expect(doc2).to.not.exist;
+      });
+    });
 
-    //
-    // Exercise stream
-    // -------------------------------------------------
-    const streamMethod = function () {
-      let count = 0;
+    describe('#stream()', () => {
+      it('creates a node stream that emits data events', async () => {
         const cursor = makeCursor();
         const stream = cursor.stream();
-      stream.on('data', function () {
-        count = count + 1;
-      });
-
-      cursor.once('close', function () {
-        test.equal(2, count);
-        explainMethod();
+        // The cursor has limit(2), so the stream ends after exactly two
+        // documents; counting via async iteration makes the assertion run
+        // (and be awaited) on the test's own call stack instead of inside an
+        // unawaited 'close' listener with a never-incremented const counter.
+        let count = 0;
+        for await (const doc of stream) {
+          expect(doc).to.exist;
+          count += 1;
+        }
+        expect(count).to.equal(2);
       });
-    };
+    });
 
-    //
-    // Explain method
-    // -------------------------------------------------
-    const explainMethod = function () {
+    describe('#explain()', () => {
+      it('returns an explain document', async () => {
         const cursor = makeCursor();
-      cursor.explain(function (err, result) {
-        expect(err).to.not.exist;
-        test.ok(result != null);
-
-        client.close(done);
-      });
-    };
-
-    // Execute all the methods
-    countMethod();
+        const result = await cursor.explain();
+        expect(result).to.exist;
+      });
     });
   });
diff --git a/test/integration/crud/find_cursor_methods.test.js b/test/integration/crud/find_cursor_methods.test.js
index 9ead78b794c..7170256403e 100644
--- a/test/integration/crud/find_cursor_methods.test.js
+++ b/test/integration/crud/find_cursor_methods.test.js
@@ -108,50 +108,58 @@ describe('Find Cursor', function () {
     });
   });
 
-  context('#close', function () {
-    it('should send a killCursors command when closed before completely iterated', function (done) {
-      const commands = [];
-      client.on('commandStarted', filterForCommands(['killCursors'], commands));
+  describe('#close', function () {
+    let collection;
 
-      const coll = client.db().collection('abstract_cursor');
-      const cursor =
coll.find({}, { batchSize: 2 }); - cursor.next(err => { - expect(err).to.not.exist; - cursor.close(err => { - expect(err).to.not.exist; - expect(commands).to.have.length(1); - done(); - }); - }); + beforeEach(async function () { + collection = client.db().collection('abstract_cursor'); + await collection.drop().catch(() => null); + await collection.insertMany([{ a: 1 }, { a: 2 }, { a: 3 }, { a: 4 }]); }); - it('should not send a killCursors command when closed after completely iterated', function (done) { - const commands = []; - client.on('commandStarted', filterForCommands(['killCursors'], commands)); + afterEach(async function () { + await collection?.drop().catch(() => null); + }); - const coll = client.db().collection('abstract_cursor'); - const cursor = coll.find({}, { batchSize: 2 }); - cursor.toArray(err => { - expect(err).to.not.exist; + context('when closed before completely iterated', () => { + it('sends a killCursors command', async () => { + const killCursorsCommands = []; + client.on('commandStarted', filterForCommands(['killCursors'], killCursorsCommands)); - cursor.close(err => { - expect(err).to.not.exist; - expect(commands).to.have.length(0); - done(); - }); + const cursor = collection.find({}, { batchSize: 2 }); + + const doc = await cursor.next(); + expect(doc).property('a', 1); + + expect(killCursorsCommands).to.have.length(0); + await cursor.close(); + expect(killCursorsCommands).to.have.length(1); }); }); - it('should not send a killCursors command when closed before initialization', function (done) { - const commands = []; - client.on('commandStarted', filterForCommands(['killCursors'], commands)); + context('when closed after completely iterated', () => { + it('does not send a killCursors command', async () => { + const killCursorsCommands = []; + client.on('commandStarted', filterForCommands(['killCursors'], killCursorsCommands)); - const coll = client.db().collection('abstract_cursor'); - const cursor = coll.find({}, { batchSize: 2 }); - 
cursor.close(err => { - expect(err).to.not.exist; - expect(commands).to.have.length(0); - done(); + const cursor = collection.find(); + await cursor.toArray(); + expect(killCursorsCommands).to.have.length(0); + await cursor.close(); + expect(killCursorsCommands).to.have.length(0); + }); + }); + + context('when closed before initialization', () => { + it('does not send a killCursors command', async () => { + const killCursorsCommands = []; + client.on('commandStarted', filterForCommands(['killCursors'], killCursorsCommands)); + + const cursor = collection.find(); + + expect(killCursorsCommands).to.have.length(0); + await cursor.close(); + expect(killCursorsCommands).to.have.length(0); }); }); }); diff --git a/test/integration/crud/misc_cursors.test.js b/test/integration/crud/misc_cursors.test.js index 3e16a7eeef0..87e888ac375 100644 --- a/test/integration/crud/misc_cursors.test.js +++ b/test/integration/crud/misc_cursors.test.js @@ -8,6 +8,7 @@ const { expect } = require('chai'); const BSON = require('bson'); const sinon = require('sinon'); const { Writable } = require('stream'); +const { once, on } = require('events'); const { setTimeout } = require('timers'); const { ReadPreference } = require('../../mongodb'); const { ServerType } = require('../../mongodb'); @@ -1692,62 +1693,46 @@ describe('Cursor', function () { expect(cursor.session).to.not.equal(clonedCursor.session); }); - it('destroying a stream stops it', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded'] } - }, + it('destroying a stream stops it', async function () { + const db = client.db(); + await db.dropCollection('destroying_a_stream_stops_it').catch(() => null); + const collection = await db.createCollection('destroying_a_stream_stops_it'); - test: function (done) { - const configuration = this.configuration; - client.connect((err, client) => { - 
expect(err).to.not.exist; - this.defer(() => client.close()); + const docs = Array.from({ length: 10 }, (_, i) => ({ b: i + 1 })); - const db = client.db(configuration.db); - db.createCollection('destroying_a_stream_stops_it', (err, collection) => { - expect(err).to.not.exist; + await collection.insertMany(docs); - var docs = []; - for (var ii = 0; ii < 10; ++ii) docs.push({ b: ii + 1 }); + const cursor = collection.find(); + const stream = cursor.stream(); - // insert all docs - collection.insert(docs, configuration.writeConcernMax(), err => { - expect(err).to.not.exist; + expect(cursor).property('closed', false); - var finished = 0, - i = 0; + const willClose = once(cursor, 'close'); + const willEnd = once(stream, 'end'); - const cursor = collection.find(); - const stream = cursor.stream(); + const dataEvents = on(stream, 'data'); - test.strictEqual(false, cursor.closed); - - stream.on('data', function () { - if (++i === 5) { - stream.destroy(); - } - }); + for (let i = 0; i < 5; i++) { + let { + value: [doc] + } = await dataEvents.next(); + expect(doc).property('b', i + 1); + } - cursor.once('close', testDone); - stream.once('error', testDone); - stream.once('end', testDone); + // After 5 successful data events, destroy stream + stream.destroy(); - function testDone(err) { - ++finished; - if (finished === 2) { - test.strictEqual(undefined, err); - test.strictEqual(5, i); - test.strictEqual(2, finished); - test.strictEqual(true, cursor.closed); - done(); - } - } - }); - }); - }); - } + // We should get an end event on the stream and a close event on the cursor + // We should **not** get an 'error' event, + // the following will throw if either stream or cursor emitted an 'error' event + await Promise.race([ + willEnd, + sleep(100).then(() => Promise.reject(new Error('end event never emitted'))) + ]); + await Promise.race([ + willClose, + sleep(100).then(() => Promise.reject(new Error('close event never emitted'))) + ]); }); // NOTE: skipped for use of topology 
manager diff --git a/test/integration/node-specific/operation_examples.test.ts b/test/integration/node-specific/operation_examples.test.ts index b0de3fc1276..839fa3ef210 100644 --- a/test/integration/node-specific/operation_examples.test.ts +++ b/test/integration/node-specific/operation_examples.test.ts @@ -4409,7 +4409,7 @@ describe('Operations', function () { .then(function (_collection) { collection = _collection; - const docs = []; + const docs: Array<{ a: number }> = []; for (let i = 0; i < 1000; i++) docs.push({ a: i }); // Insert a document in the capped collection diff --git a/test/mocha_mongodb.json b/test/mocha_mongodb.json index 6203eea2b1d..d029ca46919 100644 --- a/test/mocha_mongodb.json +++ b/test/mocha_mongodb.json @@ -5,6 +5,7 @@ "ts-node/register", "test/tools/runner/chai-addons.js", "test/tools/runner/hooks/configuration.js", + "test/tools/runner/hooks/unhandled_checker.ts", "test/tools/runner/hooks/leak_checker.ts", "test/tools/runner/hooks/legacy_crud_shims.ts" ], diff --git a/test/tools/runner/hooks/unhandled_checker.ts b/test/tools/runner/hooks/unhandled_checker.ts new file mode 100644 index 00000000000..7f25ea65415 --- /dev/null +++ b/test/tools/runner/hooks/unhandled_checker.ts @@ -0,0 +1,44 @@ +import { expect } from 'chai'; + +const unhandled: { + rejections: Error[]; + exceptions: Error[]; + unknown: unknown[]; +} = { + rejections: [], + exceptions: [], + unknown: [] +}; + +const uncaughtExceptionListener: NodeJS.UncaughtExceptionListener = (error, origin) => { + if (origin === 'uncaughtException') { + unhandled.exceptions.push(error); + } else if (origin === 'unhandledRejection') { + unhandled.rejections.push(error); + } else { + unhandled.unknown.push(error); + } +}; + +function beforeEachUnhandled() { + unhandled.rejections = []; + unhandled.exceptions = []; + unhandled.unknown = []; + process.addListener('uncaughtExceptionMonitor', uncaughtExceptionListener); +} + +function afterEachUnhandled() { + 
process.removeListener('uncaughtExceptionMonitor', uncaughtExceptionListener); + try { + expect(unhandled).property('rejections').to.have.lengthOf(0); + expect(unhandled).property('exceptions').to.have.lengthOf(0); + expect(unhandled).property('unknown').to.have.lengthOf(0); + } catch (error) { + this.test.error(error); + } + unhandled.rejections = []; + unhandled.exceptions = []; + unhandled.unknown = []; +} + +module.exports = { mochaHooks: { beforeEach: beforeEachUnhandled, afterEach: afterEachUnhandled } };