From 02f06dfbcc94d5f7bf4b65ebdc76d8c61b391627 Mon Sep 17 00:00:00 2001 From: Rhys Howell Date: Mon, 10 Mar 2025 13:06:59 -0400 Subject: [PATCH 1/3] fix: update MongoDB schema converter to remove invalid fields, add integration test --- .../internalToMongoDB.test.ts | 11 +- src/schema-converters/internalToMongoDB.ts | 29 +-- test/all-bson-types-fixture.ts | 26 ++- test/integration/generateAndValidate.test.ts | 191 +++++++++++++----- 4 files changed, 183 insertions(+), 74 deletions(-) diff --git a/src/schema-converters/internalToMongoDB.test.ts b/src/schema-converters/internalToMongoDB.test.ts index 4847e2c..565874e 100644 --- a/src/schema-converters/internalToMongoDB.test.ts +++ b/src/schema-converters/internalToMongoDB.test.ts @@ -895,7 +895,6 @@ describe('internalSchemaToMongoDB', async function() { const mongodb = await convertInternalToMongodb(internal); assert.deepStrictEqual(mongodb, { bsonType: 'object', - required: [], properties: { _id: { bsonType: 'objectId' @@ -939,8 +938,7 @@ describe('internalSchemaToMongoDB', async function() { uuidOld: { bsonType: 'binData' } - }, - required: [] + } }, boolean: { bsonType: 'bool' @@ -984,8 +982,7 @@ describe('internalSchemaToMongoDB', async function() { key: { bsonType: 'string' } - }, - required: [] + } }, objectId: { bsonType: 'objectId' @@ -1193,7 +1190,6 @@ describe('internalSchemaToMongoDB', async function() { const mongodb = await convertInternalToMongodb(internal); assert.deepStrictEqual(mongodb, { bsonType: 'object', - required: [], properties: { genres: { bsonType: 'array', @@ -1338,7 +1334,6 @@ describe('internalSchemaToMongoDB', async function() { const mongodb = await convertInternalToMongodb(internal); assert.deepStrictEqual(mongodb, { bsonType: 'object', - required: [], properties: { genres: { bsonType: 'array', @@ -1510,7 +1505,6 @@ describe('internalSchemaToMongoDB', async function() { const mongodb = await convertInternalToMongodb(internal); assert.deepStrictEqual(mongodb, { bsonType: 'object', - 
required: [], properties: { mixedType: { bsonType: ['int', 'string'] @@ -1626,7 +1620,6 @@ describe('internalSchemaToMongoDB', async function() { const mongodb = await convertInternalToMongodb(internal); assert.deepStrictEqual(mongodb, { bsonType: 'object', - required: [], properties: { mixedComplexType: { anyOf: [ diff --git a/src/schema-converters/internalToMongoDB.ts b/src/schema-converters/internalToMongoDB.ts index c40447f..d2edf95 100644 --- a/src/schema-converters/internalToMongoDB.ts +++ b/src/schema-converters/internalToMongoDB.ts @@ -46,15 +46,17 @@ async function parseType(type: SchemaType, signal?: AbortSignal): Promise 0) { + schema.items = items; + } + } else if (type.bsonType === 'Document') { + Object.assign(schema, + await parseFields((type as DocumentSchemaType).fields, signal) + ); } return schema; @@ -83,17 +85,17 @@ async function parseTypes(types: SchemaType[], signal?: AbortSignal): Promise { - const required = []; + const required: string[] = []; const properties: MongoDBJSONSchema['properties'] = {}; for (const field of fields) { if (field.probability === 1) required.push(field.name); properties[field.name] = await parseTypes(field.types, signal); } - return { required, properties }; + return { properties, ...(required.length > 0 ? { required } : {}) }; } export async function convertInternalToMongodb( @@ -104,7 +106,8 @@ export async function convertInternalToMongodb( const { required, properties } = await parseFields(internalSchema.fields, options.signal); const schema: MongoDBJSONSchema = { bsonType: 'object', - required, + // Prevent adding an empty required array as it isn't valid. + ...((required === undefined) ? 
{} : { required }), properties }; return schema; diff --git a/test/all-bson-types-fixture.ts b/test/all-bson-types-fixture.ts index b8cfb5f..53bd6eb 100644 --- a/test/all-bson-types-fixture.ts +++ b/test/all-bson-types-fixture.ts @@ -49,9 +49,33 @@ export const allBSONTypesDoc = { uuid: new UUID('AAAAAAAA-AAAA-4AAA-AAAA-AAAAAAAAAAAA'), // 4 md5: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 5), // 5 encrypted: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 6), // 6 - compressedTimeSeries: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 7), // 7 + compressedTimeSeries: new Binary( + Buffer.from( + 'CQCKW/8XjAEAAIfx//////////H/////////AQAAAAAAAABfAAAAAAAAAAEAAAAAAAAAAgAAAAAAAAAHAAAAAAAAAA4AAAAAAAAAAA==', + 'base64' + ), + 7 + ), // 7 custom: Binary.createFromBase64('//8=', 128) // 128 }, dbRef: new DBRef('namespace', new ObjectId('642d76b4b7ebfab15d3c4a78')) // not actually a separate type, just a convention }; + +// Includes some edge cases like empty objects, nested empty arrays, etc. 
+export const allBSONTypesWithEdgeCasesDoc = { + ...allBSONTypesDoc, + emptyObject: {}, + objectWithNestedEmpty: { + nestedEmpty: {} + }, + emptyArray: [], + arrayOfEmptyArrays: [[], []], + // infinityNum: new Double(Infinity), + // negativeInfinityNum: new Double(-Infinity), + // NaNNum: new Double(NaN) + + infinityNum: Infinity, + negativeInfinityNum: -Infinity, + NaNNum: NaN +}; diff --git a/test/integration/generateAndValidate.test.ts b/test/integration/generateAndValidate.test.ts index 7509d32..041a2c3 100644 --- a/test/integration/generateAndValidate.test.ts +++ b/test/integration/generateAndValidate.test.ts @@ -1,10 +1,17 @@ -import { analyzeDocuments } from '../../src'; import Ajv2020 from 'ajv/dist/2020'; import assert from 'assert'; -import { ObjectId, Int32, Double, EJSON } from 'bson'; +import { + Double, + Int32, + ObjectId, + EJSON +} from 'bson'; import { MongoClient, type Db } from 'mongodb'; import { mochaTestServer } from '@mongodb-js/compass-test-server'; +import { allBSONTypesWithEdgeCasesDoc } from '../all-bson-types-fixture'; +import { analyzeDocuments } from '../../src'; + const bsonDocuments = [{ _id: new ObjectId('67863e82fb817085a6b0ebad'), title: 'My book', @@ -47,74 +54,156 @@ describe('Documents -> Generate schema -> Validate Documents against the schema' }); }); -describe('Documents -> Generate schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() { +describe('With a MongoDB Cluster', function() { let client: MongoClient; let db: Db; const cluster = mochaTestServer(); before(async function() { - // Create the schema validation rule. - const analyzedDocuments = await analyzeDocuments(bsonDocuments); - const schema = await analyzedDocuments.getMongoDBJsonSchema(); - const validationRule = { - $jsonSchema: schema - }; - // Connect to the mongodb instance. 
const connectionString = cluster().connectionString; client = new MongoClient(connectionString); await client.connect(); db = client.db('test'); - - // Create a collection with the schema validation in Compass. - await db.createCollection('books', { - validator: validationRule - }); }); + after(async function() { await client?.close(); }); - it('allows inserting valid documents', async function() { - await db.collection('books').insertMany(bsonDocuments); - }); + describe('Documents -> Generate basic schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() { + before(async function() { + // Create the schema validation rule. + const analyzedDocuments = await analyzeDocuments(bsonDocuments); + const schema = await analyzedDocuments.getMongoDBJsonSchema(); + const validationRule = { + $jsonSchema: schema + }; + + // Create a collection with the schema validation. + await db.createCollection('books', { + validator: validationRule + }); + }); + + it('allows inserting valid documents', async function() { + await db.collection('books').insertMany(bsonDocuments); + }); + + it('prevents inserting invalid documents', async function() { + const invalidDocs = [{ + _id: new ObjectId('67863e82fb817085a6b0ebba'), + title: 'Pineapple 1', + year: new Int32(1983), + genres: [ + 'crimi', + 'comedy', + { + short: 'scifi', + long: 'science fiction' + } + ], + number: 'an invalid string' + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbb'), + title: 'Pineapple 2', + year: 'year a string' + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbc'), + title: 123, + year: new Int32('1999') + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbc'), + title: 'No year' + }]; - it('prevents inserting invalid documents', async function() { - const invalidDocs = [{ - _id: new ObjectId('67863e82fb817085a6b0ebba'), - title: 'Pineapple 1', - year: new Int32(1983), - genres: [ - 'crimi', - 'comedy', - { - short: 'scifi', - long: 'science fiction' + for 
(const doc of invalidDocs) { + try { + await db.collection('books').insertOne(doc); + + throw new Error('This should not be reached'); + } catch (e: any) { + const expectedMessage = 'Document failed validation'; + assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`); } - ], - number: 'an invalid string' - }, { - _id: new ObjectId('67863eacfb817085a6b0ebbb'), - title: 'Pineapple 2', - year: 'year a string' - }, { - _id: new ObjectId('67863eacfb817085a6b0ebbc'), - title: 123, - year: new Int32('1999') - }, { - _id: new ObjectId('67863eacfb817085a6b0ebbc'), - title: 'No year' - }]; - - for (const doc of invalidDocs) { + } + }); + }); + + describe('[All Types] Documents -> Generate basic schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() { + const allTypesCollection = 'allTypes'; + + before(async function() { + await db.collection(allTypesCollection).insertOne(allBSONTypesWithEdgeCasesDoc); + const docsFromCollection = await db.collection(allTypesCollection).find().toArray(); + + // Create the schema validation rule. + const analyzedDocuments = await analyzeDocuments(docsFromCollection); + const schema = await analyzedDocuments.getMongoDBJsonSchema(); + const validationRule = { + $jsonSchema: schema + }; + // Update the collection with the schema validation. 
+ await db.command({ + collMod: allTypesCollection, + validator: validationRule + }); + }); + + it('allows inserting valid documents (does not error)', async function() { + const docs = [{ + ...allBSONTypesWithEdgeCasesDoc, + _id: new ObjectId() + }, { + ...allBSONTypesWithEdgeCasesDoc, + _id: new ObjectId() + }]; + try { - await db.collection('books').insertOne(doc); + await db.collection(allTypesCollection).insertMany(docs); + } catch (err) { + console.error('Error inserting documents', EJSON.stringify(err, undefined, 2)); + throw err; + } + }); + + it('prevents inserting invalid documents', async function() { + const invalidDocs = [{ + _id: new ObjectId('67863e82fb817085a6b0ebba'), + title: 'Pineapple 1', + year: new Int32(1983), + genres: [ + 'crimi', + 'comedy', + { + short: 'scifi', + long: 'science fiction' + } + ], + number: 'an invalid string' + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbb'), + title: 'Pineapple 2', + year: 'year a string' + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbc'), + title: 123, + year: new Int32('1999') + }, { + _id: new ObjectId('67863eacfb817085a6b0ebbc'), + title: 'No year' + }]; + + for (const doc of invalidDocs) { + try { + await db.collection(allTypesCollection).insertOne(doc); - throw new Error('This should not be reached'); - } catch (e: any) { - const expectedMessage = 'Document failed validation'; - assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`); + throw new Error('This should not be reached'); + } catch (e: any) { + const expectedMessage = 'Document failed validation'; + assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`); + } } - } + }); }); }); From fb3a426a2e6936897f95ed4e7eaee1861c7631ec Mon Sep 17 00:00:00 2001 From: Rhys Howell Date: Mon, 10 Mar 2025 13:07:54 -0400 Subject: [PATCH 2/3] fixup: remove comment --- 
test/all-bson-types-fixture.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/all-bson-types-fixture.ts b/test/all-bson-types-fixture.ts index 53bd6eb..ed51891 100644 --- a/test/all-bson-types-fixture.ts +++ b/test/all-bson-types-fixture.ts @@ -71,10 +71,6 @@ export const allBSONTypesWithEdgeCasesDoc = { }, emptyArray: [], arrayOfEmptyArrays: [[], []], - // infinityNum: new Double(Infinity), - // negativeInfinityNum: new Double(-Infinity), - // NaNNum: new Double(NaN) - infinityNum: Infinity, negativeInfinityNum: -Infinity, NaNNum: NaN From 796ff2983a913da7b1c442d8ddd40f0910838200 Mon Sep 17 00:00:00 2001 From: Paula Stachova Date: Fri, 14 Mar 2025 09:36:34 +0100 Subject: [PATCH 3/3] fix: exclude dbRef from the edge-case fixture and read docs with promoteValues disabled in the all-types integration test --- test/all-bson-types-fixture.ts | 9 +++++++-- test/integration/generateAndValidate.test.ts | 10 +++++----- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/test/all-bson-types-fixture.ts b/test/all-bson-types-fixture.ts index ed51891..1e9232f 100644 --- a/test/all-bson-types-fixture.ts +++ b/test/all-bson-types-fixture.ts @@ -62,9 +62,14 @@ export const allBSONTypesDoc = { dbRef: new DBRef('namespace', new ObjectId('642d76b4b7ebfab15d3c4a78')) // not actually a separate type, just a convention }; +const { + dbRef, + ...allValidBSONTypesDoc +} = allBSONTypesDoc; + // Includes some edge cases like empty objects, nested empty arrays, etc. 
-export const allBSONTypesWithEdgeCasesDoc = { - ...allBSONTypesDoc, +export const allValidBSONTypesWithEdgeCasesDoc = { + ...allValidBSONTypesDoc, emptyObject: {}, objectWithNestedEmpty: { nestedEmpty: {} diff --git a/test/integration/generateAndValidate.test.ts b/test/integration/generateAndValidate.test.ts index 041a2c3..acab887 100644 --- a/test/integration/generateAndValidate.test.ts +++ b/test/integration/generateAndValidate.test.ts @@ -9,7 +9,7 @@ import { import { MongoClient, type Db } from 'mongodb'; import { mochaTestServer } from '@mongodb-js/compass-test-server'; -import { allBSONTypesWithEdgeCasesDoc } from '../all-bson-types-fixture'; +import { allValidBSONTypesWithEdgeCasesDoc } from '../all-bson-types-fixture'; import { analyzeDocuments } from '../../src'; const bsonDocuments = [{ @@ -134,8 +134,8 @@ describe('With a MongoDB Cluster', function() { const allTypesCollection = 'allTypes'; before(async function() { - await db.collection(allTypesCollection).insertOne(allBSONTypesWithEdgeCasesDoc); - const docsFromCollection = await db.collection(allTypesCollection).find().toArray(); + await db.collection(allTypesCollection).insertOne(allValidBSONTypesWithEdgeCasesDoc); + const docsFromCollection = await db.collection(allTypesCollection).find({}, { promoteValues: false }).toArray(); // Create the schema validation rule. const analyzedDocuments = await analyzeDocuments(docsFromCollection); @@ -152,10 +152,10 @@ describe('With a MongoDB Cluster', function() { it('allows inserting valid documents (does not error)', async function() { const docs = [{ - ...allBSONTypesWithEdgeCasesDoc, + ...allValidBSONTypesWithEdgeCasesDoc, _id: new ObjectId() }, { - ...allBSONTypesWithEdgeCasesDoc, + ...allValidBSONTypesWithEdgeCasesDoc, _id: new ObjectId() }];