diff --git a/src/schema-converters/internalToMongoDB.test.ts b/src/schema-converters/internalToMongoDB.test.ts
index 4847e2c..565874e 100644
--- a/src/schema-converters/internalToMongoDB.test.ts
+++ b/src/schema-converters/internalToMongoDB.test.ts
@@ -895,7 +895,6 @@ describe('internalSchemaToMongoDB', async function() {
     const mongodb = await convertInternalToMongodb(internal);
     assert.deepStrictEqual(mongodb, {
       bsonType: 'object',
-      required: [],
       properties: {
         _id: {
           bsonType: 'objectId'
@@ -939,8 +938,7 @@ describe('internalSchemaToMongoDB', async function() {
           uuidOld: {
             bsonType: 'binData'
           }
-        },
-        required: []
+        }
       },
       boolean: {
         bsonType: 'bool'
@@ -984,8 +982,7 @@ describe('internalSchemaToMongoDB', async function() {
             key: {
               bsonType: 'string'
             }
-          },
-          required: []
+          }
         },
         objectId: {
           bsonType: 'objectId'
@@ -1193,7 +1190,6 @@ describe('internalSchemaToMongoDB', async function() {
     const mongodb = await convertInternalToMongodb(internal);
     assert.deepStrictEqual(mongodb, {
       bsonType: 'object',
-      required: [],
       properties: {
         genres: {
           bsonType: 'array',
@@ -1338,7 +1334,6 @@ describe('internalSchemaToMongoDB', async function() {
     const mongodb = await convertInternalToMongodb(internal);
     assert.deepStrictEqual(mongodb, {
       bsonType: 'object',
-      required: [],
       properties: {
         genres: {
           bsonType: 'array',
@@ -1510,7 +1505,6 @@ describe('internalSchemaToMongoDB', async function() {
     const mongodb = await convertInternalToMongodb(internal);
     assert.deepStrictEqual(mongodb, {
       bsonType: 'object',
-      required: [],
       properties: {
         mixedType: {
           bsonType: ['int', 'string']
@@ -1626,7 +1620,6 @@ describe('internalSchemaToMongoDB', async function() {
     const mongodb = await convertInternalToMongodb(internal);
     assert.deepStrictEqual(mongodb, {
       bsonType: 'object',
-      required: [],
       properties: {
         mixedComplexType: {
           anyOf: [
diff --git a/src/schema-converters/internalToMongoDB.ts b/src/schema-converters/internalToMongoDB.ts
index c40447f..d2edf95 100644
--- a/src/schema-converters/internalToMongoDB.ts
+++ b/src/schema-converters/internalToMongoDB.ts
@@ -46,15 +46,17 @@ async function parseType(type: SchemaType, signal?: AbortSignal): Promise 0) {
+      schema.items = items;
+    }
+  } else if (type.bsonType === 'Document') {
+    Object.assign(schema,
+      await parseFields((type as DocumentSchemaType).fields, signal)
+    );
   }
   return schema;
 }
@@ -83,17 +85,17 @@ async function parseTypes(types: SchemaType[], signal?: AbortSignal): Promise {
-  const required = [];
+  const required: string[] = [];
   const properties: MongoDBJSONSchema['properties'] = {};
   for (const field of fields) {
     if (field.probability === 1) required.push(field.name);
     properties[field.name] = await parseTypes(field.types, signal);
   }
-  return { required, properties };
+  return { properties, ...(required.length > 0 ? { required } : {}) };
 }
 export async function convertInternalToMongodb(
@@ -104,7 +106,8 @@
   const { required, properties } = await parseFields(internalSchema.fields, options.signal);
   const schema: MongoDBJSONSchema = {
     bsonType: 'object',
-    required,
+    // Prevent adding an empty required array as it isn't valid.
+    ...((required === undefined) ? {} : { required }),
     properties
   };
   return schema;
diff --git a/test/all-bson-types-fixture.ts b/test/all-bson-types-fixture.ts
index b8cfb5f..1e9232f 100644
--- a/test/all-bson-types-fixture.ts
+++ b/test/all-bson-types-fixture.ts
@@ -49,9 +49,34 @@ export const allBSONTypesDoc = {
   uuid: new UUID('AAAAAAAA-AAAA-4AAA-AAAA-AAAAAAAAAAAA'), // 4
   md5: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 5), // 5
   encrypted: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 6), // 6
-  compressedTimeSeries: Binary.createFromBase64('c//SZESzTGmQ6OfR38A11A==', 7), // 7
+  compressedTimeSeries: new Binary(
+    Buffer.from(
+      'CQCKW/8XjAEAAIfx//////////H/////////AQAAAAAAAABfAAAAAAAAAAEAAAAAAAAAAgAAAAAAAAAHAAAAAAAAAA4AAAAAAAAAAA==',
+      'base64'
+    ),
+    7
+  ), // 7
   custom: Binary.createFromBase64('//8=', 128) // 128
 },
 
 dbRef: new DBRef('namespace', new ObjectId('642d76b4b7ebfab15d3c4a78')) // not actually a separate type, just a convention
 };
+
+const {
+  dbRef,
+  ...allValidBSONTypesDoc
+} = allBSONTypesDoc;
+
+// Includes some edge cases like empty objects, nested empty arrays, etc.
+export const allValidBSONTypesWithEdgeCasesDoc = {
+  ...allValidBSONTypesDoc,
+  emptyObject: {},
+  objectWithNestedEmpty: {
+    nestedEmpty: {}
+  },
+  emptyArray: [],
+  arrayOfEmptyArrays: [[], []],
+  infinityNum: Infinity,
+  negativeInfinityNum: -Infinity,
+  NaNNum: NaN
+};
diff --git a/test/integration/generateAndValidate.test.ts b/test/integration/generateAndValidate.test.ts
index 7509d32..acab887 100644
--- a/test/integration/generateAndValidate.test.ts
+++ b/test/integration/generateAndValidate.test.ts
@@ -1,10 +1,17 @@
-import { analyzeDocuments } from '../../src';
 import Ajv2020 from 'ajv/dist/2020';
 import assert from 'assert';
-import { ObjectId, Int32, Double, EJSON } from 'bson';
+import {
+  Double,
+  Int32,
+  ObjectId,
+  EJSON
+} from 'bson';
 import { MongoClient, type Db } from 'mongodb';
 import { mochaTestServer } from '@mongodb-js/compass-test-server';
 
+import { allValidBSONTypesWithEdgeCasesDoc } from '../all-bson-types-fixture';
+import { analyzeDocuments } from '../../src';
+
 const bsonDocuments = [{
   _id: new ObjectId('67863e82fb817085a6b0ebad'),
   title: 'My book',
@@ -47,74 +54,156 @@ describe('Documents -> Generate schema -> Validate Documents against the schema'
   });
 });
 
-describe('Documents -> Generate schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() {
+describe('With a MongoDB Cluster', function() {
   let client: MongoClient;
   let db: Db;
   const cluster = mochaTestServer();
 
   before(async function() {
-    // Create the schema validation rule.
-    const analyzedDocuments = await analyzeDocuments(bsonDocuments);
-    const schema = await analyzedDocuments.getMongoDBJsonSchema();
-    const validationRule = {
-      $jsonSchema: schema
-    };
-
     // Connect to the mongodb instance.
     const connectionString = cluster().connectionString;
     client = new MongoClient(connectionString);
     await client.connect();
     db = client.db('test');
-
-    // Create a collection with the schema validation in Compass.
-    await db.createCollection('books', {
-      validator: validationRule
-    });
   });
+
   after(async function() {
     await client?.close();
   });
 
-  it('allows inserting valid documents', async function() {
-    await db.collection('books').insertMany(bsonDocuments);
-  });
+  describe('Documents -> Generate basic schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() {
+    before(async function() {
+      // Create the schema validation rule.
+      const analyzedDocuments = await analyzeDocuments(bsonDocuments);
+      const schema = await analyzedDocuments.getMongoDBJsonSchema();
+      const validationRule = {
+        $jsonSchema: schema
+      };
+
+      // Create a collection with the schema validation.
+      await db.createCollection('books', {
+        validator: validationRule
+      });
+    });
+
+    it('allows inserting valid documents', async function() {
+      await db.collection('books').insertMany(bsonDocuments);
+    });
+
+    it('prevents inserting invalid documents', async function() {
+      const invalidDocs = [{
+        _id: new ObjectId('67863e82fb817085a6b0ebba'),
+        title: 'Pineapple 1',
+        year: new Int32(1983),
+        genres: [
+          'crimi',
+          'comedy',
+          {
+            short: 'scifi',
+            long: 'science fiction'
+          }
+        ],
+        number: 'an invalid string'
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbb'),
+        title: 'Pineapple 2',
+        year: 'year a string'
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbc'),
+        title: 123,
+        year: new Int32('1999')
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbc'),
+        title: 'No year'
+      }];
-  it('prevents inserting invalid documents', async function() {
-    const invalidDocs = [{
-      _id: new ObjectId('67863e82fb817085a6b0ebba'),
-      title: 'Pineapple 1',
-      year: new Int32(1983),
-      genres: [
-        'crimi',
-        'comedy',
-        {
-          short: 'scifi',
-          long: 'science fiction'
+      for (const doc of invalidDocs) {
+        try {
+          await db.collection('books').insertOne(doc);
+
+          throw new Error('This should not be reached');
+        } catch (e: any) {
+          const expectedMessage = 'Document failed validation';
+          assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`);
         }
-      ],
-      number: 'an invalid string'
-    }, {
-      _id: new ObjectId('67863eacfb817085a6b0ebbb'),
-      title: 'Pineapple 2',
-      year: 'year a string'
-    }, {
-      _id: new ObjectId('67863eacfb817085a6b0ebbc'),
-      title: 123,
-      year: new Int32('1999')
-    }, {
-      _id: new ObjectId('67863eacfb817085a6b0ebbc'),
-      title: 'No year'
-    }];
-
-    for (const doc of invalidDocs) {
+      }
+    });
+  });
+
+  describe('[All Types] Documents -> Generate basic schema -> Use schema in validation rule in MongoDB -> Validate documents against the schema', function() {
+    const allTypesCollection = 'allTypes';
+
+    before(async function() {
+      await db.collection(allTypesCollection).insertOne(allValidBSONTypesWithEdgeCasesDoc);
+      const docsFromCollection = await db.collection(allTypesCollection).find({}, { promoteValues: false }).toArray();
+
+      // Create the schema validation rule.
+      const analyzedDocuments = await analyzeDocuments(docsFromCollection);
+      const schema = await analyzedDocuments.getMongoDBJsonSchema();
+      const validationRule = {
+        $jsonSchema: schema
+      };
+      // Update the collection with the schema validation.
+      await db.command({
+        collMod: allTypesCollection,
+        validator: validationRule
+      });
+    });
+
+    it('allows inserting valid documents (does not error)', async function() {
+      const docs = [{
+        ...allValidBSONTypesWithEdgeCasesDoc,
+        _id: new ObjectId()
+      }, {
+        ...allValidBSONTypesWithEdgeCasesDoc,
+        _id: new ObjectId()
+      }];
+
       try {
-        await db.collection('books').insertOne(doc);
+        await db.collection(allTypesCollection).insertMany(docs);
+      } catch (err) {
+        console.error('Error inserting documents', EJSON.stringify(err, undefined, 2));
+        throw err;
+      }
+    });
+
+    it('prevents inserting invalid documents', async function() {
+      const invalidDocs = [{
+        _id: new ObjectId('67863e82fb817085a6b0ebba'),
+        title: 'Pineapple 1',
+        year: new Int32(1983),
+        genres: [
+          'crimi',
+          'comedy',
+          {
+            short: 'scifi',
+            long: 'science fiction'
+          }
+        ],
+        number: 'an invalid string'
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbb'),
+        title: 'Pineapple 2',
+        year: 'year a string'
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbc'),
+        title: 123,
+        year: new Int32('1999')
+      }, {
+        _id: new ObjectId('67863eacfb817085a6b0ebbc'),
+        title: 'No year'
+      }];
+
+      for (const doc of invalidDocs) {
+        try {
+          await db.collection(allTypesCollection).insertOne(doc);
 
-        throw new Error('This should not be reached');
-      } catch (e: any) {
-        const expectedMessage = 'Document failed validation';
-        assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`);
+          throw new Error('This should not be reached');
+        } catch (e: any) {
+          const expectedMessage = 'Document failed validation';
+          assert.ok(e.message.includes(expectedMessage), `Expected error ${e.message} message to include "${expectedMessage}", doc: ${doc._id}`);
+        }
       }
-    }
+    });
   });
 });