diff --git a/packages/compass-assistant/test/eval-cases/aggregation-pipeline.ts b/packages/compass-assistant/test/eval-cases/aggregation-pipeline.ts index a3d7590bf70..75dacdb827d 100644 --- a/packages/compass-assistant/test/eval-cases/aggregation-pipeline.ts +++ b/packages/compass-assistant/test/eval-cases/aggregation-pipeline.ts @@ -1,36 +1,31 @@ import type { SimpleEvalCase } from '../assistant.eval'; -const evalCases: SimpleEvalCase[] = [ +const aggregationPipelineCases: SimpleEvalCase[] = [ { input: 'What is an aggregation pipeline?', - expected: `The aggregation pipeline in MongoDB is a framework for data processing and transformation. It consists of a sequence of stages, where each stage performs an operation on the input documents and passes the results to the next stage. Common operations include filtering, grouping, projecting, joining, and calculating values. Aggregation pipelines are powerful for data analysis, reporting, and transformation tasks in MongoDB. + expected: `The aggregation pipeline in MongoDB is a framework for data processing +and transformation. It consists of a sequence of stages, where each stage +operates on input documents and passes results to the next stage. Common stages +include $match, $group, $project, $lookup, and $set. Pipelines are useful for +analysis, reporting, and reshaping data. -Compass makes it easy to create and run aggregation pipelines under the Aggregations tab. You may generate an aggregation pipeline with natural language, utilize the visual stage editor, or edit aggregations in the text view. +In Compass, you can build pipelines under the Aggregations tab using natural +language, the visual stage editor, or the text view. 
-Example aggregation pipeline: +Example: db.orders.aggregate([ - // Stage 1: Unwind the array of products - { $unwind: { path: "$products" } }, - - // Stage 2: Match products that cost more than $15 - { $match: { "products.price": { $gt: 15 } } }, - - // Stage 3: Group products by product ID - { $group: { - _id: "$products.prod_id", - product: { $first: "$products.name" }, - total_value: { $sum: "$products.price" }, - quantity: { $sum: 1 } - } - }, - - // Stage 4: Add a product_id field - { $set: { product_id: "$_id" } }, - - // Stage 5: Remove the _id field - { $unset: ["_id"] } -]) -`, + { $unwind: { path: "$products" } }, + { $match: { "products.price": { $gt: 15 } } }, + { $group: { + _id: "$products.prod_id", + product: { $first: "$products.name" }, + total_value: { $sum: "$products.price" }, + quantity: { $sum: 1 } + } + }, + { $set: { product_id: "$_id" } }, + { $unset: ["_id"] } +])`, expectedSources: [ 'https://www.mongodb.com/docs/manual/core/aggregation-pipeline/', 'https://www.mongodb.com/docs/compass/create-agg-pipeline/', @@ -38,4 +33,4 @@ db.orders.aggregate([ }, ]; -export default evalCases; +export default aggregationPipelineCases; diff --git a/packages/compass-assistant/test/eval-cases/atlas-search.ts b/packages/compass-assistant/test/eval-cases/atlas-search.ts index 75210fb4934..f00a92d59f2 100644 --- a/packages/compass-assistant/test/eval-cases/atlas-search.ts +++ b/packages/compass-assistant/test/eval-cases/atlas-search.ts @@ -1,15 +1,98 @@ import type { SimpleEvalCase } from '../assistant.eval'; -const evalCases: SimpleEvalCase[] = [ +const atlasSearchCases: SimpleEvalCase[] = [ { input: 'How can I filter docs before running a $search query?', - expected: - 'Because the $search stage must be the first stage in an aggregation pipeline, you cannot pre-filter documents with a preceding $match stage. Instead, filtering should be performed within the $search stage using the filter clause of the compound operator. 
This allows you to apply predicate queries (e.g., on ranges, dates, or specific terms) to narrow down the dataset before the main query clauses (must or should) are executed. Alternatively, you can filter documents by creating a View—a partial index of your collection that pre-queries and filters out unwanted documents. Note that users need createCollection privileges to build views.', + expected: `The $search stage must be first in the pipeline, so you cannot +pre-filter with a preceding $match. Instead, add filtering to your $search +using the compound operator's filter clause to narrow the dataset. +Alternatively, build a View that pre-filters documents (requires +createCollection privileges).`, expectedSources: [ 'https://www.mongodb.com/docs/atlas/atlas-search/compound/#options', 'https://www.mongodb.com/docs/atlas/atlas-search/transform-documents-collections/#example--filter-documents', ], }, + { + input: 'What is the $search stage?', + expected: `$search is part of Atlas Search and integrates with aggregation as the +first stage. It supports full-text and rich search expressions over text, +numeric, date, and more, and works with subsequent pipeline stages for +transformation.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/#what-is-fts-', + ], + }, + { + input: 'Can $search work with regular MongoDB indexes?', + expected: `No. $search requires an Atlas Search Index. Standard MongoDB indexes are +not used by $search operations.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/index-definitions/#index-reference', + ], + }, + { + input: 'How do I sort $search results?', + expected: `Use the sort parameter inside $search. Numeric, date, ObjectId, boolean, +and UUID types support sorting directly. 
For strings, map the field as token +in the Atlas Search Index to enable proper sorting.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/sort/#sort-fts-results', + ], + }, + { + input: 'What is token type mapping in Atlas Search and why use it?', + expected: `Mapping a field as token indexes it as a single term with no tokenization +or lowercasing. This is useful for sorting, faceting, and exact matching on +strings. If you need case-insensitive behavior, apply a lowercase normalizer.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/field-types/token-type/#how-to-index-string-fields-for-efficient-sorting-and-faceting', + ], + }, + { + input: 'Can I add fuzzy matching to $search?', + expected: `Yes. In a text or autocomplete query, add a fuzzy object with maxEdits to +allow close matches (insertions, deletions, substitutions). Example: set +maxEdits: 2 to tolerate up to two single-character edits.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/text/#text-operator', + ], + }, + { + input: 'How do I combine multiple conditions in one $search query?', + expected: `Use the compound operator. must is an AND, should boosts relevancy for +preferred matches, and filter applies constraints without affecting score. +You can mix text queries with range filters for dates or numbers.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/compound/#compound-operator', + ], + }, + { + input: 'Does $search support wildcard or regex?', + expected: `Yes. Use wildcard for simple patterns with * and ?. Use regex for +Lucene-style regular expressions. For substring search at scale, prefer +autocomplete, weighing performance vs. 
storage tradeoffs.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/wildcard/#wildcard-operator', + ], + }, + { + input: 'How can I highlight matched terms in $search results?', + expected: `Add a highlight object to $search and specify the field path. Results +include a highlights array with snippets and tags around matched terms.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/highlighting/#highlight-search-terms-in-results', + ], + }, + { + input: 'What is the difference between $search and $searchMeta?', + expected: `$search returns matching documents. $searchMeta returns only metadata +such as total counts or facets. If you need both docs and metadata, you can +read $$SEARCH_META in a later stage after $search.`, + expectedSources: [ + 'https://www.mongodb.com/docs/atlas/atlas-search/query-syntax/#choose-the-aggregation-pipeline-stage', + ], + }, ]; -export default evalCases; +export default atlasSearchCases; diff --git a/packages/compass-assistant/test/eval-cases/connection-errors.ts b/packages/compass-assistant/test/eval-cases/connection-errors.ts new file mode 100644 index 00000000000..b1c1f73bb9c --- /dev/null +++ b/packages/compass-assistant/test/eval-cases/connection-errors.ts @@ -0,0 +1,69 @@ +import type { SimpleEvalCase } from '../assistant.eval'; + +const connectionErrorCases: SimpleEvalCase[] = [ + { + input: + 'Connection failed in Compass: Authentication failed using mongodb+srv. ' + + 'What should I check?', + expected: `Start with the basics: verify username and password, confirm the +database user exists, and ensure the Authentication Database and mechanism +match your server setup. Check whether the selected auth mechanism is +supported. If this is an Atlas deployment, open the Connect modal for +connection details and code snippets. 
Review Compass logs for more specific +errors and, after you succeed, save the connection as a Favorite for next +time.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/connection-errors', + 'https://www.mongodb.com/docs/atlas/troubleshoot-connection', + 'https://www.mongodb.com/docs/atlas/compass-connection', + 'https://www.mongodb.com/docs/compass/current/connect', + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs/', + ], + }, + { + input: + 'Compass shows querySrv ENOTFOUND for _mongodb._tcp.<hostname>. How do I ' + + 'fix DNS issues?', + expected: `This indicates a DNS resolution problem. Double-check the cluster +address. From a terminal, run nslookup on the SRV record; if it fails, your +DNS may be blocking or unable to resolve the address. Ensure DNS TXT results +are allowed. Confirm the cluster still exists and isn't paused, verify your +internet connection, and check Compass logs for details.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/connection-errors', + 'https://www.mongodb.com/docs/atlas/troubleshoot-connection', + 'https://www.mongodb.com/docs/atlas/compass-connection', + 'https://www.mongodb.com/docs/compass/current/connect', + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs', + ], + }, + { + input: 'connect ENETUNREACH when connecting from Compass', + expected: `The destination network is unreachable. Check the Compass log, verify +your internet connection, and confirm the cluster address. Inspect VPN, +firewall, and network settings that may block outbound requests. Ensure the +target cluster exists and isn't paused. Logs often include details to narrow +down the failure.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs/', + 'https://www.mongodb.com/docs/compass/current/troubleshooting/connection-errors', + ], + }, + { + input: 'querySrv ECONNREFUSED with mongodb+srv. 
Any workaround?', + expected: `This may be a DNS or driver issue. In Atlas, find your legacy +connection string (mongodb://) and try connecting with it. If that works, +upgrade to the latest Compass and share what works vs. what fails with your +network admin. If it doesn't work, follow the next error's troubleshooting +steps as the message will likely change.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/connection-errors', + 'https://www.mongodb.com/docs/atlas/troubleshoot-connection', + 'https://www.mongodb.com/docs/atlas/compass-connection', + 'https://www.mongodb.com/docs/compass/current/connect', + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs', + ], + }, +]; + +export default connectionErrorCases; diff --git a/packages/compass-assistant/test/eval-cases/data-modeling.ts b/packages/compass-assistant/test/eval-cases/data-modeling.ts new file mode 100644 index 00000000000..3ea75f47285 --- /dev/null +++ b/packages/compass-assistant/test/eval-cases/data-modeling.ts @@ -0,0 +1,40 @@ +import type { SimpleEvalCase } from '../assistant.eval'; + +const dataModelingCases: SimpleEvalCase[] = [ + { + input: 'How do I model data with MongoDB?', + expected: `Start with workload analysis: identify frequent operations. Map +relationships and decide whether to embed or reference. Apply schema design +patterns to optimize reads and writes. Finally, create indexes to support +common query patterns. Planning ahead helps ensure performance and consistency +as you scale.`, + expectedSources: [ + 'https://www.mongodb.com/docs/manual/data-modeling/#plan-your-schema', + 'https://www.mongodb.com/docs/manual/data-modeling/schema-design-process/#designing-your-schema', + ], + }, + { + input: 'Is MongoDB schemaless?', + expected: `No. MongoDB uses a flexible schema. Documents in a collection need not +share identical fields or types, but most follow a similar structure. 
You can +enforce consistency with JSON Schema validation rules in Compass or via +collMod.`, + expectedSources: [ + 'https://www.mongodb.com/docs/manual/data-modeling/#data-modeling', + 'https://www.mongodb.com/docs/manual/core/schema-validation/specify-validation-level/', + 'https://www.mongodb.com/docs/compass/validation/', + ], + }, + { + input: 'Should I embed related data or put it in a new collection?', + expected: `Embed when it simplifies code and the data has a contains or has-a +relationship, is read together, updated together, or archived together. Use +references for frequently changing subdocuments, many-to-many or large +hierarchies, or when the subdocument is often queried on its own.`, + expectedSources: [ + 'https://www.mongodb.com/docs/manual/data-modeling/concepts/embedding-vs-references/', + ], + }, +]; + +export default dataModelingCases; diff --git a/packages/compass-assistant/test/eval-cases/explain-plan.ts b/packages/compass-assistant/test/eval-cases/explain-plan.ts new file mode 100644 index 00000000000..0bb15dcb49a --- /dev/null +++ b/packages/compass-assistant/test/eval-cases/explain-plan.ts @@ -0,0 +1,42 @@ +import type { SimpleEvalCase } from '../assistant.eval'; + +const explainPlanCases: SimpleEvalCase[] = [ + { + input: + 'Explain this pipeline plan: $match { cuisine: "Italian" } then group by ' + + 'borough. Why is it slow and how to optimize?', + expected: `The plan shows an IXSCAN on cuisine_-1, followed by a FETCH of full +documents because borough is not in the index. That causes 43,207 documents to +be read and grouped, taking about 1s. To avoid the FETCH and lower I/O, create +a compound index on { cuisine: 1, borough: 1 } (cuisine first). With a +compound index, queries that only filter on cuisine can still use the index +prefix, so a separate single-field index on cuisine is usually redundant. 
+Balance the benefits against index storage and write overhead before creating +it.`, + expectedSources: [ + 'https://www.mongodb.com/docs/manual/tutorial/analyze-query-plan/', + 'https://www.mongodb.com/docs/manual/reference/explain-results', + 'https://www.mongodb.com/docs/manual/faq/indexes', + 'https://www.mongodb.com/docs/manual/tutorial/equality-sort-range-guideline/#std-label-esr-indexing-guideline', + 'https://www.mongodb.com/docs/manual/core/indexes/index-types/index-compound/#index-prefixes', + ], + }, + { + input: + 'Explain plan shows COLLSCAN on category = "low_rated" and ~1M docs ' + + 'examined. What should I do?', + expected: `A COLLSCAN means the query read the entire collection to find matches, +which does not scale. Create an index on category so the planner can perform +an IXSCAN. Pros: much faster reads and better scalability. Cons: indexes add +storage and introduce write overhead because updates must maintain the index. +In Compass, open the collection, go to Indexes, click Create, add category, +and create the index. 
Re-run explain to confirm IXSCAN.`, + expectedSources: [ + 'https://www.mongodb.com/docs/manual/tutorial/analyze-query-plan/', + 'https://www.mongodb.com/docs/manual/reference/explain-results', + 'https://www.mongodb.com/docs/manual/faq/indexes', + ], + }, +]; + +export default explainPlanCases; diff --git a/packages/compass-assistant/test/eval-cases/index.ts b/packages/compass-assistant/test/eval-cases/index.ts index 03270a041db..1b3a0659018 100644 --- a/packages/compass-assistant/test/eval-cases/index.ts +++ b/packages/compass-assistant/test/eval-cases/index.ts @@ -1,10 +1,15 @@ import type { SimpleEvalCase } from '../assistant.eval'; -import atlasSearch from './atlas-search'; -import aggregationPipeline from './aggregation-pipeline'; -import modelData from './model-data'; - +import aggregationPipelineCases from './aggregation-pipeline'; +import connectionErrorCases from './connection-errors'; +import oidcAuthCases from './oidc-auth'; +import explainPlanCases from './explain-plan'; +import atlasSearchCases from './atlas-search'; +import dataModelingCases from './data-modeling'; export const evalCases: SimpleEvalCase[] = [ - ...atlasSearch, - ...aggregationPipeline, - ...modelData, + ...aggregationPipelineCases, + ...connectionErrorCases, + ...oidcAuthCases, + ...explainPlanCases, + ...atlasSearchCases, + ...dataModelingCases, ]; diff --git a/packages/compass-assistant/test/eval-cases/model-data.ts b/packages/compass-assistant/test/eval-cases/model-data.ts deleted file mode 100644 index 44d71af6da5..00000000000 --- a/packages/compass-assistant/test/eval-cases/model-data.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { SimpleEvalCase } from '../assistant.eval'; - -const evalCases: SimpleEvalCase[] = [ - { - input: 'How do I model data with MongoDB?', - expected: `Data modeling in MongoDB is highly dependent on how you access your data. 
To ensure that your data model has a logical structure and achieves optimal performance, plan your schema prior to using your database at a production scale. To determine your data model, use the following schema design process: - -Identify your workload: Identify the operations that your application runs most frequently -Map relationships: Identify the relationships in your application's data and decide whether to link or embed related data. -Apply design patterns: Apply schema design patterns to optimize reads and writes. -Create indexes: Create indexes to support common query patterns. -`, - expectedSources: [ - 'https://www.mongodb.com/docs/manual/data-modeling/#plan-your-schema', - 'https://www.mongodb.com/docs/manual/data-modeling/schema-design-process/#designing-your-schema', - ], - }, -]; - -export default evalCases; diff --git a/packages/compass-assistant/test/eval-cases/oidc-auth.ts b/packages/compass-assistant/test/eval-cases/oidc-auth.ts new file mode 100644 index 00000000000..57977c30838 --- /dev/null +++ b/packages/compass-assistant/test/eval-cases/oidc-auth.ts @@ -0,0 +1,30 @@ +import type { SimpleEvalCase } from '../assistant.eval'; + +const oidcAuthCases: SimpleEvalCase[] = [ + { + input: + 'OIDC auth: authenticated with IdP but Compass says Authentication ' + + 'failed on server', + expected: `You likely authenticated with your identity provider, but the MongoDB +server rejected the request due to configuration issues. Check the Compass +log. Try connecting with mongosh. Collect tokens using: mongosh +--oidcDumpTokens <connection string>. Share logs, the dump output, timestamps, +and your Compass and mongosh versions with the cluster administrator to fix +the server configuration.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs/', + ], + }, + { + input: 'Compass hangs then browser shows 400 Bad Request during OIDC login', + expected: `This suggests a mismatch between IdP configuration and MongoDB server +OIDC settings. 
Check the Compass log, test connectivity with mongosh, and +contact the cluster administrator with logs, times, and versions to correct +the IdP and server configuration.`, + expectedSources: [ + 'https://www.mongodb.com/docs/compass/current/troubleshooting/logs/', + ], + }, +]; + +export default oidcAuthCases;