diff --git a/scripts/accuracy/generateTestSummary.ts b/scripts/accuracy/generateTestSummary.ts index dc451caf..f328576a 100644 --- a/scripts/accuracy/generateTestSummary.ts +++ b/scripts/accuracy/generateTestSummary.ts @@ -230,6 +230,7 @@ function generateMarkdownBrief( markdownTexts.push( ...[ "## 📊 Baseline Comparison", + "| Metric | Value |", "|--------|-------|", `| **Baseline Commit** | \`${baselineInfo.commitSHA}\` |`, `| **Baseline Run ID** | \`${baselineInfo.accuracyRunId}\` |`, diff --git a/src/common/config.ts b/src/common/config.ts index cfcffb3d..2367a3ad 100644 --- a/src/common/config.ts +++ b/src/common/config.ts @@ -19,6 +19,9 @@ export interface UserConfig { apiClientSecret?: string; telemetry: "enabled" | "disabled"; logPath: string; + exportsPath: string; + exportTimeoutMs: number; + exportCleanupIntervalMs: number; connectionString?: string; connectOptions: ConnectOptions; disabledTools: Array; @@ -35,6 +38,9 @@ export interface UserConfig { const defaults: UserConfig = { apiBaseUrl: "https://cloud.mongodb.com/", logPath: getLogPath(), + exportsPath: getExportsPath(), + exportTimeoutMs: 300000, // 5 minutes + exportCleanupIntervalMs: 120000, // 2 minutes connectOptions: { readConcern: "local", readPreference: "secondaryPreferred", @@ -59,17 +65,21 @@ export const config = { ...getCliConfig(), }; -function getLogPath(): string { - const localDataPath = - process.platform === "win32" - ? path.join(process.env.LOCALAPPDATA || process.env.APPDATA || os.homedir(), "mongodb") - : path.join(os.homedir(), ".mongodb"); - - const logPath = path.join(localDataPath, "mongodb-mcp", ".app-logs"); +function getLocalDataPath(): string { + return process.platform === "win32" + ? path.join(process.env.LOCALAPPDATA || process.env.APPDATA || os.homedir(), "mongodb") + : path.join(os.homedir(), ".mongodb"); +} +function getLogPath(): string { + const logPath = path.join(getLocalDataPath(), "mongodb-mcp", ".app-logs"); return logPath; } +function getExportsPath(): string { + return path.join(getLocalDataPath(), "mongodb-mcp", "exports"); +} + // Gets the config supplied by the user as environment variables. The variable names // are prefixed with `MDB_MCP_` and the keys match the UserConfig keys, but are converted // to SNAKE_UPPER_CASE. 
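Since the comment above describes how user config maps to environment variables (keys prefixed with `MDB_MCP_` and converted to SNAKE_UPPER_CASE), here is a minimal TypeScript sketch of that mapping applied to the three new export settings. This is an assumption based on the stated convention; `toSnakeUpperCase` and the printed names are illustrative, not code from this change.

// Hypothetical sketch: derive the MDB_MCP_-prefixed variable names for the new export settings.
function toSnakeUpperCase(key: string): string {
    // "exportTimeoutMs" -> "EXPORT_TIMEOUT_MS"
    return key.replace(/([a-z0-9])([A-Z])/g, "$1_$2").toUpperCase();
}

const exportConfigKeys = ["exportsPath", "exportTimeoutMs", "exportCleanupIntervalMs"] as const;
for (const key of exportConfigKeys) {
    console.log(`MDB_MCP_${toSnakeUpperCase(key)}`);
}

If the existing convention holds, this would print MDB_MCP_EXPORTS_PATH, MDB_MCP_EXPORT_TIMEOUT_MS and MDB_MCP_EXPORT_CLEANUP_INTERVAL_MS as the variables recognized for the new settings.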
diff --git a/src/common/exportsManager.ts b/src/common/exportsManager.ts new file mode 100644 index 00000000..38420309 --- /dev/null +++ b/src/common/exportsManager.ts @@ -0,0 +1,392 @@ +import z from "zod"; +import path from "path"; +import fs from "fs/promises"; +import EventEmitter from "events"; +import { createWriteStream } from "fs"; +import { FindCursor } from "mongodb"; +import { EJSON, EJSONOptions, ObjectId } from "bson"; +import { Transform } from "stream"; +import { pipeline } from "stream/promises"; +import { MongoLogId } from "mongodb-log-writer"; + +import { UserConfig } from "./config.js"; +import { LoggerBase, LogId } from "./logger.js"; + +export const jsonExportFormat = z.enum(["relaxed", "canonical"]); +export type JSONExportFormat = z.infer<typeof jsonExportFormat>; + +interface CommonExportData { + exportName: string; + exportTitle: string; + exportURI: string; + exportPath: string; +} + +interface ReadyExport extends CommonExportData { + exportStatus: "ready"; + exportCreatedAt: number; +} + +interface InProgressExport extends CommonExportData { + exportStatus: "in-progress"; +} + +type StoredExport = ReadyExport | InProgressExport; + +/** + * Ideally only exportName and exportURI would be exposed publicly, but we also + * expose exportPath because the export tool also returns the exportPath in its + * response when the MCP server is running over the stdio transport. The + * reasoning is that a few clients, Cursor in particular, cannot (as of this + * writing, 7 August 2025) refer to resource URIs, which means they have no way + * to access the exported resource. The majority of our MCP server usage is over + * the stdio transport, so we can assume that in most, if not all, cases the MCP + * server runs on the same machine as the MCP client, and we can therefore + * provide the local export path so that clients which do not yet support + * resource URIs can still work with the exported data. We expect clients to + * catch up and implement resource URI references, at which point it will be + * safe to remove `exportPath` from the publicly exposed properties of an export. + * + * The editors to watch out for are Cursor and Windsurf, as + * they don't yet support working with resource URIs. 
+ * + * Ref Cursor: https://forum.cursor.com/t/cursor-mcp-resource-feature-support/50987 + * JIRA: https://jira.mongodb.org/browse/MCP-104 */ +type AvailableExport = Pick; + +export type ExportsManagerConfig = Pick; + +type ExportsManagerEvents = { + closed: []; + "export-expired": [string]; + "export-available": [string]; +}; + +export class ExportsManager extends EventEmitter { + private storedExports: Record = {}; + private exportsCleanupInProgress: boolean = false; + private exportsCleanupInterval?: NodeJS.Timeout; + private readonly shutdownController: AbortController = new AbortController(); + + private constructor( + private readonly exportsDirectoryPath: string, + private readonly config: ExportsManagerConfig, + private readonly logger: LoggerBase + ) { + super(); + } + + public get availableExports(): AvailableExport[] { + this.assertIsNotShuttingDown(); + return Object.values(this.storedExports) + .filter((storedExport) => { + return ( + storedExport.exportStatus === "ready" && + !isExportExpired(storedExport.exportCreatedAt, this.config.exportTimeoutMs) + ); + }) + .map(({ exportName, exportTitle, exportURI, exportPath }) => ({ + exportName, + exportTitle, + exportURI, + exportPath, + })); + } + + protected init(): void { + if (!this.exportsCleanupInterval) { + this.exportsCleanupInterval = setInterval( + () => void this.cleanupExpiredExports(), + this.config.exportCleanupIntervalMs + ); + } + } + + public async close(): Promise { + if (this.shutdownController.signal.aborted) { + return; + } + try { + clearInterval(this.exportsCleanupInterval); + this.shutdownController.abort(); + await fs.rm(this.exportsDirectoryPath, { force: true, recursive: true }); + this.emit("closed"); + } catch (error) { + this.logger.error({ + id: LogId.exportCloseError, + context: "Error while closing ExportsManager", + message: error instanceof Error ? error.message : String(error), + }); + } + } + + public async readExport(exportName: string): Promise { + try { + this.assertIsNotShuttingDown(); + exportName = decodeURIComponent(exportName); + const exportHandle = this.storedExports[exportName]; + if (!exportHandle) { + throw new Error("Requested export has either expired or does not exist."); + } + + if (exportHandle.exportStatus === "in-progress") { + throw new Error("Requested export is still being generated. Try again later."); + } + + const { exportPath } = exportHandle; + + return fs.readFile(exportPath, { encoding: "utf8", signal: this.shutdownController.signal }); + } catch (error) { + this.logger.error({ + id: LogId.exportReadError, + context: `Error when reading export - ${exportName}`, + message: error instanceof Error ? 
error.message : String(error), + }); + throw error; + } + } + + public async createJSONExport({ + input, + exportName, + exportTitle, + jsonExportFormat, + }: { + input: FindCursor; + exportName: string; + exportTitle: string; + jsonExportFormat: JSONExportFormat; + }): Promise { + try { + this.assertIsNotShuttingDown(); + const exportNameWithExtension = validateExportName(ensureExtension(exportName, "json")); + if (this.storedExports[exportNameWithExtension]) { + return Promise.reject( + new Error("Export with same name is either already available or being generated.") + ); + } + const exportURI = `exported-data://${encodeURIComponent(exportNameWithExtension)}`; + const exportFilePath = path.join(this.exportsDirectoryPath, exportNameWithExtension); + const inProgressExport: InProgressExport = (this.storedExports[exportNameWithExtension] = { + exportName: exportNameWithExtension, + exportTitle, + exportPath: exportFilePath, + exportURI: exportURI, + exportStatus: "in-progress", + }); + + void this.startExport({ input, jsonExportFormat, inProgressExport }); + return Promise.resolve(inProgressExport); + } catch (error) { + this.logger.error({ + id: LogId.exportCreationError, + context: "Error when registering JSON export request", + message: error instanceof Error ? error.message : String(error), + }); + throw error; + } + } + + private async startExport({ + input, + jsonExportFormat, + inProgressExport, + }: { + input: FindCursor; + jsonExportFormat: JSONExportFormat; + inProgressExport: InProgressExport; + }): Promise { + try { + let pipeSuccessful = false; + try { + await fs.mkdir(this.exportsDirectoryPath, { recursive: true }); + const outputStream = createWriteStream(inProgressExport.exportPath); + await pipeline( + [ + input.stream(), + this.docToEJSONStream(this.getEJSONOptionsForFormat(jsonExportFormat)), + outputStream, + ], + { signal: this.shutdownController.signal } + ); + pipeSuccessful = true; + } catch (error) { + // If the pipeline errors out then we might end up with + // partial and incorrect export so we remove it entirely. + delete this.storedExports[inProgressExport.exportName]; + // do not block the user, just delete the file in the background + void this.silentlyRemoveExport( + inProgressExport.exportPath, + LogId.exportCreationCleanupError, + `Error when removing incomplete export ${inProgressExport.exportName}` + ); + throw error; + } finally { + if (pipeSuccessful) { + this.storedExports[inProgressExport.exportName] = { + ...inProgressExport, + exportCreatedAt: Date.now(), + exportStatus: "ready", + }; + this.emit("export-available", inProgressExport.exportURI); + } + void input.close(); + } + } catch (error) { + this.logger.error({ + id: LogId.exportCreationError, + context: `Error when generating JSON export for ${inProgressExport.exportName}`, + message: error instanceof Error ? 
error.message : String(error), + }); + } + } + + private getEJSONOptionsForFormat(format: JSONExportFormat): EJSONOptions | undefined { + switch (format) { + case "relaxed": + return { relaxed: true }; + case "canonical": + return { relaxed: false }; + default: + return undefined; + } + } + + private docToEJSONStream(ejsonOptions: EJSONOptions | undefined): Transform { + let docsTransformed = 0; + return new Transform({ + objectMode: true, + transform(chunk: unknown, encoding, callback): void { + try { + const doc = EJSON.stringify(chunk, undefined, undefined, ejsonOptions); + if (docsTransformed === 0) { + this.push("[" + doc); + } else { + this.push(",\n" + doc); + } + docsTransformed++; + callback(); + } catch (err) { + callback(err as Error); + } + }, + flush(callback): void { + if (docsTransformed === 0) { + this.push("[]"); + } else { + this.push("]"); + } + callback(); + }, + }); + } + + private async cleanupExpiredExports(): Promise { + if (this.exportsCleanupInProgress) { + return; + } + + this.exportsCleanupInProgress = true; + try { + // first, unregister all exports that are expired, so they are not considered anymore for reading + const exportsForCleanup: ReadyExport[] = []; + for (const expiredExport of Object.values(this.storedExports)) { + if ( + expiredExport.exportStatus === "ready" && + isExportExpired(expiredExport.exportCreatedAt, this.config.exportTimeoutMs) + ) { + exportsForCleanup.push(expiredExport); + delete this.storedExports[expiredExport.exportName]; + } + } + + // and then remove them (slow operation potentially) from disk. + const allDeletionPromises: Promise[] = []; + for (const { exportPath, exportName } of exportsForCleanup) { + allDeletionPromises.push( + this.silentlyRemoveExport( + exportPath, + LogId.exportCleanupError, + `Considerable error when removing export ${exportName}` + ) + ); + } + + await Promise.allSettled(allDeletionPromises); + } catch (error) { + this.logger.error({ + id: LogId.exportCleanupError, + context: "Error when cleaning up exports", + message: error instanceof Error ? error.message : String(error), + }); + } finally { + this.exportsCleanupInProgress = false; + } + } + + private async silentlyRemoveExport(exportPath: string, logId: MongoLogId, logContext: string): Promise { + try { + await fs.unlink(exportPath); + } catch (error) { + // If the file does not exist or the containing directory itself + // does not exist then we can safely ignore that error anything else + // we need to flag. + if ((error as NodeJS.ErrnoException).code !== "ENOENT") { + this.logger.error({ + id: logId, + context: logContext, + message: error instanceof Error ? error.message : String(error), + }); + } + } + } + + private assertIsNotShuttingDown(): void { + if (this.shutdownController.signal.aborted) { + throw new Error("ExportsManager is shutting down."); + } + } + + static init( + config: ExportsManagerConfig, + logger: LoggerBase, + sessionId = new ObjectId().toString() + ): ExportsManager { + const exportsDirectoryPath = path.join(config.exportsPath, sessionId); + const exportsManager = new ExportsManager(exportsDirectoryPath, config, logger); + exportsManager.init(); + return exportsManager; + } +} + +/** + * Ensures the path ends with the provided extension */ +export function ensureExtension(pathOrName: string, extension: string): string { + const extWithDot = extension.startsWith(".") ? 
extension : `.${extension}`; + if (pathOrName.endsWith(extWithDot)) { + return pathOrName; + } + return `${pathOrName}${extWithDot}`; +} + +/** + * Small utility to decoding and validating provided export name for path + * traversal or no extension */ +export function validateExportName(nameWithExtension: string): string { + const decodedName = decodeURIComponent(nameWithExtension); + if (!path.extname(decodedName)) { + throw new Error("Provided export name has no extension"); + } + + if (decodedName.includes("..") || decodedName.includes("/") || decodedName.includes("\\")) { + throw new Error("Invalid export name: path traversal hinted"); + } + + return decodedName; +} + +export function isExportExpired(createdAt: number, exportTimeoutMs: number): boolean { + return Date.now() - createdAt > exportTimeoutMs; +} diff --git a/src/common/logger.ts b/src/common/logger.ts index 90bf97be..0add105c 100644 --- a/src/common/logger.ts +++ b/src/common/logger.ts @@ -49,6 +49,15 @@ export const LogId = { streamableHttpTransportSessionCloseNotificationFailure: mongoLogId(1_006_004), streamableHttpTransportRequestFailure: mongoLogId(1_006_005), streamableHttpTransportCloseFailure: mongoLogId(1_006_006), + + exportCleanupError: mongoLogId(1_007_001), + exportCreationError: mongoLogId(1_007_002), + exportCreationCleanupError: mongoLogId(1_007_003), + exportReadError: mongoLogId(1_007_004), + exportCloseError: mongoLogId(1_007_005), + exportedDataListError: mongoLogId(1_007_006), + exportedDataAutoCompleteError: mongoLogId(1_007_007), + exportLockError: mongoLogId(1_007_008), } as const; interface LogPayload { diff --git a/src/common/session.ts b/src/common/session.ts index 11d6e12b..444a747b 100644 --- a/src/common/session.ts +++ b/src/common/session.ts @@ -1,3 +1,4 @@ +import { ObjectId } from "bson"; import { ApiClient, ApiClientCredentials } from "./atlas/apiClient.js"; import { Implementation } from "@modelcontextprotocol/sdk/types.js"; import { CompositeLogger, LogId } from "./logger.js"; @@ -10,13 +11,15 @@ import { } from "./connectionManager.js"; import { NodeDriverServiceProvider } from "@mongosh/service-provider-node-driver"; import { ErrorCodes, MongoDBError } from "./errors.js"; +import { ExportsManager } from "./exportsManager.js"; export interface SessionOptions { apiBaseUrl: string; apiClientId?: string; apiClientSecret?: string; - connectionManager?: ConnectionManager; logger: CompositeLogger; + exportsManager: ExportsManager; + connectionManager: ConnectionManager; } export type SessionEvents = { @@ -27,9 +30,10 @@ export type SessionEvents = { }; export class Session extends EventEmitter { - sessionId?: string; - connectionManager: ConnectionManager; - apiClient: ApiClient; + readonly sessionId: string = new ObjectId().toString(); + readonly exportsManager: ExportsManager; + readonly connectionManager: ConnectionManager; + readonly apiClient: ApiClient; agentRunner?: { name: string; version: string; @@ -37,11 +41,17 @@ export class Session extends EventEmitter { public logger: CompositeLogger; - constructor({ apiBaseUrl, apiClientId, apiClientSecret, connectionManager, logger }: SessionOptions) { + constructor({ + apiBaseUrl, + apiClientId, + apiClientSecret, + logger, + connectionManager, + exportsManager, + }: SessionOptions) { super(); this.logger = logger; - const credentials: ApiClientCredentials | undefined = apiClientId && apiClientSecret ? 
{ @@ -51,8 +61,8 @@ export class Session extends EventEmitter { : undefined; this.apiClient = new ApiClient({ baseUrl: apiBaseUrl, credentials }, logger); - - this.connectionManager = connectionManager ?? new ConnectionManager(); + this.exportsManager = exportsManager; + this.connectionManager = connectionManager; this.connectionManager.on("connection-succeeded", () => this.emit("connect")); this.connectionManager.on("connection-timed-out", (error) => this.emit("connection-error", error.errorReason)); this.connectionManager.on("connection-closed", () => this.emit("disconnect")); @@ -107,6 +117,7 @@ export class Session extends EventEmitter { async close(): Promise { await this.disconnect(); await this.apiClient.close(); + await this.exportsManager.close(); this.emit("close"); } diff --git a/src/resources/common/config.ts b/src/resources/common/config.ts index 2bd8a8aa..5a0570d4 100644 --- a/src/resources/common/config.ts +++ b/src/resources/common/config.ts @@ -1,13 +1,12 @@ import { ReactiveResource } from "../resource.js"; -import { config } from "../../common/config.js"; import type { UserConfig } from "../../common/config.js"; -import type { Server } from "../../server.js"; import type { Telemetry } from "../../telemetry/telemetry.js"; +import type { Session } from "../../lib.js"; export class ConfigResource extends ReactiveResource { - constructor(server: Server, telemetry: Telemetry) { - super( - { + constructor(session: Session, config: UserConfig, telemetry: Telemetry) { + super({ + resourceConfiguration: { name: "config", uri: "config://config", config: { @@ -15,13 +14,14 @@ export class ConfigResource extends ReactiveResource { "Server configuration, supplied by the user either as environment variables or as startup arguments", }, }, - { + options: { initial: { ...config }, events: [], }, - server, - telemetry - ); + session, + config, + telemetry, + }); } reduce(eventName: undefined, event: undefined): UserConfig { void eventName; diff --git a/src/resources/common/debug.ts b/src/resources/common/debug.ts index bf6cf5f5..40be3fc0 100644 --- a/src/resources/common/debug.ts +++ b/src/resources/common/debug.ts @@ -1,6 +1,6 @@ import { ReactiveResource } from "../resource.js"; -import type { Server } from "../../server.js"; import type { Telemetry } from "../../telemetry/telemetry.js"; +import { Session, UserConfig } from "../../lib.js"; type ConnectionStateDebuggingInformation = { readonly tag: "connected" | "connecting" | "disconnected" | "errored"; @@ -14,9 +14,9 @@ export class DebugResource extends ReactiveResource< ConnectionStateDebuggingInformation, readonly ["connect", "disconnect", "close", "connection-error"] > { - constructor(server: Server, telemetry: Telemetry) { - super( - { + constructor(session: Session, config: UserConfig, telemetry: Telemetry) { + super({ + resourceConfiguration: { name: "debug-mongodb", uri: "debug://mongodb", config: { @@ -24,13 +24,14 @@ export class DebugResource extends ReactiveResource< "Debugging information for MongoDB connectivity issues. 
Tracks the last connectivity error and attempt information.", }, }, - { + options: { initial: { tag: "disconnected" }, events: ["connect", "disconnect", "close", "connection-error"], }, - server, - telemetry - ); + session, + config, + telemetry, + }); } reduce( eventName: "connect" | "disconnect" | "close" | "connection-error", diff --git a/src/resources/common/exportedData.ts b/src/resources/common/exportedData.ts new file mode 100644 index 00000000..0c81f35e --- /dev/null +++ b/src/resources/common/exportedData.ts @@ -0,0 +1,116 @@ +import { + CompleteResourceTemplateCallback, + ListResourcesCallback, + ReadResourceTemplateCallback, + ResourceTemplate, +} from "@modelcontextprotocol/sdk/server/mcp.js"; +import { Server } from "../../server.js"; +import { LogId } from "../../common/logger.js"; +import { Session } from "../../common/session.js"; + +export class ExportedData { + private readonly name = "exported-data"; + private readonly description = "Data files exported in the current session."; + private readonly uri = "exported-data://{exportName}"; + private server?: Server; + + constructor(private readonly session: Session) {} + + public register(server: Server): void { + this.server = server; + this.server.mcpServer.registerResource( + this.name, + new ResourceTemplate(this.uri, { + /** + * A few clients have the capability of listing templated + * resources as well and this callback provides support for that + * */ + list: this.listResourcesCallback, + /** + * This is to provide auto completion when user starts typing in + * value for template variable, in our case, exportName */ + complete: { + exportName: this.autoCompleteExportName, + }, + }), + { description: this.description }, + this.readResourceCallback + ); + this.session.exportsManager.on("export-available", (uri: string): void => { + server.sendResourceListChanged(); + server.sendResourceUpdated(uri); + }); + this.session.exportsManager.on("export-expired", (): void => { + server.sendResourceListChanged(); + }); + } + + private listResourcesCallback: ListResourcesCallback = () => { + try { + return { + resources: this.session.exportsManager.availableExports.map( + ({ exportName, exportTitle, exportURI }) => ({ + name: exportName, + description: exportTitle, + uri: exportURI, + mimeType: "application/json", + }) + ), + }; + } catch (error) { + this.session.logger.error({ + id: LogId.exportedDataListError, + context: "Error when listing exported data resources", + message: error instanceof Error ? error.message : String(error), + }); + return { + resources: [], + }; + } + }; + + private autoCompleteExportName: CompleteResourceTemplateCallback = (value) => { + try { + return this.session.exportsManager.availableExports + .filter(({ exportName }) => exportName.startsWith(value)) + .map(({ exportName }) => exportName); + } catch (error) { + this.session.logger.error({ + id: LogId.exportedDataAutoCompleteError, + context: "Error when autocompleting exported data", + message: error instanceof Error ? 
error.message : String(error), + }); + return []; + } + }; + + private readResourceCallback: ReadResourceTemplateCallback = async (url, { exportName }) => { + try { + if (typeof exportName !== "string") { + throw new Error("Cannot retrieve exported data, exportName not provided."); + } + + const content = await this.session.exportsManager.readExport(exportName); + + return { + contents: [ + { + uri: url.href, + text: content, + mimeType: "application/json", + }, + ], + }; + } catch (error) { + return { + contents: [ + { + uri: url.href, + text: `Error reading ${url.href}: ${error instanceof Error ? error.message : String(error)}`, + }, + ], + isError: true, + }; + } + }; +} diff --git a/src/resources/resource.ts b/src/resources/resource.ts index 58ab13b8..005e60c8 100644 --- a/src/resources/resource.ts +++ b/src/resources/resource.ts @@ -20,28 +20,41 @@ export type ReactiveResourceOptions { - protected readonly session: Session; - protected readonly config: UserConfig; + protected server?: Server; + protected session: Session; + protected config: UserConfig; + protected telemetry: Telemetry; + protected current: Value; protected readonly name: string; protected readonly uri: string; protected readonly resourceConfig: ResourceMetadata; protected readonly events: RelevantEvents; - constructor( - resourceConfiguration: ResourceConfiguration, - options: ReactiveResourceOptions, - protected readonly server: Server, - protected readonly telemetry: Telemetry, - current?: Value - ) { + constructor({ + resourceConfiguration, + options, + session, + config, + telemetry, + current, + }: { + resourceConfiguration: ResourceConfiguration; + options: ReactiveResourceOptions; + session: Session; + config: UserConfig; + telemetry: Telemetry; + current?: Value; + }) { + this.session = session; + this.config = config; + this.telemetry = telemetry; + this.name = resourceConfiguration.name; this.uri = resourceConfiguration.uri; this.resourceConfig = resourceConfiguration.config; this.events = options.events; this.current = current ?? 
options.initial; - this.session = server.session; - this.config = server.userConfig; this.setupEventListeners(); } @@ -55,7 +68,8 @@ export abstract class ReactiveResource { + private triggerUpdate(): void { try { - await this.server.mcpServer.server.sendResourceUpdated({ uri: this.uri }); - this.server.mcpServer.sendResourceListChanged(); + this.server?.sendResourceListChanged(); + this.server?.sendResourceUpdated(this.uri); } catch (error: unknown) { this.session.logger.warning({ id: LogId.resourceUpdateFailure, diff --git a/src/resources/resources.ts b/src/resources/resources.ts index 40a17702..24a129ab 100644 --- a/src/resources/resources.ts +++ b/src/resources/resources.ts @@ -1,4 +1,5 @@ import { ConfigResource } from "./common/config.js"; import { DebugResource } from "./common/debug.js"; +import { ExportedData } from "./common/exportedData.js"; -export const Resources = [ConfigResource, DebugResource] as const; +export const Resources = [ConfigResource, DebugResource, ExportedData] as const; diff --git a/src/server.ts b/src/server.ts index c23207a9..cc8d30dd 100644 --- a/src/server.ts +++ b/src/server.ts @@ -5,12 +5,16 @@ import { AtlasTools } from "./tools/atlas/tools.js"; import { MongoDbTools } from "./tools/mongodb/tools.js"; import { Resources } from "./resources/resources.js"; import { LogId } from "./common/logger.js"; -import { ObjectId } from "mongodb"; import { Telemetry } from "./telemetry/telemetry.js"; import { UserConfig } from "./common/config.js"; import { type ServerEvent } from "./telemetry/types.js"; import { type ServerCommand } from "./telemetry/types.js"; -import { CallToolRequestSchema, CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import { + CallToolRequestSchema, + CallToolResult, + SubscribeRequestSchema, + UnsubscribeRequestSchema, +} from "@modelcontextprotocol/sdk/types.js"; import assert from "assert"; import { ToolBase } from "./tools/tool.js"; @@ -28,6 +32,7 @@ export class Server { public readonly userConfig: UserConfig; public readonly tools: ToolBase[] = []; private readonly startTime: number; + private readonly subscriptions = new Set(); constructor({ session, mcpServer, userConfig, telemetry }: ServerOptions) { this.startTime = Date.now(); @@ -43,7 +48,7 @@ export class Server { this.registerResources(); await this.validateConfig(); - this.mcpServer.server.registerCapabilities({ logging: {}, resources: { subscribe: true, listChanged: true } }); + this.mcpServer.server.registerCapabilities({ logging: {}, resources: { listChanged: true, subscribe: true } }); // TODO: Eventually we might want to make tools reactive too instead of relying on custom logic. 
this.registerTools(); @@ -71,9 +76,28 @@ export class Server { return existingHandler(request, extra); }); + this.mcpServer.server.setRequestHandler(SubscribeRequestSchema, ({ params }) => { + this.subscriptions.add(params.uri); + this.session.logger.debug({ + id: LogId.serverInitialized, + context: "resources", + message: `Client subscribed to resource: ${params.uri}`, + }); + return {}; + }); + + this.mcpServer.server.setRequestHandler(UnsubscribeRequestSchema, ({ params }) => { + this.subscriptions.delete(params.uri); + this.session.logger.debug({ + id: LogId.serverInitialized, + context: "resources", + message: `Client unsubscribed from resource: ${params.uri}`, + }); + return {}; + }); + this.mcpServer.server.oninitialized = (): void => { this.session.setAgentRunner(this.mcpServer.server.getClientVersion()); - this.session.sessionId = new ObjectId().toString(); this.session.logger.info({ id: LogId.serverInitialized, @@ -103,6 +127,16 @@ export class Server { await this.mcpServer.close(); } + public sendResourceListChanged(): void { + this.mcpServer.sendResourceListChanged(); + } + + public sendResourceUpdated(uri: string): void { + if (this.subscriptions.has(uri)) { + void this.mcpServer.server.sendResourceUpdated({ uri }); + } + } + /** * Emits a server event * @param command - The server command (e.g., "start", "stop", "register", "deregister") @@ -148,8 +182,8 @@ export class Server { private registerResources(): void { for (const resourceConstructor of Resources) { - const resource = new resourceConstructor(this, this.telemetry); - resource.register(); + const resource = new resourceConstructor(this.session, this.userConfig, this.telemetry); + resource.register(this); } } diff --git a/src/tools/mongodb/read/export.ts b/src/tools/mongodb/read/export.ts new file mode 100644 index 00000000..9eaacba2 --- /dev/null +++ b/src/tools/mongodb/read/export.ts @@ -0,0 +1,92 @@ +import z from "zod"; +import { ObjectId } from "bson"; +import { CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import { OperationType, ToolArgs } from "../../tool.js"; +import { DbOperationArgs, MongoDBToolBase } from "../mongodbTool.js"; +import { FindArgs } from "./find.js"; +import { jsonExportFormat } from "../../../common/exportsManager.js"; + +export class ExportTool extends MongoDBToolBase { + public name = "export"; + protected description = "Export a collection data or query results in the specified EJSON format."; + protected argsShape = { + exportTitle: z.string().describe("A short description to uniquely identify the export."), + ...DbOperationArgs, + ...FindArgs, + limit: z.number().optional().describe("The maximum number of documents to return"), + jsonExportFormat: jsonExportFormat + .default("relaxed") + .describe( + [ + "The format to be used when exporting collection data as EJSON with default being relaxed.", + "relaxed: A string format that emphasizes readability and interoperability at the expense of type preservation. That is, conversion from relaxed format to BSON can lose type information.", + "canonical: A string format that emphasizes type preservation at the expense of readability and interoperability. 
That is, conversion from canonical to BSON will generally preserve type information except in certain specific cases.", + ].join("\n") + ), + }; + public operationType: OperationType = "read"; + + protected async execute({ + database, + collection, + jsonExportFormat, + filter, + projection, + sort, + limit, + exportTitle, + }: ToolArgs): Promise { + const provider = await this.ensureConnected(); + const findCursor = provider.find(database, collection, filter ?? {}, { + projection, + sort, + limit, + promoteValues: false, + bsonRegExp: true, + }); + const exportName = `${database}.${collection}.${new ObjectId().toString()}.json`; + + const { exportURI, exportPath } = await this.session.exportsManager.createJSONExport({ + input: findCursor, + exportName, + exportTitle: + exportTitle || + `Export for namespace ${database}.${collection} requested on ${new Date().toLocaleString()}`, + jsonExportFormat, + }); + const toolCallContent: CallToolResult["content"] = [ + // Not all the clients as of this commit understands how to + // parse a resource_link so we provide a text result for them to + // understand what to do with the result. + { + type: "text", + text: `Data for namespace ${database}.${collection} is being exported and will be made available under resource URI - "${exportURI}".`, + }, + { + type: "resource_link", + name: exportName, + uri: exportURI, + description: "Resource URI for fetching exported data once it is ready.", + mimeType: "application/json", + }, + ]; + + // This special case is to make it easier to work with exported data for + // clients that still cannot reference resources (Cursor). + // More information here: https://jira.mongodb.org/browse/MCP-104 + if (this.isServerRunningLocally()) { + toolCallContent.push({ + type: "text", + text: `Optionally, when the export is finished, the exported data can also be accessed under path - "${exportPath}"`, + }); + } + + return { + content: toolCallContent, + }; + } + + private isServerRunningLocally(): boolean { + return this.config.transport === "stdio" || ["127.0.0.1", "localhost"].includes(this.config.httpHost); + } +} diff --git a/src/tools/mongodb/tools.ts b/src/tools/mongodb/tools.ts index c74fdf29..00575ee0 100644 --- a/src/tools/mongodb/tools.ts +++ b/src/tools/mongodb/tools.ts @@ -18,6 +18,7 @@ import { DropCollectionTool } from "./delete/dropCollection.js"; import { ExplainTool } from "./metadata/explain.js"; import { CreateCollectionTool } from "./create/createCollection.js"; import { LogsTool } from "./metadata/logs.js"; +import { ExportTool } from "./read/export.js"; export const MongoDbTools = [ ConnectTool, @@ -40,4 +41,5 @@ export const MongoDbTools = [ ExplainTool, CreateCollectionTool, LogsTool, + ExportTool, ]; diff --git a/src/transports/base.ts b/src/transports/base.ts index 7052f1c4..22a000cc 100644 --- a/src/transports/base.ts +++ b/src/transports/base.ts @@ -5,6 +5,8 @@ import { Session } from "../common/session.js"; import { Telemetry } from "../telemetry/telemetry.js"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { CompositeLogger, ConsoleLogger, DiskLogger, LoggerBase, McpLogger } from "../common/logger.js"; +import { ExportsManager } from "../common/exportsManager.js"; +import { ConnectionManager } from "../common/connectionManager.js"; export abstract class TransportRunnerBase { public logger: LoggerBase; @@ -39,11 +41,17 @@ export abstract class TransportRunnerBase { loggers.push(new McpLogger(mcpServer)); } + const logger = new CompositeLogger(...loggers); + const 
exportsManager = ExportsManager.init(userConfig, logger); + const connectionManager = new ConnectionManager(); + const session = new Session({ apiBaseUrl: userConfig.apiBaseUrl, apiClientId: userConfig.apiClientId, apiClientSecret: userConfig.apiClientSecret, - logger: new CompositeLogger(...loggers), + logger, + exportsManager, + connectionManager, }); const telemetry = Telemetry.create(session, userConfig); diff --git a/tests/accuracy/export.test.ts b/tests/accuracy/export.test.ts new file mode 100644 index 00000000..9e1f0cff --- /dev/null +++ b/tests/accuracy/export.test.ts @@ -0,0 +1,69 @@ +import { describeAccuracyTests } from "./sdk/describeAccuracyTests.js"; +import { Matcher } from "./sdk/matcher.js"; + +describeAccuracyTests([ + { + prompt: "Export all the movies in 'mflix.movies' namespace.", + expectedToolCalls: [ + { + toolName: "export", + parameters: { + database: "mflix", + collection: "movies", + filter: Matcher.emptyObjectOrUndefined, + limit: Matcher.undefined, + }, + }, + ], + }, + { + prompt: "Export all the movies in 'mflix.movies' namespace with runtime less than 100.", + expectedToolCalls: [ + { + toolName: "export", + parameters: { + database: "mflix", + collection: "movies", + filter: { + runtime: { $lt: 100 }, + }, + }, + }, + ], + }, + { + prompt: "Export all the movie titles available in 'mflix.movies' namespace", + expectedToolCalls: [ + { + toolName: "export", + parameters: { + database: "mflix", + collection: "movies", + projection: { + title: 1, + _id: Matcher.anyOf( + Matcher.undefined, + Matcher.number((value) => value === 0) + ), + }, + filter: Matcher.emptyObjectOrUndefined, + }, + }, + ], + }, + { + prompt: "From the mflix.movies namespace, export the first 2 movies of Horror genre sorted ascending by their runtime", + expectedToolCalls: [ + { + toolName: "export", + parameters: { + database: "mflix", + collection: "movies", + filter: { genres: "Horror" }, + sort: { runtime: 1 }, + limit: 2, + }, + }, + ], + }, +]); diff --git a/tests/integration/helpers.ts b/tests/integration/helpers.ts index f5d8f05b..738cbdfd 100644 --- a/tests/integration/helpers.ts +++ b/tests/integration/helpers.ts @@ -2,7 +2,7 @@ import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { InMemoryTransport } from "./inMemoryTransport.js"; import { Server } from "../../src/server.js"; import { UserConfig } from "../../src/common/config.js"; -import { McpError } from "@modelcontextprotocol/sdk/types.js"; +import { McpError, ResourceUpdatedNotificationSchema } from "@modelcontextprotocol/sdk/types.js"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { Session } from "../../src/common/session.js"; import { Telemetry } from "../../src/telemetry/telemetry.js"; @@ -10,6 +10,7 @@ import { config } from "../../src/common/config.js"; import { afterAll, afterEach, beforeAll, describe, expect, it, vi } from "vitest"; import { ConnectionManager } from "../../src/common/connectionManager.js"; import { CompositeLogger } from "../../src/common/logger.js"; +import { ExportsManager } from "../../src/common/exportsManager.js"; interface ParameterInfo { name: string; @@ -55,14 +56,17 @@ export function setupIntegrationTest(getUserConfig: () => UserConfig): Integrati } ); + const logger = new CompositeLogger(); + const exportsManager = ExportsManager.init(userConfig, logger); const connectionManager = new ConnectionManager(); const session = new Session({ apiBaseUrl: userConfig.apiBaseUrl, apiClientId: userConfig.apiClientId, apiClientSecret: 
userConfig.apiClientSecret, + logger, + exportsManager, connectionManager, - logger: new CompositeLogger(), }); // Mock hasValidAccessToken for tests @@ -269,3 +273,21 @@ function validateToolAnnotations(tool: ToolInfo, name: string, description: stri expect(tool.annotations.destructiveHint).toBe(false); } } + +export function timeout(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +/** + * Subscribes to the resources changed notification for the provided URI + */ +export function resourceChangedNotification(client: Client, uri: string): Promise { + return new Promise((resolve) => { + void client.subscribeResource({ uri }); + client.setNotificationHandler(ResourceUpdatedNotificationSchema, (notification) => { + if (notification.params.uri === uri) { + resolve(); + } + }); + }); +} diff --git a/tests/integration/resources/exportedData.test.ts b/tests/integration/resources/exportedData.test.ts new file mode 100644 index 00000000..94710d87 --- /dev/null +++ b/tests/integration/resources/exportedData.test.ts @@ -0,0 +1,151 @@ +import path from "path"; +import fs from "fs/promises"; +import { Long } from "bson"; +import { describe, expect, it, beforeEach, afterAll } from "vitest"; +import { CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import { defaultTestConfig, resourceChangedNotification, timeout } from "../helpers.js"; +import { describeWithMongoDB } from "../tools/mongodb/mongodbHelpers.js"; +import { contentWithResourceURILink } from "../tools/mongodb/read/export.test.js"; +import { UserConfig } from "../../../src/lib.js"; + +const userConfig: UserConfig = { + ...defaultTestConfig, + exportsPath: path.join(path.dirname(defaultTestConfig.exportsPath), `exports-${Date.now()}`), + exportTimeoutMs: 200, + exportCleanupIntervalMs: 300, +}; + +describeWithMongoDB( + "exported-data resource", + (integration) => { + beforeEach(async () => { + const mongoClient = integration.mongoClient(); + await mongoClient + .db("db") + .collection("coll") + .insertMany([ + { name: "foo", longNumber: new Long(1234) }, + { name: "bar", bigInt: new Long(123412341234) }, + ]); + }); + + afterAll(async () => { + await fs.rm(userConfig.exportsPath, { recursive: true, force: true }); + }); + + it("should be able to list resource template", async () => { + await integration.connectMcpClient(); + const response = await integration.mcpClient().listResourceTemplates(); + expect(response.resourceTemplates).toEqual([ + { + name: "exported-data", + uriTemplate: "exported-data://{exportName}", + description: "Data files exported in the current session.", + }, + ]); + }); + + describe("when requesting non-existent resource", () => { + it("should return an error", async () => { + const exportURI = "exported-data://db.coll.json"; + await integration.connectMcpClient(); + const response = await integration.mcpClient().readResource({ + uri: exportURI, + }); + expect(response.isError).toEqual(true); + expect(response.contents[0]?.uri).toEqual(exportURI); + expect(response.contents[0]?.text).toEqual( + `Error reading ${exportURI}: Requested export has either expired or does not exist.` + ); + }); + }); + + describe("when requesting an expired resource", () => { + it("should return an error", async () => { + await integration.connectMcpClient(); + const exportResponse = await integration.mcpClient().callTool({ + name: "export", + arguments: { database: "db", collection: "coll", exportTitle: "Export for db.coll" }, + }); + + const exportedResourceURI = (exportResponse as 
CallToolResult).content.find( + (part) => part.type === "resource_link" + )?.uri; + expect(exportedResourceURI).toBeDefined(); + + // wait for export expired + for (let tries = 0; tries < 10; tries++) { + await timeout(300); + const response = await integration.mcpClient().readResource({ + uri: exportedResourceURI as string, + }); + + // wait for an error from the MCP Server as it + // means the resource is not available anymore + if (response.isError !== true) { + continue; + } + + expect(response.isError).toEqual(true); + expect(response.contents[0]?.uri).toEqual(exportedResourceURI); + expect(response.contents[0]?.text).toMatch(`Error reading ${exportedResourceURI}:`); + break; + } + }); + }); + + describe("after requesting a fresh export", () => { + it("should be able to read the resource", async () => { + await integration.connectMcpClient(); + const exportResponse = await integration.mcpClient().callTool({ + name: "export", + arguments: { database: "db", collection: "coll", exportTitle: "Export for db.coll" }, + }); + const content = exportResponse.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const exportedResourceURI = (exportResponse as CallToolResult).content.find( + (part) => part.type === "resource_link" + )?.uri; + expect(exportedResourceURI).toBeDefined(); + + const response = await integration.mcpClient().readResource({ + uri: exportedResourceURI as string, + }); + expect(response.isError).toBeFalsy(); + expect(response.contents[0]?.mimeType).toEqual("application/json"); + expect(response.contents[0]?.text).toContain("foo"); + }); + + it("should be able to autocomplete the resource", async () => { + await integration.connectMcpClient(); + const exportResponse = await integration.mcpClient().callTool({ + name: "export", + arguments: { database: "big", collection: "coll", exportTitle: "Export for big.coll" }, + }); + const content = exportResponse.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const exportedResourceURI = (exportResponse as CallToolResult).content.find( + (part) => part.type === "resource_link" + )?.uri; + expect(exportedResourceURI).toBeDefined(); + + const completeResponse = await integration.mcpClient().complete({ + ref: { + type: "ref/resource", + uri: "exported-data://{exportName}", + }, + argument: { + name: "exportName", + value: "b", + }, + }); + expect(completeResponse.completion.total).toEqual(1); + }); + }); + }, + () => userConfig +); diff --git a/tests/integration/telemetry.test.ts b/tests/integration/telemetry.test.ts index 9be1b0ad..95bc79c2 100644 --- a/tests/integration/telemetry.test.ts +++ b/tests/integration/telemetry.test.ts @@ -5,6 +5,8 @@ import { config } from "../../src/common/config.js"; import nodeMachineId from "node-machine-id"; import { describe, expect, it } from "vitest"; import { CompositeLogger } from "../../src/common/logger.js"; +import { ConnectionManager } from "../../src/common/connectionManager.js"; +import { ExportsManager } from "../../src/common/exportsManager.js"; describe("Telemetry", () => { it("should resolve the actual machine ID", async () => { @@ -12,10 +14,13 @@ describe("Telemetry", () => { const actualHashedId = createHmac("sha256", actualId.toUpperCase()).update("atlascli").digest("hex"); + const logger = new CompositeLogger(); 
const telemetry = Telemetry.create( new Session({ apiBaseUrl: "", logger: new CompositeLogger(), + exportsManager: ExportsManager.init(config, logger), + connectionManager: new ConnectionManager(), }), config ); diff --git a/tests/integration/tools/mongodb/read/export.test.ts b/tests/integration/tools/mongodb/read/export.test.ts new file mode 100644 index 00000000..343f3ef4 --- /dev/null +++ b/tests/integration/tools/mongodb/read/export.test.ts @@ -0,0 +1,377 @@ +import path from "path"; +import { Long } from "bson"; +import fs from "fs/promises"; +import { afterAll, beforeEach, describe, expect, it } from "vitest"; +import { CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import { + databaseCollectionParameters, + defaultTestConfig, + resourceChangedNotification, + validateThrowsForInvalidArguments, + validateToolMetadata, +} from "../../../helpers.js"; +import { describeWithMongoDB } from "../mongodbHelpers.js"; +import { UserConfig } from "../../../../../src/lib.js"; + +const userConfig: UserConfig = { + ...defaultTestConfig, + exportsPath: path.join(path.dirname(defaultTestConfig.exportsPath), `exports-${Date.now()}`), +}; + +export function contentWithTextResourceURI( + content: CallToolResult["content"] +): CallToolResult["content"][number] | undefined { + return content.find((part) => { + return part.type === "text" && part.text.startsWith(`Data for namespace`); + }); +} + +export function contentWithResourceURILink( + content: CallToolResult["content"] +): CallToolResult["content"][number] | undefined { + return content.find((part) => { + return part.type === "resource_link"; + }); +} + +export function contentWithExportPath( + content: CallToolResult["content"] +): CallToolResult["content"][number] | undefined { + return content.find((part) => { + return ( + part.type === "text" && + part.text.startsWith( + `Optionally, when the export is finished, the exported data can also be accessed under path -` + ) + ); + }); +} + +describeWithMongoDB( + "export tool", + (integration) => { + validateToolMetadata( + integration, + "export", + "Export a collection data or query results in the specified EJSON format.", + [ + ...databaseCollectionParameters, + { + name: "exportTitle", + description: "A short description to uniquely identify the export.", + type: "string", + required: true, + }, + { + name: "filter", + description: "The query filter, matching the syntax of the query argument of db.collection.find()", + type: "object", + required: false, + }, + { + name: "jsonExportFormat", + description: [ + "The format to be used when exporting collection data as EJSON with default being relaxed.", + "relaxed: A string format that emphasizes readability and interoperability at the expense of type preservation. That is, conversion from relaxed format to BSON can lose type information.", + "canonical: A string format that emphasizes type preservation at the expense of readability and interoperability. 
That is, conversion from canonical to BSON will generally preserve type information except in certain specific cases.", + ].join("\n"), + type: "string", + required: false, + }, + { + name: "limit", + description: "The maximum number of documents to return", + type: "number", + required: false, + }, + { + name: "projection", + description: + "The projection, matching the syntax of the projection argument of db.collection.find()", + type: "object", + required: false, + }, + { + name: "sort", + description: + "A document, describing the sort order, matching the syntax of the sort argument of cursor.sort(). The keys of the object are the fields to sort on, while the values are the sort directions (1 for ascending, -1 for descending).", + type: "object", + required: false, + }, + ] + ); + + validateThrowsForInvalidArguments(integration, "export", [ + {}, + { database: 123, collection: "bar" }, + { database: "test", collection: [] }, + { database: "test", collection: "bar", filter: "{ $gt: { foo: 5 } }" }, + { database: "test", collection: "bar", projection: "name" }, + { database: "test", collection: "bar", limit: "10" }, + { database: "test", collection: "bar", sort: [], limit: 10 }, + ]); + + beforeEach(async () => { + await integration.connectMcpClient(); + }); + + afterAll(async () => { + await fs.rm(userConfig.exportsPath, { recursive: true, force: true }); + }); + + it("when provided with incorrect namespace, export should have empty data", async function () { + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: "non-existent", + collection: "foos", + exportTitle: "Export for non-existent.foos", + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + expect(content).toHaveLength(3); + expect(contentWithTextResourceURI(content)).toBeDefined(); + expect(contentWithResourceURILink(content)).toBeDefined(); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? []; + expect(localPath).toBeDefined(); + + expect(await fs.readFile(localPath as string, "utf8")).toEqual("[]"); + }); + + describe("with correct namespace", function () { + beforeEach(async () => { + const mongoClient = integration.mongoClient(); + await mongoClient + .db(integration.randomDbName()) + .collection("foo") + .insertMany([ + { name: "foo", longNumber: new Long(1234) }, + { name: "bar", bigInt: new Long(123412341234) }, + ]); + }); + + it("should export entire namespace when filter are empty", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? 
[]; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toHaveLength(2); + expect(exportedContent[0]?.name).toEqual("foo"); + expect(exportedContent[1]?.name).toEqual("bar"); + }); + + it("should export filter results namespace when there are filters", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + filter: { name: "foo" }, + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? []; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toHaveLength(1); + expect(exportedContent[0]?.name).toEqual("foo"); + }); + + it("should export results limited to the provided limit", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + limit: 1, + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? []; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toHaveLength(1); + expect(exportedContent[0]?.name).toEqual("foo"); + }); + + it("should export results with sorted by the provided sort", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + limit: 1, + sort: { longNumber: 1 }, + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? 
[]; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toHaveLength(1); + expect(exportedContent[0]?.name).toEqual("bar"); + }); + + it("should export results containing only projected fields", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + limit: 1, + projection: { _id: 0, name: 1 }, + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? []; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toEqual([ + { + name: "foo", + }, + ]); + }); + + it("should export relaxed json when provided jsonExportFormat is relaxed", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + limit: 1, + projection: { _id: 0 }, + jsonExportFormat: "relaxed", + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? []; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toEqual([ + { + name: "foo", + longNumber: 1234, + }, + ]); + }); + + it("should export canonical json when provided jsonExportFormat is canonical", async function () { + await integration.connectMcpClient(); + const response = await integration.mcpClient().callTool({ + name: "export", + arguments: { + database: integration.randomDbName(), + collection: "foo", + limit: 1, + projection: { _id: 0 }, + jsonExportFormat: "canonical", + exportTitle: `Export for ${integration.randomDbName()}.foo`, + }, + }); + const content = response.content as CallToolResult["content"]; + const exportURI = contentWithResourceURILink(content)?.uri as string; + await resourceChangedNotification(integration.mcpClient(), exportURI); + + const localPathPart = contentWithExportPath(content); + expect(localPathPart).toBeDefined(); + const [, localPath] = /"(.*)"/.exec(String(localPathPart?.text)) ?? 
[]; + expect(localPath).toBeDefined(); + + const exportedContent = JSON.parse(await fs.readFile(localPath as string, "utf8")) as Record< + string, + unknown + >[]; + expect(exportedContent).toEqual([ + { + name: "foo", + longNumber: { + $numberLong: "1234", + }, + }, + ]); + }); + }); + }, + () => userConfig +); diff --git a/tests/integration/transports/stdio.test.ts b/tests/integration/transports/stdio.test.ts index 6b08e4e6..aaa61d63 100644 --- a/tests/integration/transports/stdio.test.ts +++ b/tests/integration/transports/stdio.test.ts @@ -32,7 +32,7 @@ describeWithMongoDB("StdioRunner", (integration) => { const response = await client.listTools(); expect(response).toBeDefined(); expect(response.tools).toBeDefined(); - expect(response.tools).toHaveLength(20); + expect(response.tools).toHaveLength(21); const sortedTools = response.tools.sort((a, b) => a.name.localeCompare(b.name)); expect(sortedTools[0]?.name).toBe("aggregate"); diff --git a/tests/unit/common/exportsManager.test.ts b/tests/unit/common/exportsManager.test.ts new file mode 100644 index 00000000..d871933a --- /dev/null +++ b/tests/unit/common/exportsManager.test.ts @@ -0,0 +1,621 @@ +import path from "path"; +import fs from "fs/promises"; +import { Readable, Transform } from "stream"; +import { FindCursor, Long } from "mongodb"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import { + ensureExtension, + isExportExpired, + ExportsManager, + ExportsManagerConfig, + validateExportName, +} from "../../../src/common/exportsManager.js"; + +import { config } from "../../../src/common/config.js"; +import { ROOT_DIR } from "../../accuracy/sdk/constants.js"; +import { timeout } from "../../integration/helpers.js"; +import { EJSON, EJSONOptions, ObjectId } from "bson"; +import { CompositeLogger } from "../../../src/common/logger.js"; + +const logger = new CompositeLogger(); +const exportsPath = path.join(ROOT_DIR, "tests", "tmp", `exports-${Date.now()}`); +const exportsManagerConfig: ExportsManagerConfig = { + exportsPath, + exportTimeoutMs: config.exportTimeoutMs, + exportCleanupIntervalMs: config.exportCleanupIntervalMs, +} as const; + +function getExportNameAndPath({ + uniqueExportsId = new ObjectId().toString(), + uniqueFileId = new ObjectId().toString(), + database = "foo", + collection = "bar", +}: + | { + uniqueExportsId?: string; + uniqueFileId?: string; + database?: string; + collection?: string; + } + | undefined = {}): { + sessionExportsPath: string; + exportName: string; + exportPath: string; + exportURI: string; + uniqueExportsId: string; +} { + const exportName = `${database}.${collection}.${uniqueFileId}.json`; + // This is the exports directory for a session. 
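+ // For example, an export for database "foo" and collection "bar" lands at
+ // <exportsPath>/<uniqueExportsId>/foo.bar.<uniqueFileId>.json and is addressed via the
+ // URI exported-data://foo.bar.<uniqueFileId>.json (see exportPath and exportURI below).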
+ const sessionExportsPath = path.join(exportsPath, uniqueExportsId); + const exportPath = path.join(sessionExportsPath, exportName); + return { + sessionExportsPath, + exportName, + exportPath, + exportURI: `exported-data://${exportName}`, + uniqueExportsId, + }; +} + +function createDummyFindCursor( + dataArray: unknown[], + beforeEachChunk?: (chunkIndex: number) => void | Promise +): { cursor: FindCursor; cursorCloseNotification: Promise } { + let index = 0; + const readable = new Readable({ + objectMode: true, + async read(): Promise { + try { + await beforeEachChunk?.(index); + if (index < dataArray.length) { + this.push(dataArray[index++]); + } else { + this.push(null); + } + } catch (error) { + this.destroy(error as Error); + } + }, + }); + + let notifyClose: () => Promise; + const cursorCloseNotification = new Promise((resolve) => { + notifyClose = async (): Promise => { + await timeout(10); + resolve(); + }; + }); + readable.once("close", () => void notifyClose?.()); + + return { + cursor: { + stream() { + return readable; + }, + close() { + return Promise.resolve(readable.destroy()); + }, + } as unknown as FindCursor, + cursorCloseNotification, + }; +} + +function createDummyFindCursorWithDelay( + dataArray: unknown[], + delayMs: number +): { cursor: FindCursor; cursorCloseNotification: Promise } { + return createDummyFindCursor(dataArray, () => timeout(delayMs)); +} + +async function fileExists(filePath: string): Promise { + try { + await fs.access(filePath); + return true; + } catch { + return false; + } +} + +function timeoutPromise(timeoutMS: number, context: string): Promise { + return new Promise((_, reject) => { + setTimeout(() => reject(new Error(`${context} - Timed out!`)), timeoutMS); + }); +} + +async function getExportAvailableNotifier( + expectedExportURI: string, + manager: ExportsManager, + timeoutMS = 10_000 +): Promise { + const exportAvailablePromise = new Promise((resolve) => { + manager.on("export-available", (exportURI) => { + if (expectedExportURI === exportURI) { + resolve(exportURI); + } + }); + }); + return await Promise.race([ + timeoutPromise(timeoutMS, `Waiting for export-available - ${expectedExportURI}`), + exportAvailablePromise, + ]); +} + +describe("ExportsManager unit test", () => { + let manager: ExportsManager; + let managerClosedPromise: Promise; + + beforeEach(async () => { + await fs.mkdir(exportsManagerConfig.exportsPath, { recursive: true }); + manager = ExportsManager.init(exportsManagerConfig, logger); + + let notifyManagerClosed: () => void; + managerClosedPromise = new Promise((resolve): void => { + notifyManagerClosed = resolve; + }); + manager.once("closed", (): void => { + notifyManagerClosed(); + }); + }); + + afterEach(async () => { + await manager?.close(); + await managerClosedPromise; + await fs.rm(exportsManagerConfig.exportsPath, { recursive: true, force: true }); + }); + + describe("#availableExport", () => { + it("should throw if the manager is shutting down", () => { + void manager.close(); + expect(() => manager.availableExports).toThrow("ExportsManager is shutting down."); + }); + + it("should list only the exports that are in ready state", async () => { + // This export will finish in at-least 1 second + const { exportName: exportName1, uniqueExportsId } = getExportNameAndPath(); + await manager.createJSONExport({ + input: createDummyFindCursorWithDelay([{ name: "Test1" }], 1000).cursor, + exportName: exportName1, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + + // This export will finish way 
sooner than the first one + const { exportName: exportName2, exportURI } = getExportNameAndPath({ uniqueExportsId }); + const secondExportNotifier = getExportAvailableNotifier(exportURI, manager); + const { cursor } = createDummyFindCursor([{ name: "Test1" }]); + await manager.createJSONExport({ + input: cursor, + exportName: exportName2, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + + await secondExportNotifier; + expect(manager.availableExports).toHaveLength(1); + expect(manager.availableExports[0]?.exportName).toEqual(exportName2); + }); + }); + + describe("#readExport", () => { + it("should throw if the manager is shutting down", async () => { + void manager.close(); + await expect(() => manager.readExport("name")).rejects.toThrow("ExportsManager is shutting down."); + }); + + it("should notify the user if resource is still being generated", async () => { + const { exportName } = getExportNameAndPath(); + const { cursor } = createDummyFindCursorWithDelay([{ name: "Test1" }], 200); + // create only provides a readable handle but does not guarantee + // that resource is available for read + await manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + + try { + await manager.readExport(exportName); + throw new Error("Should have failed."); + } catch (err: unknown) { + expect(String(err)).toEqual("Error: Requested export is still being generated. Try again later."); + } + }); + + it("should return the resource content if the resource is ready to be consumed", async () => { + const { exportName, exportURI } = getExportNameAndPath(); + const { cursor } = createDummyFindCursor([]); + const exportAvailableNotifier = getExportAvailableNotifier(exportURI, manager); + await manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + await exportAvailableNotifier; + expect(await manager.readExport(exportName)).toEqual("[]"); + }); + + it("should handle encoded name", async () => { + const { exportName, exportURI } = getExportNameAndPath({ database: "some database", collection: "coll" }); + const { cursor } = createDummyFindCursor([]); + const exportAvailableNotifier = getExportAvailableNotifier(encodeURI(exportURI), manager); + await manager.createJSONExport({ + input: cursor, + exportName: encodeURIComponent(exportName), + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + await exportAvailableNotifier; + expect(await manager.readExport(encodeURIComponent(exportName))).toEqual("[]"); + }); + }); + + describe("#createJSONExport", () => { + let cursor: FindCursor; + let cursorCloseNotification: Promise; + let exportName: string; + let exportPath: string; + let exportURI: string; + beforeEach(() => { + void cursor?.close(); + ({ cursor, cursorCloseNotification } = createDummyFindCursor([ + { + name: "foo", + longNumber: Long.fromNumber(12), + }, + { + name: "bar", + longNumber: Long.fromNumber(123456), + }, + ])); + ({ exportName, exportPath, exportURI } = getExportNameAndPath()); + }); + + it("should throw if the manager is shutting down", async () => { + const { cursor } = createDummyFindCursor([]); + void manager.close(); + await expect(() => + manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }) + ).rejects.toThrow(); + }); + + it("should throw if the same name export is requested more than once", async () => { + await manager.createJSONExport({ + 
input: createDummyFindCursor([{ name: 1 }, { name: 2 }]).cursor, + exportName, + exportTitle: "Export title 1", + jsonExportFormat: "relaxed", + }); + await expect(() => + manager.createJSONExport({ + input: createDummyFindCursor([{ name: 1 }, { name: 2 }]).cursor, + exportName, + exportTitle: "Export title 2", + jsonExportFormat: "relaxed", + }) + ).rejects.toThrow("Export with same name is either already available or being generated"); + }); + + describe("when cursor is empty", () => { + it("should create an empty export", async () => { + const { cursor, cursorCloseNotification } = createDummyFindCursor([]); + + const emitSpy = vi.spyOn(manager, "emit"); + await manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + await cursorCloseNotification; + + // Updates available export + const availableExports = manager.availableExports; + expect(availableExports).toHaveLength(1); + expect(availableExports).toContainEqual( + expect.objectContaining({ + exportName, + exportURI, + }) + ); + + // Emit event + expect(emitSpy).toHaveBeenCalledWith("export-available", exportURI); + + // Exports relaxed json + const jsonData = JSON.parse(await manager.readExport(exportName)) as unknown[]; + expect(jsonData).toEqual([]); + }); + }); + + describe.each([ + { cond: "when exportName does not contain extension", exportName: `foo.bar.${Date.now()}` }, + { cond: "when exportName contains extension", exportName: `foo.bar.${Date.now()}.json` }, + ])("$cond", ({ exportName }) => { + it("should export relaxed json, update available exports and emit export-available event", async () => { + const emitSpy = vi.spyOn(manager, "emit"); + await manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "relaxed", + }); + await cursorCloseNotification; + + const expectedExportName = exportName.endsWith(".json") ? exportName : `${exportName}.json`; + // Updates available export + const availableExports = manager.availableExports; + expect(availableExports).toHaveLength(1); + expect(availableExports).toContainEqual( + expect.objectContaining({ + exportName: expectedExportName, + exportURI: `exported-data://${expectedExportName}`, + }) + ); + + // Emit event + expect(emitSpy).toHaveBeenCalledWith("export-available", `exported-data://${expectedExportName}`); + + // Exports relaxed json + const jsonData = JSON.parse(await manager.readExport(expectedExportName)) as unknown[]; + expect(jsonData).toContainEqual(expect.objectContaining({ name: "foo", longNumber: 12 })); + expect(jsonData).toContainEqual(expect.objectContaining({ name: "bar", longNumber: 123456 })); + }); + }); + + describe.each([ + { cond: "when exportName does not contain extension", exportName: `foo.bar.${Date.now()}` }, + { cond: "when exportName contains extension", exportName: `foo.bar.${Date.now()}.json` }, + ])("$cond", ({ exportName }) => { + it("should export canonical json, update available exports and emit export-available event", async () => { + const emitSpy = vi.spyOn(manager, "emit"); + await manager.createJSONExport({ + input: cursor, + exportName, + exportTitle: "Some export", + jsonExportFormat: "canonical", + }); + await cursorCloseNotification; + + const expectedExportName = exportName.endsWith(".json") ? 
exportName : `${exportName}.json`;
+ // Updates available export
+ const availableExports = manager.availableExports;
+ expect(availableExports).toHaveLength(1);
+ expect(availableExports).toContainEqual(
+ expect.objectContaining({
+ exportName: expectedExportName,
+ exportURI: `exported-data://${expectedExportName}`,
+ })
+ );
+
+ // Emit event
+ expect(emitSpy).toHaveBeenCalledWith("export-available", `exported-data://${expectedExportName}`);
+
+ // Exports canonical json
+ const jsonData = JSON.parse(await manager.readExport(expectedExportName)) as unknown[];
+ expect(jsonData).toContainEqual(
+ expect.objectContaining({ name: "foo", longNumber: { $numberLong: "12" } })
+ );
+ expect(jsonData).toContainEqual(
+ expect.objectContaining({ name: "bar", longNumber: { $numberLong: "123456" } })
+ );
+ });
+ });
+
+ describe("when there is an error during stream transform", () => {
+ it("should remove the partial export and never make it available", async () => {
+ const emitSpy = vi.spyOn(manager, "emit");
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any
+ (manager as any).docToEJSONStream = function (ejsonOptions: EJSONOptions | undefined): Transform {
+ let docsTransformed = 0;
+ return new Transform({
+ objectMode: true,
+ transform(chunk: unknown, encoding, callback): void {
+ try {
+ const doc = EJSON.stringify(chunk, undefined, undefined, ejsonOptions);
+ if (docsTransformed === 0) {
+ this.push("[" + doc);
+ } else if (docsTransformed === 1) {
+ throw new Error("Could not transform the chunk!");
+ } else {
+ this.push(",\n" + doc);
+ }
+ docsTransformed++;
+ callback();
+ } catch (err) {
+ callback(err as Error);
+ }
+ },
+ flush(this: Transform, cb): void {
+ if (docsTransformed === 0) {
+ this.push("[]");
+ } else {
+ this.push("]");
+ }
+ cb();
+ },
+ });
+ };
+ await manager.createJSONExport({
+ input: cursor,
+ exportName,
+ exportTitle: "Some export",
+ jsonExportFormat: "relaxed",
+ });
+ await cursorCloseNotification;
+
+ // Because the export was never added to the available exports.
+ await expect(() => manager.readExport(exportName)).rejects.toThrow(
+ "Requested export has either expired or does not exist."
+ );
+ expect(emitSpy).not.toHaveBeenCalled();
+ expect(manager.availableExports).toEqual([]);
+ expect(await fileExists(exportPath)).toEqual(false);
+ });
+ });
+
+ describe("when there is an error on read stream", () => {
+ it("should remove the partial export and never make it available", async () => {
+ const emitSpy = vi.spyOn(manager, "emit");
+ // A cursor that will make the read stream fail after the first chunk
+ const { cursor, cursorCloseNotification } = createDummyFindCursor([{ name: "Test1" }], (chunkIndex) => {
+ if (chunkIndex > 0) {
+ return Promise.reject(new Error("Connection timed out!"));
+ }
+ return Promise.resolve();
+ });
+ await manager.createJSONExport({
+ input: cursor,
+ exportName,
+ exportTitle: "Some export",
+ jsonExportFormat: "relaxed",
+ });
+ await cursorCloseNotification;
+
+ // Because the export was never added to the available exports.
+ await expect(() => manager.readExport(exportName)).rejects.toThrow(
+ "Requested export has either expired or does not exist."
+ );
+ expect(emitSpy).not.toHaveBeenCalled();
+ expect(manager.availableExports).toEqual([]);
+ expect(await fileExists(exportPath)).toEqual(false);
+ });
+ });
+ });
+
+ describe("#cleanupExpiredExports", () => {
+ let cursor: FindCursor;
+ let cursorCloseNotification: Promise;
+ beforeEach(() => {
+ void cursor?.close();
+ ({ cursor, cursorCloseNotification } = createDummyFindCursor([
+ {
+ name: "foo",
+ longNumber: Long.fromNumber(12),
+ },
+ {
+ name: "bar",
+ longNumber: Long.fromNumber(123456),
+ },
+ ]));
+ });
+
+ it("should not clean up in-progress exports", async () => {
+ const { exportName, uniqueExportsId } = getExportNameAndPath();
+ const manager = ExportsManager.init(
+ {
+ ...exportsManagerConfig,
+ exportTimeoutMs: 100,
+ exportCleanupIntervalMs: 50,
+ },
+ new CompositeLogger(),
+ uniqueExportsId
+ );
+ const { cursor } = createDummyFindCursorWithDelay([{ name: "Test" }], 2000);
+ await manager.createJSONExport({
+ input: cursor,
+ exportName,
+ exportTitle: "Some export",
+ jsonExportFormat: "relaxed",
+ });
+
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-member-access
+ expect((manager as any).storedExports[exportName]?.exportStatus).toEqual("in-progress");
+
+ // After the cleanup interval, the export should still be there
+ await timeout(200);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-member-access
+ expect((manager as any).storedExports[exportName]?.exportStatus).toEqual("in-progress");
+ });
+
+ it("should clean up expired exports", async () => {
+ const { exportName, exportPath, exportURI, uniqueExportsId } = getExportNameAndPath();
+ const manager = ExportsManager.init(
+ {
+ ...exportsManagerConfig,
+ exportTimeoutMs: 100,
+ exportCleanupIntervalMs: 50,
+ },
+ new CompositeLogger(),
+ uniqueExportsId
+ );
+ await manager.createJSONExport({
+ input: cursor,
+ exportName,
+ exportTitle: "Some export",
+ jsonExportFormat: "relaxed",
+ });
+ await cursorCloseNotification;
+
+ expect(manager.availableExports).toContainEqual(
+ expect.objectContaining({
+ exportName,
+ exportURI,
+ })
+ );
+ expect(await fileExists(exportPath)).toEqual(true);
+ await timeout(200);
+ expect(manager.availableExports).toEqual([]);
+ expect(await fileExists(exportPath)).toEqual(false);
+ });
+ });
+
+ describe("#close", () => {
+ it("should abort ongoing export and remove partial file", async () => {
+ const { exportName, exportPath } = getExportNameAndPath();
+ const { cursor } = createDummyFindCursorWithDelay([{ name: "Test" }], 2000);
+ await manager.createJSONExport({
+ input: cursor,
+ exportName,
+ exportTitle: "Some export",
+ jsonExportFormat: "relaxed",
+ });
+ // Give the pipeline a brief moment to start and create the file
+ await timeout(50);
+
+ await manager.close();
+
+ await expect(fileExists(exportPath)).resolves.toEqual(false);
+ });
+ });
+});
+
+describe("#ensureExtension", () => {
+ it("should append provided extension when not present", () => {
+ expect(ensureExtension("random", "json")).toEqual("random.json");
+ expect(ensureExtension("random.1234", "json")).toEqual("random.1234.json");
+ expect(ensureExtension("/random/random-file", "json")).toEqual("/random/random-file.json");
+ });
+ it("should not append provided extension when present", () => {
+ expect(ensureExtension("random.json", "json")).toEqual("random.json");
+ expect(ensureExtension("random.1234.json", "json")).toEqual("random.1234.json");
+ expect(ensureExtension("/random/random-file.json",
"json")).toEqual("/random/random-file.json"); + }); +}); + +describe("#validateExportName", () => { + it("should return decoded name when name is valid", () => { + expect(validateExportName(encodeURIComponent("Test Name.json"))).toEqual("Test Name.json"); + }); + it("should throw when name is invalid", () => { + expect(() => validateExportName("NoExtension")).toThrow("Provided export name has no extension"); + expect(() => validateExportName("../something.json")).toThrow("Invalid export name: path traversal hinted"); + }); +}); + +describe("#isExportExpired", () => { + it("should return true if export is expired", () => { + const createdAt = Date.now() - 1000; + expect(isExportExpired(createdAt, 500)).toEqual(true); + }); + it("should return false if export is not expired", () => { + const createdAt = Date.now(); + expect(isExportExpired(createdAt, 500)).toEqual(false); + }); +}); diff --git a/tests/unit/common/session.test.ts b/tests/unit/common/session.test.ts index 592d60fe..1d26d8d8 100644 --- a/tests/unit/common/session.test.ts +++ b/tests/unit/common/session.test.ts @@ -3,6 +3,8 @@ import { NodeDriverServiceProvider } from "@mongosh/service-provider-node-driver import { Session } from "../../../src/common/session.js"; import { config } from "../../../src/common/config.js"; import { CompositeLogger } from "../../../src/common/logger.js"; +import { ConnectionManager } from "../../../src/common/connectionManager.js"; +import { ExportsManager } from "../../../src/common/exportsManager.js"; vi.mock("@mongosh/service-provider-node-driver"); const MockNodeDriverServiceProvider = vi.mocked(NodeDriverServiceProvider); @@ -10,10 +12,13 @@ const MockNodeDriverServiceProvider = vi.mocked(NodeDriverServiceProvider); describe("Session", () => { let session: Session; beforeEach(() => { + const logger = new CompositeLogger(); session = new Session({ apiClientId: "test-client-id", apiBaseUrl: "https://api.test.com", - logger: new CompositeLogger(), + logger, + exportsManager: ExportsManager.init(config, logger), + connectionManager: new ConnectionManager(), }); MockNodeDriverServiceProvider.connect = vi.fn().mockResolvedValue({} as unknown as NodeDriverServiceProvider); diff --git a/tests/unit/resources/common/debug.test.ts b/tests/unit/resources/common/debug.test.ts index 4a2f704b..8e798827 100644 --- a/tests/unit/resources/common/debug.test.ts +++ b/tests/unit/resources/common/debug.test.ts @@ -1,21 +1,26 @@ import { beforeEach, describe, expect, it } from "vitest"; import { DebugResource } from "../../../../src/resources/common/debug.js"; import { Session } from "../../../../src/common/session.js"; -import { Server } from "../../../../src/server.js"; import { Telemetry } from "../../../../src/telemetry/telemetry.js"; import { config } from "../../../../src/common/config.js"; +import { CompositeLogger } from "../../../../src/common/logger.js"; +import { ConnectionManager } from "../../../../src/common/connectionManager.js"; +import { ExportsManager } from "../../../../src/common/exportsManager.js"; describe("debug resource", () => { - // eslint-disable-next-line - const session = new Session({} as any); - // eslint-disable-next-line - const server = new Server({ session } as any); + const logger = new CompositeLogger(); + const session = new Session({ + apiBaseUrl: "", + logger, + exportsManager: ExportsManager.init(config, logger), + connectionManager: new ConnectionManager(), + }); const telemetry = Telemetry.create(session, { ...config, telemetry: "disabled" }); - let debugResource: 
DebugResource = new DebugResource(server, telemetry); + let debugResource: DebugResource = new DebugResource(session, config, telemetry); beforeEach(() => { - debugResource = new DebugResource(server, telemetry); + debugResource = new DebugResource(session, config, telemetry); }); it("should be connected when a connected event happens", () => {