diff --git a/.changeset/dull-pumas-punch.md b/.changeset/dull-pumas-punch.md new file mode 100644 index 000000000..fb69ff215 --- /dev/null +++ b/.changeset/dull-pumas-punch.md @@ -0,0 +1,9 @@ +--- +'@powersync/service-module-mongodb-storage': patch +'@powersync/service-errors': patch +'@powersync/lib-service-mongodb': patch +'@powersync/service-core': patch +'@powersync/service-image': patch +--- + +[MongoDB Storage] Improve error messages for checksum query timeouts diff --git a/libs/lib-mongodb/src/db/errors.ts b/libs/lib-mongodb/src/db/errors.ts index 2c5eb183f..85b0838ee 100644 --- a/libs/lib-mongodb/src/db/errors.ts +++ b/libs/lib-mongodb/src/db/errors.ts @@ -1,4 +1,9 @@ -import { DatabaseConnectionError, ErrorCode, ServiceError } from '@powersync/lib-services-framework'; +import { + DatabaseConnectionError, + DatabaseQueryError, + ErrorCode, + ServiceError +} from '@powersync/lib-services-framework'; import { isMongoServerError } from './mongo.js'; import { MongoNetworkError, MongoServerSelectionError } from 'mongodb'; @@ -58,6 +63,22 @@ export function mapConnectionError(err: any): ServiceError { } } +export function mapQueryError(err: any, context: string): ServiceError { + if (ServiceError.isServiceError(err)) { + return err; + } else if (isMongoServerError(err)) { + if (err.codeName == 'MaxTimeMSExpired') { + return new DatabaseQueryError(ErrorCode.PSYNC_S2403, `Query timed out ${context}`, err); + } + + // Fallback + return new DatabaseQueryError(ErrorCode.PSYNC_S2404, `MongoDB server error ${context}: ${err.codeName}`, err); + } else { + // Fallback + return new DatabaseQueryError(ErrorCode.PSYNC_S2404, `MongoDB connection error ${context}`, err); + } +} + function isNetworkError(err: any): err is MongoNetworkError { return err?.name === 'MongoNetworkError'; } diff --git a/libs/lib-mongodb/src/db/mongo.ts b/libs/lib-mongodb/src/db/mongo.ts index 059c4facd..b8f8ff170 100644 --- a/libs/lib-mongodb/src/db/mongo.ts +++ b/libs/lib-mongodb/src/db/mongo.ts @@ 
-19,7 +19,7 @@ export const MONGO_SOCKET_TIMEOUT_MS = 60_000; * * Must be less than MONGO_SOCKET_TIMEOUT_MS to ensure proper error handling. */ -export const MONGO_OPERATION_TIMEOUT_MS = 30_000; +export const MONGO_OPERATION_TIMEOUT_MS = 40_000; /** * Same as above, but specifically for clear operations. diff --git a/modules/module-mongodb-storage/src/storage/implementation/MongoSyncBucketStorage.ts b/modules/module-mongodb-storage/src/storage/implementation/MongoSyncBucketStorage.ts index 676ef7130..d069d28f6 100644 --- a/modules/module-mongodb-storage/src/storage/implementation/MongoSyncBucketStorage.ts +++ b/modules/module-mongodb-storage/src/storage/implementation/MongoSyncBucketStorage.ts @@ -348,7 +348,10 @@ export class MongoSyncBucketStorage // 1. We can calculate the document size accurately without serializing again. // 2. We can delay parsing the results until it's needed. // We manually use bson.deserialize below - raw: true + raw: true, + + // Limit the time for the operation to complete, to avoid getting connection timeouts + maxTimeMS: lib_mongo.db.MONGO_OPERATION_TIMEOUT_MS } ) as unknown as mongo.FindCursor; @@ -357,7 +360,9 @@ export class MongoSyncBucketStorage // to the lower of the batch count and size limits. // This is similar to using `singleBatch: true` in the find options, but allows // detecting "hasMore". - let { data, hasMore: batchHasMore } = await readSingleBatch(cursor); + let { data, hasMore: batchHasMore } = await readSingleBatch(cursor).catch((e) => { + throw lib_mongo.mapQueryError(e, 'while reading bucket data'); + }); if (data.length == batchLimit) { // Limit reached - could have more data, despite the cursor being drained. 
batchHasMore = true; @@ -486,9 +491,12 @@ export class MongoSyncBucketStorage } } ], - { session: undefined, readConcern: 'snapshot' } + { session: undefined, readConcern: 'snapshot', maxTimeMS: lib_mongo.db.MONGO_OPERATION_TIMEOUT_MS } ) - .toArray(); + .toArray() + .catch((e) => { + throw lib_mongo.mapQueryError(e, 'while reading checksums'); + }); return new Map( aggregate.map((doc) => { diff --git a/packages/service-errors/src/codes.ts b/packages/service-errors/src/codes.ts index 64e70c619..74f92a73f 100644 --- a/packages/service-errors/src/codes.ts +++ b/packages/service-errors/src/codes.ts @@ -428,6 +428,17 @@ export enum ErrorCode { */ PSYNC_S2402 = 'PSYNC_S2402', + /** + * Query timed out. Could be due to a large query or a temporary load issue on the storage database. + * Retry the request. + */ + PSYNC_S2403 = 'PSYNC_S2403', + + /** + * Query failure on the storage database. See error details for more information. + */ + PSYNC_S2404 = 'PSYNC_S2404', + // ## PSYNC_S23xx: Sync API errors - Postgres Storage // ## PSYNC_S3xxx: Service configuration issues diff --git a/packages/service-errors/src/errors.ts b/packages/service-errors/src/errors.ts index a9a6fe593..fdafcfca9 100644 --- a/packages/service-errors/src/errors.ts +++ b/packages/service-errors/src/errors.ts @@ -243,3 +243,19 @@ export class DatabaseConnectionError extends ServiceError { this.cause = cause; } } + +export class DatabaseQueryError extends ServiceError { + public cause: any; + + constructor(code: ErrorCode, message: string, cause?: any) { + super({ + code: code, + status: 500, + description: message, + // Cause is always logged. Return details via the API only in development mode + details: process.env.NODE_ENV !== 'production' && cause != null ? `cause: ${cause.message}` : undefined, + stack: process.env.NODE_ENV !== 'production' ? cause?.stack : undefined + }); + this.cause = cause; + } +}