From 8cbe3804942206f5c03f2f8a579228ddc1045b9f Mon Sep 17 00:00:00 2001 From: Satyaki Ghosh Date: Tue, 9 Dec 2025 16:13:39 -0500 Subject: [PATCH 1/2] Replace in-process mutex with cross-process file locking for FileStore --- package-lock.json | 68 ++++++----- package.json | 4 +- src/datastore/DataStore.ts | 2 + src/datastore/FileStore.ts | 15 ++- src/datastore/LMDB.ts | 4 +- src/datastore/file/EncryptedFileStore.ts | 139 +++++++++++------------ src/utils/ErrorStackInfo.ts | 10 -- src/utils/Errors.ts | 18 +-- tst/unit/datastore/FileStore.test.ts | 84 ++++++++------ tst/unit/datastore/FilestoreWorker.ts | 29 +++++ tst/unit/utils/Errors.test.ts | 50 ++++---- 11 files changed, 226 insertions(+), 197 deletions(-) create mode 100644 tst/unit/datastore/FilestoreWorker.ts diff --git a/package-lock.json b/package-lock.json index 0570e105..116c7d45 100644 --- a/package-lock.json +++ b/package-lock.json @@ -50,8 +50,8 @@ "partial-json": "0.1.7", "pino": "9.9.0", "pino-pretty": "13.1.1", + "proper-lockfile": "4.1.2", "pyodide": "0.28.2", - "stack-utils": "2.0.6", "tree-sitter": "0.22.4", "tree-sitter-json": "0.24.8", "ts-essentials": "10.1.1", @@ -68,8 +68,8 @@ "@types/archiver": "7.0.0", "@types/js-yaml": "4.0.9", "@types/luxon": "3.7.1", + "@types/proper-lockfile": "4.1.4", "@types/semver": "7.7.1", - "@types/stack-utils": "2.0.3", "@types/yargs": "17.0.33", "@types/yauzl": "2.10.3", "@vitest/coverage-v8": "3.2.4", @@ -7759,6 +7759,16 @@ "@types/pg": "*" } }, + "node_modules/@types/proper-lockfile": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/@types/proper-lockfile/-/proper-lockfile-4.1.4.tgz", + "integrity": "sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/retry": "*" + } + }, "node_modules/@types/readdir-glob": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/@types/readdir-glob/-/readdir-glob-1.1.5.tgz", @@ -7816,13 +7826,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/tedious": { "version": "4.0.14", "resolved": "https://registry.npmjs.org/@types/tedious/-/tedious-4.0.14.tgz", @@ -14603,6 +14606,32 @@ "asap": "~2.0.3" } }, + "node_modules/proper-lockfile": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-4.1.2.tgz", + "integrity": "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "retry": "^0.12.0", + "signal-exit": "^3.0.2" + } + }, + "node_modules/proper-lockfile/node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/proper-lockfile/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/protobufjs": { 
"version": "7.5.4", "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", @@ -15765,27 +15794,6 @@ "node": ">= 10.x" } }, - "node_modules/stack-utils": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", - "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/stack-utils/node_modules/escape-string-regexp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", - "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", diff --git a/package.json b/package.json index d8071c7d..3324bc73 100644 --- a/package.json +++ b/package.json @@ -88,8 +88,8 @@ "partial-json": "0.1.7", "pino": "9.9.0", "pino-pretty": "13.1.1", + "proper-lockfile": "4.1.2", "pyodide": "0.28.2", - "stack-utils": "2.0.6", "tree-sitter": "0.22.4", "tree-sitter-json": "0.24.8", "ts-essentials": "10.1.1", @@ -106,8 +106,8 @@ "@types/archiver": "7.0.0", "@types/js-yaml": "4.0.9", "@types/luxon": "3.7.1", + "@types/proper-lockfile": "4.1.4", "@types/semver": "7.7.1", - "@types/stack-utils": "2.0.3", "@types/yargs": "17.0.33", "@types/yauzl": "2.10.3", "@vitest/coverage-v8": "3.2.4", diff --git a/src/datastore/DataStore.ts b/src/datastore/DataStore.ts index deff8206..bad48c0f 100644 --- a/src/datastore/DataStore.ts +++ b/src/datastore/DataStore.ts @@ -16,6 +16,8 @@ export enum StoreName { private_schemas = 'private_schemas', } +export const PersistedStores = [StoreName.public_schemas, StoreName.sam_schemas]; + export interface DataStore { get(key: string): T | undefined; diff --git a/src/datastore/FileStore.ts b/src/datastore/FileStore.ts index f11ad846..433d9126 100644 --- a/src/datastore/FileStore.ts +++ b/src/datastore/FileStore.ts @@ -5,7 +5,7 @@ import { LoggerFactory } from '../telemetry/LoggerFactory'; import { ScopedTelemetry } from '../telemetry/ScopedTelemetry'; import { Telemetry } from '../telemetry/TelemetryDecorator'; import { formatNumber } from '../utils/String'; -import { DataStore, DataStoreFactory, StoreName } from './DataStore'; +import { DataStore, DataStoreFactory, PersistedStores, StoreName } from './DataStore'; import { EncryptedFileStore } from './file/EncryptedFileStore'; import { encryptionKey } from './file/Encryption'; @@ -20,7 +20,7 @@ export class FileStoreFactory implements DataStoreFactory { private readonly metricsInterval: NodeJS.Timeout; private readonly timeout: NodeJS.Timeout; - constructor(rootDir: string) { + constructor(rootDir: string, storeNames: StoreName[] = PersistedStores) { this.log = LoggerFactory.getLogger('FileStore.Global'); this.fileDbRoot = join(rootDir, 'filedb'); @@ -30,6 +30,10 @@ export class FileStoreFactory implements DataStoreFactory { mkdirSync(this.fileDbDir, { recursive: true }); } + for (const store of storeNames) { + this.stores.set(store, new EncryptedFileStore(encryptionKey(VersionNumber), store, this.fileDbDir)); + } + this.metricsInterval = setInterval(() => { this.emitMetrics(); }, 60 * 1000); @@ -45,10 +49,9 @@ export class FileStoreFactory implements DataStoreFactory { } get(store: StoreName): 
DataStore { - let val = this.stores.get(store); - if (!val) { - val = new EncryptedFileStore(encryptionKey(VersionNumber), store, this.fileDbDir); - this.stores.set(store, val); + const val = this.stores.get(store); + if (val === undefined) { + throw new Error(`Store ${store} not found. Available stores: ${[...this.stores.keys()].join(', ')}`); } return val; } diff --git a/src/datastore/LMDB.ts b/src/datastore/LMDB.ts index e85cf919..159c30e3 100644 --- a/src/datastore/LMDB.ts +++ b/src/datastore/LMDB.ts @@ -6,7 +6,7 @@ import { ScopedTelemetry } from '../telemetry/ScopedTelemetry'; import { Telemetry } from '../telemetry/TelemetryDecorator'; import { isWindows } from '../utils/Environment'; import { formatNumber, toString } from '../utils/String'; -import { DataStore, DataStoreFactory, StoreName } from './DataStore'; +import { DataStore, DataStoreFactory, PersistedStores, StoreName } from './DataStore'; import { LMDBStore } from './lmdb/LMDBStore'; import { stats } from './lmdb/Stats'; import { encryptionStrategy } from './lmdb/Utils'; @@ -22,7 +22,7 @@ export class LMDBStoreFactory implements DataStoreFactory { private readonly stores = new Map(); - constructor(rootDir: string, storeNames: StoreName[] = [StoreName.public_schemas, StoreName.sam_schemas]) { + constructor(rootDir: string, storeNames: StoreName[] = PersistedStores) { this.lmdbDir = join(rootDir, 'lmdb'); const config: RootDatabaseOptionsWithPath = { diff --git a/src/datastore/file/EncryptedFileStore.ts b/src/datastore/file/EncryptedFileStore.ts index e1b7beef..116b5573 100644 --- a/src/datastore/file/EncryptedFileStore.ts +++ b/src/datastore/file/EncryptedFileStore.ts @@ -1,122 +1,115 @@ -import { existsSync, readFileSync, statSync, unlinkSync } from 'fs'; // eslint-disable-line no-restricted-imports -- files being checked +import { existsSync, readFileSync, statSync, writeFileSync } from 'fs'; // eslint-disable-line no-restricted-imports -- files being checked import { writeFile } from 'fs/promises'; import { join } from 'path'; -import { Mutex } from 'async-mutex'; import { Logger } from 'pino'; +import { lock, lockSync } from 'proper-lockfile'; import { LoggerFactory } from '../../telemetry/LoggerFactory'; import { ScopedTelemetry } from '../../telemetry/ScopedTelemetry'; import { TelemetryService } from '../../telemetry/TelemetryService'; import { DataStore } from '../DataStore'; import { decrypt, encrypt } from './Encryption'; +const LOCK_OPTIONS = { stale: 10_000 }; // 10 seconds + export class EncryptedFileStore implements DataStore { private readonly log: Logger; - private readonly file: string; - private content?: Record; + private content: Record = {}; private readonly telemetry: ScopedTelemetry; - private readonly lock = new Mutex(); constructor( private readonly KEY: Buffer, - private readonly name: string, + name: string, fileDbDir: string, ) { this.log = LoggerFactory.getLogger(`FileStore.${name}`); this.file = join(fileDbDir, `${name}.enc`); - this.telemetry = TelemetryService.instance.get(`FileStore.${name}`); - } - - get(key: string): T | undefined { - return this.telemetry.countExecution('get', () => { - if (this.content) { - return this.content[key] as T | undefined; - } - if (!existsSync(this.file)) { - return; - } - - if (this.lock.isLocked()) { - return this.content?.[key]; + if (existsSync(this.file)) { + try { + this.content = this.readFile(); + } catch (error) { + this.log.error(error, 'Failed to decrypt file store, recreating store'); + this.telemetry.count('filestore.recreate', 1); + + const 
release = lockSync(this.file, LOCK_OPTIONS); + try { + this.saveSync(); + } finally { + release(); + } } + } else { + this.saveSync(); + } + } - const decrypted = decrypt(this.KEY, readFileSync(this.file)); - this.content = JSON.parse(decrypted) as Record; - return this.content[key] as T | undefined; - }); + get(key: string): T | undefined { + return this.telemetry.countExecution('get', () => this.content[key] as T | undefined); } put(key: string, value: T): Promise { - return this.lock.runExclusive(() => - this.telemetry.measureAsync('put', async () => { - if (!this.content) { - this.get(key); - } - - this.content = { - ...this.content, - [key]: value, - }; - const encrypted = encrypt(this.KEY, JSON.stringify(this.content)); - await writeFile(this.file, encrypted); - return true; - }), - ); + return this.withLock('put', async () => { + this.content[key] = value; + await this.save(); + return true; + }); } remove(key: string): Promise { - return this.lock.runExclusive(() => { - return this.telemetry.measureAsync('remove', async () => { - if (!this.content) { - this.get(key); - } - - if (!this.content || !(key in this.content)) { - return false; - } + return this.withLock('remove', async () => { + if (!(key in this.content)) { + return false; + } - delete this.content[key]; - const encrypted = encrypt(this.KEY, JSON.stringify(this.content)); - await writeFile(this.file, encrypted); - return true; - }); + delete this.content[key]; + await this.save(); + return true; }); } clear(): Promise { - return this.lock.runExclusive(() => { - return this.telemetry.countExecutionAsync('clear', () => { - if (existsSync(this.file)) { - unlinkSync(this.file); - } - this.content = undefined; - return Promise.resolve(); - }); + return this.withLock('clear', async () => { + this.content = {}; + await this.save(); }); } keys(limit: number): ReadonlyArray { - return this.telemetry.countExecution('keys', () => { - if (!this.content) { - this.get('ANY_KEY'); - } - - return Object.keys(this.content ?? {}).slice(0, limit); - }); + return this.telemetry.countExecution('keys', () => Object.keys(this.content).slice(0, limit)); } stats(): FileStoreStats { - if (!this.content) { - this.get('ANY_KEY'); - } - return { - entries: Object.keys(this.content ?? {}).length, + entries: Object.keys(this.content).length, totalSize: existsSync(this.file) ? 
statSync(this.file).size : 0, }; } + + private async withLock(operation: string, fn: () => Promise): Promise { + return await this.telemetry.measureAsync(operation, async () => { + const release = await lock(this.file, LOCK_OPTIONS); + try { + this.content = this.readFile(); + return await fn(); + } finally { + await release(); + } + }); + } + + private readFile(): Record { + return JSON.parse(decrypt(this.KEY, readFileSync(this.file))) as Record; + } + + private saveSync() { + writeFileSync(this.file, encrypt(this.KEY, JSON.stringify(this.content))); + } + + private async save() { + await writeFile(this.file, encrypt(this.KEY, JSON.stringify(this.content))); + } } export type FileStoreStats = { diff --git a/src/utils/ErrorStackInfo.ts b/src/utils/ErrorStackInfo.ts index a2c0dff4..eee07dc6 100644 --- a/src/utils/ErrorStackInfo.ts +++ b/src/utils/ErrorStackInfo.ts @@ -1,4 +1,3 @@ -import StackUtils from 'stack-utils'; import { LoggerFactory } from '../telemetry/LoggerFactory'; let errorStackInfo: string[] | undefined; @@ -26,12 +25,3 @@ export function determineSensitiveInfo(): string[] { return errorStackInfo; } - -export function getErrorStack() { - try { - return new StackUtils({ cwd: process.cwd() }); - } catch (err) { - LoggerFactory.getLogger('SensitiveInfo').warn(err, 'Cannot get process.cwd()'); - return new StackUtils(); - } -} diff --git a/src/utils/Errors.ts b/src/utils/Errors.ts index d25e9e54..46cb5dec 100644 --- a/src/utils/Errors.ts +++ b/src/utils/Errors.ts @@ -1,5 +1,5 @@ import { ErrorCodes, ResponseError } from 'vscode-languageserver'; -import { determineSensitiveInfo, getErrorStack } from './ErrorStackInfo'; +import { determineSensitiveInfo } from './ErrorStackInfo'; import { toString } from './String'; export function extractErrorMessage(error: unknown) { @@ -21,8 +21,6 @@ export function handleLspError(error: unknown, contextMessage: string): never { throw new ResponseError(ErrorCodes.InternalError, `${contextMessage}: ${extractErrorMessage(error)}`); } -const Stack = getErrorStack(); - /** * Best effort extraction of location of exception based on stack trace */ @@ -40,7 +38,7 @@ export function extractLocationFromStack(stack?: string): Record } } - return newLine; + return newLine.replaceAll('\\\\', '/').replaceAll('\\', '/'); }); if (lines.length === 0) { @@ -49,16 +47,6 @@ export function extractLocationFromStack(stack?: string): Record const result: Record = {}; result['error.message'] = lines[0]; - - for (const [idx, line] of lines.slice(1, 6).entries()) { - const parsed = Stack.parseLine(line); - - if (!parsed) { - continue; - } - - result[`error.stack${idx}`] = `${parsed.function} ${parsed.file} ${parsed.line}:${parsed.column}`; - } - + result['error.stack'] = lines.slice(1).join('\n'); return result; } diff --git a/tst/unit/datastore/FileStore.test.ts b/tst/unit/datastore/FileStore.test.ts index 6ab79d9e..df9c5830 100644 --- a/tst/unit/datastore/FileStore.test.ts +++ b/tst/unit/datastore/FileStore.test.ts @@ -1,8 +1,10 @@ -import { rmSync } from 'fs'; +import { rmSync, mkdirSync, writeFileSync } from 'fs'; import { join } from 'path'; import { v4 } from 'uuid'; import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { DataStore, StoreName } from '../../../src/datastore/DataStore'; +import { EncryptedFileStore } from '../../../src/datastore/file/EncryptedFileStore'; +import { encryptionKey } from '../../../src/datastore/file/Encryption'; import { FileStoreFactory } from '../../../src/datastore/FileStore'; describe('FileStore', () => { @@ 
-137,21 +139,17 @@ describe('FileStore', () => { const key = 'shared-key'; const schemaValue = 'schema-value'; const astValue = 'ast-value'; - const settingsValue = 'settings-value'; const schemaStore = fileFactory.get(StoreName.public_schemas); const astStore = fileFactory.get(StoreName.sam_schemas); - const settingsStore = fileFactory.get(StoreName.private_schemas); await schemaStore.put(key, schemaValue); await astStore.put(key, astValue); - await settingsStore.put(key, settingsValue); await schemaStore.clear(); expect(schemaStore.get(key)).toBeUndefined(); expect(astStore.get(key)).toBe(astValue); - expect(settingsStore.get(key)).toBe(settingsValue); }); it('should allow putting new data after clearing', async () => { @@ -204,6 +202,25 @@ describe('FileStore', () => { }); describe('persistence', () => { + it('should preserve existing data when put is called on fresh instance', async () => { + // Test EncryptedFileStore directly to avoid stats() being called + const encTestDir = join(testDir, 'enc-test'); + mkdirSync(encTestDir, { recursive: true }); + const key = encryptionKey(2); + + // Session 1: write key1 + const store1 = new EncryptedFileStore(key, 'test', encTestDir); + await store1.put('key1', 'value1'); + + // Session 2: fresh instance, put key2 WITHOUT reading first + const store2 = new EncryptedFileStore(key, 'test', encTestDir); + await store2.put('key2', 'value2'); + + // Verify both keys exist - key1 should NOT be lost + expect(store2.get('key1')).toBe('value1'); + expect(store2.get('key2')).toBe('value2'); + }); + it('should persist data across store instances', async () => { const key = 'persist-key'; const value = { data: 'persist-value' }; @@ -235,37 +252,6 @@ describe('FileStore', () => { }); }); - describe('concurrent operations', () => { - it('should handle concurrent puts', async () => { - const promises = Array.from({ length: 10 }, (_, i) => fileStore.put(`key${i}`, `value${i}`)); - - const results = await Promise.all(promises); - expect(results.every((r) => r === true)).toBe(true); - - const keys = fileStore.keys(20); - expect(keys).toHaveLength(10); - }); - - it('should handle concurrent mixed operations', async () => { - await fileStore.put('key1', 'value1'); - await fileStore.put('key2', 'value2'); - - const operations = [ - fileStore.put('key3', 'value3'), - fileStore.get('key1'), - fileStore.remove('key2'), - fileStore.put('key4', 'value4'), - ]; - - await Promise.all(operations); - - expect(fileStore.get('key1')).toBe('value1'); - expect(fileStore.get('key2')).toBeUndefined(); - expect(fileStore.get('key3')).toBe('value3'); - expect(fileStore.get('key4')).toBe('value4'); - }); - }); - describe('edge cases', () => { it('should handle empty string as key', async () => { await fileStore.put('', 'empty-key-value'); @@ -294,4 +280,30 @@ describe('FileStore', () => { await expect(fileStore.clear()).resolves.not.toThrow(); }); }); + + describe('recovery', () => { + it('should recover from corrupted file and allow new writes', async () => { + const encTestDir = join(testDir, 'recovery-test'); + mkdirSync(encTestDir, { recursive: true }); + const key = encryptionKey(2); + + // Write corrupted data to the file + const corruptedFile = join(encTestDir, 'test.enc'); + writeFileSync(corruptedFile, 'corrupted-not-encrypted-data'); + + // Should not throw, should recover + const store = new EncryptedFileStore(key, 'test', encTestDir); + + // Should start with empty content after recovery + expect(store.get('anyKey')).toBeUndefined(); + + // Should be able to write new data 
+ await store.put('newKey', 'newValue'); + expect(store.get('newKey')).toBe('newValue'); + + // Verify data persists after reload + const store2 = new EncryptedFileStore(key, 'test', encTestDir); + expect(store2.get('newKey')).toBe('newValue'); + }); + }); }); diff --git a/tst/unit/datastore/FilestoreWorker.ts b/tst/unit/datastore/FilestoreWorker.ts new file mode 100644 index 00000000..42fa9234 --- /dev/null +++ b/tst/unit/datastore/FilestoreWorker.ts @@ -0,0 +1,29 @@ +import { join } from 'path'; +import { v4 } from 'uuid'; +import { EncryptedFileStore } from '../../../src/datastore/file/EncryptedFileStore'; +import { encryptionKey } from '../../../src/datastore/file/Encryption'; +import { LoggerFactory } from '../../../src/telemetry/LoggerFactory'; +import { TelemetryService } from '../../../src/telemetry/TelemetryService'; + +// Worker script for multiprocess FileStore testing +LoggerFactory.initialize('silent', join(process.cwd(), 'node_modules', '.cache', 'filedb-worker', v4())); +TelemetryService.initialize(undefined, { telemetryEnabled: false }); + +const [encTestDir, workerId, numWrites] = process.argv.slice(2); +const key = encryptionKey(2); + +async function main() { + const store = new EncryptedFileStore(key, 'test', encTestDir); + + for (let i = 0; i < Number.parseInt(numWrites); i++) { + await store.put(`worker${workerId}_key${i}`, `worker${workerId}_value${i}`); + } +} + +/* eslint-disable unicorn/no-process-exit, unicorn/prefer-top-level-await */ +main() + .then(() => process.exit(0)) + .catch((err) => { + console.error(err); // eslint-disable-line no-console + process.exit(1); + }); diff --git a/tst/unit/utils/Errors.test.ts b/tst/unit/utils/Errors.test.ts index 5f56ba98..950d88e9 100644 --- a/tst/unit/utils/Errors.test.ts +++ b/tst/unit/utils/Errors.test.ts @@ -14,7 +14,7 @@ describe('extractLocationFromStack', () => { const stack = 'Error: test\n at Object. (/path/to/file.ts:01234:56789)'; expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'Object. /path/to/file.ts 1234:56789', + 'error.stack': 'at Object. (/path/to/file.ts:01234:56789)', }); }); @@ -22,7 +22,7 @@ describe('extractLocationFromStack', () => { const stack = 'Error: test\n at /path/to/file.js:01234:56789'; expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'undefined /path/to/file.js 1234:56789', + 'error.stack': 'at /path/to/file.js:01234:56789', }); }); @@ -30,13 +30,16 @@ describe('extractLocationFromStack', () => { const stack = 'Error: test\n at Object. (C:\\path\\to\\file.ts:01234:56789)'; expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'Object. C:/path/to/file.ts 1234:56789', + 'error.stack': `at Object. 
(C:/path/to/file.ts:01234:56789)`, }); }); test('returns just message when no match found', () => { const stack = 'Error: test\n at something without location'; - expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test' }); + expect(extractLocationFromStack(stack)).toEqual({ + 'error.message': 'Error: test', + 'error.stack': 'at something without location', + }); }); test('extract error from exception', () => { @@ -48,8 +51,9 @@ Error: Request cancelled for key: SendDocuments `; expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: Request cancelled for key: SendDocuments', - 'error.stack0': 'Delayer.cancel webpack://aws/cloudformation-languageserver/[*]/[*]/Delayer.ts?f28b 145:28', - 'error.stack1': 'eval webpack://aws/cloudformation-languageserver/[*]/[*]/Delayer.ts?f28b 36:18', + 'error.stack': `at Delayer.cancel (webpack://aws/cloudformation-languageserver/[*]/[*]/Delayer.ts?f28b:145:28) +at eval (webpack://aws/cloudformation-languageserver/[*]/[*]/Delayer.ts?f28b:36:18) +at new Promise ()`, }); }); @@ -67,12 +71,12 @@ Error: ENOENT: no such file or directory, scandir 'some-dir/cloudformation-langu ).toEqual({ 'error.message': "Error: ENOENT: no such file or directory, scandir 'some-dir/cloudformation-languageserver/bundle/development/.aws-cfn-storage/lmdb'", - 'error.stack0': 'readdirSync node:fs 1584:26', - 'error.stack1': 'undefined node:electron/js2c/node_init 2:16044', - 'error.stack2': - 'LMDBStoreFactory.cleanupOldVersions webpack://aws/cloudformation-languageserver/[*]/datastore/LMDB.ts?d928 98:36', - 'error.stack3': 'Timeout.eval webpack://aws/cloudformation-languageserver/[*]/datastore/LMDB.ts?d928 58:22', - 'error.stack4': 'listOnTimeout node:internal/timers 588:17', + 'error.stack': `at readdirSync (node:fs:1584:26) +at node:electron/js2c/node_init:2:16044 +at LMDBStoreFactory.cleanupOldVersions (webpack://aws/cloudformation-languageserver/[*]/datastore/LMDB.ts?d928:98:36) +at Timeout.eval (webpack://aws/cloudformation-languageserver/[*]/datastore/LMDB.ts?d928:58:22) +at listOnTimeout (node:internal/timers:588:17) +at process.processTimers (node:internal/timers:523:7)`, }); }); @@ -87,11 +91,9 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket ).toEqual({ 'error.message': 'Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket hang up)', - 'error.stack0': - 'PeriodicExportingMetricReader._doRun cloudformation-languageserver/1.0.0/cloudformation-languageserver-1.0.0-darwin-x64-node22/node_modules/@opentelemetry/sdk-metrics/build/[*]/export/PeriodicExportingMetricReader.js 88:19', - 'error.stack1': 'process.processTicksAndRejections node:internal/process/task_queues 105:5', - 'error.stack2': - 'async PeriodicExportingMetricReader._runOnce cloudformation-languageserver/1.0.0/cloudformation-languageserver-1.0.0-darwin-x64-node22/node_modules/@opentelemetry/sdk-metrics/build/[*]/export/PeriodicExportingMetricReader.js 57:13', + 'error.stack': `at PeriodicExportingMetricReader._doRun (cloudformation-languageserver/1.0.0/cloudformation-languageserver-1.0.0-darwin-x64-node22/node_modules/@opentelemetry/sdk-metrics/build/[*]/export/PeriodicExportingMetricReader.js:88:19) +at process.processTicksAndRejections (node:internal/process/task_queues:105:5) +at async PeriodicExportingMetricReader._runOnce 
(cloudformation-languageserver/1.0.0/cloudformation-languageserver-1.0.0-darwin-x64-node22/node_modules/@opentelemetry/sdk-metrics/build/[*]/export/PeriodicExportingMetricReader.js:57:13)`, }); }); @@ -101,7 +103,7 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'Object. C:/testuser/cloudformation-languageserver//[*]/file.ts 10:5', + 'error.stack': 'at Object. (C:/testuser/cloudformation-languageserver/[*]/file.ts:10:5)', }); }); @@ -111,7 +113,7 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'func C:/cloudformation-languageserver/[*]/file.ts 10:5', + 'error.stack': 'at func (C:/cloudformation-languageserver/[*]/file.ts:10:5)', }); }); @@ -120,6 +122,7 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', + 'error.stack': 'at ', }); }); @@ -128,8 +131,9 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'func1 file.ts 1:1', - 'error.stack2': 'func2 file.ts 2:2', + 'error.stack': `at func1 (file.ts:1:1) +at +at func2 (file.ts:2:2)`, }); }); @@ -140,8 +144,8 @@ Error: PeriodicExportingMetricReader: metrics export failed (error Error: socket expect(extractLocationFromStack(stack)).toEqual({ 'error.message': 'Error: test', - 'error.stack0': 'Module._compile node:internal/modules/cjs/loader 1159:14', - 'error.stack1': 'Object.Module._extensions..js node:internal/modules/cjs/loader 1213:10', + 'error.stack': `at Module._compile (node:internal/modules/cjs/loader:1159:14) +at Object.Module._extensions..js (node:internal/modules/cjs/loader:1213:10)`, }); }); }); From 4c6ac0144a4c219a79da9a826cd5f4249bb2e873 Mon Sep 17 00:00:00 2001 From: Satyaki Ghosh Date: Thu, 11 Dec 2025 16:09:14 -0500 Subject: [PATCH 2/2] Add multiple worker tests --- src/datastore/file/EncryptedFileStore.ts | 7 +-- tst/unit/datastore/FileStore.test.ts | 68 +++++++++++++++++++++++- 2 files changed, 71 insertions(+), 4 deletions(-) diff --git a/src/datastore/file/EncryptedFileStore.ts b/src/datastore/file/EncryptedFileStore.ts index 116b5573..5cab1ddf 100644 --- a/src/datastore/file/EncryptedFileStore.ts +++ b/src/datastore/file/EncryptedFileStore.ts @@ -2,14 +2,15 @@ import { existsSync, readFileSync, statSync, writeFileSync } from 'fs'; // eslin import { writeFile } from 'fs/promises'; import { join } from 'path'; import { Logger } from 'pino'; -import { lock, lockSync } from 'proper-lockfile'; +import { lock, LockOptions, lockSync } from 'proper-lockfile'; import { LoggerFactory } from '../../telemetry/LoggerFactory'; import { ScopedTelemetry } from '../../telemetry/ScopedTelemetry'; import { TelemetryService } from '../../telemetry/TelemetryService'; import { DataStore } from '../DataStore'; import { decrypt, encrypt } from './Encryption'; -const LOCK_OPTIONS = { stale: 10_000 }; // 10 seconds +const LOCK_OPTIONS_SYNC: LockOptions = { stale: 10_000 }; +const LOCK_OPTIONS: LockOptions = { ...LOCK_OPTIONS_SYNC, retries: { retries: 15, minTimeout: 10, maxTimeout: 500 } }; export class EncryptedFileStore implements DataStore { private readonly log: Logger; @@ -33,7 +34,7 @@ export class EncryptedFileStore implements 
DataStore { this.log.error(error, 'Failed to decrypt file store, recreating store'); this.telemetry.count('filestore.recreate', 1); - const release = lockSync(this.file, LOCK_OPTIONS); + const release = lockSync(this.file, LOCK_OPTIONS_SYNC); try { this.saveSync(); } finally { diff --git a/tst/unit/datastore/FileStore.test.ts b/tst/unit/datastore/FileStore.test.ts index df9c5830..f32283af 100644 --- a/tst/unit/datastore/FileStore.test.ts +++ b/tst/unit/datastore/FileStore.test.ts @@ -1,5 +1,7 @@ +import { execFile } from 'child_process'; import { rmSync, mkdirSync, writeFileSync } from 'fs'; import { join } from 'path'; +import { promisify } from 'util'; import { v4 } from 'uuid'; import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { DataStore, StoreName } from '../../../src/datastore/DataStore'; @@ -203,7 +205,6 @@ describe('FileStore', () => { describe('persistence', () => { it('should preserve existing data when put is called on fresh instance', async () => { - // Test EncryptedFileStore directly to avoid stats() being called const encTestDir = join(testDir, 'enc-test'); mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); @@ -252,6 +253,37 @@ describe('FileStore', () => { }); }); + describe('concurrent operations', () => { + it('should handle concurrent puts', async () => { + const promises = Array.from({ length: 10 }, (_, i) => fileStore.put(`key${i}`, `value${i}`)); + + const results = await Promise.all(promises); + expect(results.every((r) => r === true)).toBe(true); + + const keys = fileStore.keys(20); + expect(keys).toHaveLength(10); + }); + + it('should handle concurrent mixed operations', async () => { + await fileStore.put('key1', 'value1'); + await fileStore.put('key2', 'value2'); + + const operations = [ + fileStore.put('key3', 'value3'), + fileStore.get('key1'), + fileStore.remove('key2'), + fileStore.put('key4', 'value4'), + ]; + + await Promise.all(operations); + + expect(fileStore.get('key1')).toBe('value1'); + expect(fileStore.get('key2')).toBeUndefined(); + expect(fileStore.get('key3')).toBe('value3'); + expect(fileStore.get('key4')).toBe('value4'); + }); + }); + describe('edge cases', () => { it('should handle empty string as key', async () => { await fileStore.put('', 'empty-key-value'); @@ -306,4 +338,38 @@ describe('FileStore', () => { expect(store2.get('newKey')).toBe('newValue'); }); }); + + describe('multiprocess', () => { + it('should handle concurrent writes from multiple processes', async () => { + const encTestDir = join(testDir, 'multiprocess-test'); + mkdirSync(encTestDir, { recursive: true }); + + const workerPath = join(process.cwd(), 'tst', 'unit', 'datastore', 'FilestoreWorker.ts'); + const numWorkers = 3; + const numWrites = 5; + const execFileAsync = promisify(execFile); + + const workers = Array.from({ length: numWorkers }, (_, i) => + execFileAsync(process.execPath, [ + '--import', + 'tsx', + workerPath, + encTestDir, + String(i), + String(numWrites), + ]), + ); + + await Promise.all(workers); + + const key = encryptionKey(2); + const store = new EncryptedFileStore(key, 'test', encTestDir); + + for (let w = 0; w < numWorkers; w++) { + for (let k = 0; k < numWrites; k++) { + expect(store.get(`worker${w}_key${k}`)).toBe(`worker${w}_value${k}`); + } + } + }); + }); });
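The locking flow both patches rely on is proper-lockfile's acquire/mutate/release cycle: lock() resolves to a release callback once the sidecar `<file>.lock` directory is taken, and the stale/retries options (the same 10_000 ms stale window and 15-retry backoff used in LOCK_OPTIONS above) decide how long competing processes wait. A minimal standalone sketch of that cycle, with an illustrative path and plain-JSON payload that are not taken from the store itself:

import { existsSync, readFileSync, writeFileSync } from 'fs';
import { lock } from 'proper-lockfile';

// Illustrative path and shape; the real store keeps encrypted JSON in `<name>.enc` under filedb/.
const file = '/tmp/example-store.json';

async function increment(): Promise<number> {
  // proper-lockfile resolves the target's realpath, so the file must exist before lock().
  if (!existsSync(file)) {
    writeFileSync(file, JSON.stringify({ count: 0 }));
  }

  // Acquire the cross-process advisory lock (a sidecar `example-store.json.lock` directory).
  // Competing processes retry with backoff until the holder releases or the lock goes stale.
  const release = await lock(file, { stale: 10_000, retries: { retries: 15, minTimeout: 10, maxTimeout: 500 } });
  try {
    // Re-read inside the lock so a concurrent writer's update is not clobbered.
    const content = JSON.parse(readFileSync(file, 'utf8')) as { count: number };
    content.count += 1;
    writeFileSync(file, JSON.stringify(content));
    return content.count;
  } finally {
    await release();
  }
}

EncryptedFileStore.withLock follows the same shape: acquire the lock, re-read and decrypt the file so writes from other processes are preserved, apply the mutation, persist, then release.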