diff --git a/packages/mongodb-log-writer/src/mongo-log-manager.spec.ts b/packages/mongodb-log-writer/src/mongo-log-manager.spec.ts index 39eea94a..75c5fdc9 100644 --- a/packages/mongodb-log-writer/src/mongo-log-manager.spec.ts +++ b/packages/mongodb-log-writer/src/mongo-log-manager.spec.ts @@ -22,13 +22,13 @@ describe('MongoLogManager', function () { onerror = sinon.stub(); directory = path.join( os.tmpdir(), - `log-writer-test-${Math.random()}-${Date.now()}` + `log-writer-test-${Math.random()}-${Date.now()}`, ); await fs.mkdir(directory, { recursive: true }); }); afterEach(async function () { - await fs.rmdir(directory, { recursive: true }); + await fs.rm(directory, { recursive: true }); sinon.restore(); }); @@ -84,10 +84,10 @@ describe('MongoLogManager', function () { const writer = await manager.createLogWriter(); expect( - path.relative(directory, writer.logFilePath as string)[0] + path.relative(directory, writer.logFilePath as string)[0], ).to.not.equal('.'); expect((writer.logFilePath as string).includes(writer.logId)).to.equal( - true + true, ); writer.info('component', mongoLogId(12345), 'context', 'message', { @@ -147,15 +147,54 @@ describe('MongoLogManager', function () { } }); + it('can recursively clean up old files', async function () { + const childDirectory = path.join(directory, 'child', '1'); + await fs.mkdir(childDirectory, { recursive: true }); + // The child manager writes in a subdirectory of the log directory + // The expectation is that when the parent manager recursively cleans up the + // old log files, it will be able to delete the files of the child manager. 
+ const childManager = new MongoLogManager({ + directory: childDirectory, + retentionDays: 60, + onwarn, + onerror, + }); + + const childWriter = await childManager.createLogWriter(); + childWriter.info('child', mongoLogId(12345), 'context', 'message'); + + childWriter.end(); + await once(childWriter, 'finish'); + await fs.stat(childWriter.logFilePath as string); + await new Promise((resolve) => setTimeout(resolve, 100)); + + const parentManager = new MongoLogManager({ + directory, + retentionDays: 0.000001, // 86.4 ms + onwarn, + onerror, + }); + + await parentManager.cleanupOldLogFiles({ recursive: true }); + + try { + await fs.stat(childWriter.logFilePath as string); + + expect.fail('missed exception'); + } catch (err: any) { + expect(err.code).to.equal('ENOENT'); + } + }); + const getFilesState = async (paths: string[]) => { return ( await Promise.all( paths.map((path) => fs.stat(path).then( () => 1, - () => 0 - ) - ) + () => 0, + ), + ), ) ).join(''); }; @@ -174,7 +213,7 @@ describe('MongoLogManager', function () { for (let i = 0; i < 10; i++) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, ''); paths.unshift(filename); @@ -198,7 +237,7 @@ describe('MongoLogManager', function () { const faultyFile = path.join( directory, - ObjectId.createFromTime(offset - 10).toHexString() + '_log' + ObjectId.createFromTime(offset - 10).toHexString() + '_log', ); await fs.writeFile(faultyFile, ''); @@ -209,7 +248,7 @@ describe('MongoLogManager', function () { for (let i = 5; i >= 0; i--) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, ''); validFiles.push(filename); @@ -254,7 +293,7 @@ describe('MongoLogManager', function () { for (let i = 1; i >= 0; i--) { const withoutPrefix = 
path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(withoutPrefix, ''); paths.push(withoutPrefix); @@ -263,7 +302,7 @@ describe('MongoLogManager', function () { directory, 'different_' + ObjectId.createFromTime(offset - i).toHexString() + - '_log' + '_log', ); await fs.writeFile(withDifferentPrefix, ''); paths.push(withDifferentPrefix); @@ -273,7 +312,7 @@ describe('MongoLogManager', function () { for (let i = 9; i >= 0; i--) { const filename = path.join( directory, - `custom_${ObjectId.createFromTime(offset - i).toHexString()}_log` + `custom_${ObjectId.createFromTime(offset - i).toHexString()}_log`, ); await fs.writeFile(filename, ''); paths.push(filename); @@ -305,7 +344,7 @@ describe('MongoLogManager', function () { for (let i = 0; i < 10; i++) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, '0'.repeat(1024)); paths.unshift(filename); @@ -332,7 +371,7 @@ describe('MongoLogManager', function () { for (let i = 0; i < 10; i++) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, ''); } @@ -345,11 +384,12 @@ describe('MongoLogManager', function () { describe('with a random file order', function () { let paths: string[] = []; const times = [92, 90, 1, 2, 3, 91]; + let offset: number; beforeEach(async function () { const fileNames: string[] = []; paths = []; - const offset = Math.floor(Date.now() / 1000); + offset = Math.floor(Date.now() / 1000); for (const time of times) { const fileName = @@ -359,19 +399,6 @@ describe('MongoLogManager', function () { fileNames.push(fileName); paths.push(fullPath); } - - sinon.replace(fs, 'opendir', async () => - Promise.resolve({ - 
[Symbol.asyncIterator]: function* () { - for (const fileName of fileNames) { - yield { - name: fileName, - isFile: () => true, - }; - } - }, - } as unknown as Dir) - ); }); it('cleans up in the expected order with maxLogFileCount', async function () { @@ -405,6 +432,81 @@ describe('MongoLogManager', function () { expect(await getFilesState(paths)).to.equal('001110'); }); + + describe('with subdirectories', function () { + beforeEach(async function () { + // Add a recent child file + const childPath1 = path.join( + directory, + 'subdir1', + ObjectId.createFromTime(offset - 2).toHexString() + '_log', + ); + await fs.mkdir(path.join(directory, 'subdir1'), { recursive: true }); + await fs.writeFile(childPath1, '0'.repeat(1024)); + paths.push(childPath1); + + // Add an older child file + const childPath2 = path.join( + directory, + 'subdir2', + ObjectId.createFromTime(offset - 20).toHexString() + '_log', + ); + await fs.mkdir(path.join(directory, 'subdir2'), { recursive: true }); + await fs.writeFile(childPath2, '0'.repeat(1024)); + paths.push(childPath2); + }); + + it('cleans up in the expected order with maxLogFileCount', async function () { + const manager = new MongoLogManager({ + directory, + retentionDays, + maxLogFileCount: 3, + onwarn, + onerror, + }); + + expect(await getFilesState(paths)).to.equal('11111111'); + + await manager.cleanupOldLogFiles({ recursive: true }); + + expect(await getFilesState(paths)).to.equal('00110010'); + }); + + it('deletes empty directories after cleanup', async function () { + // Add an old file in subdir1 - it should be deleted, but subdir1 should remain + const oldSubdir1Child = path.join( + directory, + 'subdir1', + ObjectId.createFromTime(offset - 50).toHexString() + '_log', + ); + await fs.writeFile(oldSubdir1Child, '0'.repeat(1024)); + paths.push(oldSubdir1Child); + + const manager = new MongoLogManager({ + directory, + retentionDays, + maxLogFileCount: 3, + onwarn, + onerror, + }); + + expect(await 
getFilesState(paths)).to.equal('111111111'); + + await manager.cleanupOldLogFiles({ recursive: true }); + + expect(await getFilesState(paths)).to.equal('001100100'); + + // subdir1 should have been left alone because of the newer child + // subdir2 should have been deleted because it was empty + await fs.stat(path.join(directory, 'subdir1')); + try { + await fs.stat(path.join(directory, 'subdir2')); + expect.fail('subdir2 should have been deleted'); + } catch (err: any) { + expect(err.code).to.equal('ENOENT'); + } + }); + }); }); describe('with multiple log retention settings', function () { @@ -426,13 +528,13 @@ describe('MongoLogManager', function () { const yesterday = today - 25 * 60 * 60; const todayFile = path.join( directory, - ObjectId.createFromTime(today - i).toHexString() + '_log' + ObjectId.createFromTime(today - i).toHexString() + '_log', ); await fs.writeFile(todayFile, '0'.repeat(1024)); const yesterdayFile = path.join( directory, - ObjectId.createFromTime(yesterday - i).toHexString() + '_log' + ObjectId.createFromTime(yesterday - i).toHexString() + '_log', ); await fs.writeFile(yesterdayFile, '0'.repeat(1024)); @@ -467,7 +569,7 @@ describe('MongoLogManager', function () { for (let i = 0; i < 10; i++) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, '0'.repeat(1024)); paths.unshift(filename); @@ -496,7 +598,7 @@ describe('MongoLogManager', function () { for (let i = 0; i < 10; i++) { const filename = path.join( directory, - ObjectId.createFromTime(offset - i).toHexString() + '_log' + ObjectId.createFromTime(offset - i).toHexString() + '_log', ); await fs.writeFile(filename, '0'.repeat(1024)); paths.unshift(filename); @@ -528,7 +630,7 @@ describe('MongoLogManager', function () { }); const writer = await manager.createLogWriter(); - expect(onwarn).to.have.been.calledOnce; // eslint-disable-line + 
expect(onwarn).to.have.been.calledOnce; expect(writer.logFilePath).to.equal(null); writer.info('component', mongoLogId(12345), 'context', 'message', { @@ -538,7 +640,7 @@ await once(writer, 'finish'); }); - it('optionally allow gzip’ed log files', async function () { + it("optionally allow gzip'ed log files", async function () { const manager = new MongoLogManager({ directory, retentionDays, @@ -566,7 +668,7 @@ expect(log[0].t.$date).to.be.a('string'); }); - it('optionally can read truncated gzip’ed log files', async function () { + it("optionally can read truncated gzip'ed log files", async function () { const manager = new MongoLogManager({ directory, retentionDays, @@ -608,7 +710,7 @@ }; const opendirStub = sinon .stub(fs, 'opendir') - .resolves(fakeDirHandle as any); + .resolves(fakeDirHandle as unknown as Dir); retentionDays = 0.000001; // 86.4 ms const manager = new MongoLogManager({ diff --git a/packages/mongodb-log-writer/src/mongo-log-manager.ts b/packages/mongodb-log-writer/src/mongo-log-manager.ts index 41b4d374..189862a8 100644 --- a/packages/mongodb-log-writer/src/mongo-log-manager.ts +++ b/packages/mongodb-log-writer/src/mongo-log-manager.ts @@ -1,6 +1,7 @@ import path from 'path'; import { ObjectId } from 'bson'; import { once } from 'events'; + import type { Dir } from 'fs'; import { createWriteStream, promises as fs } from 'fs'; import { createGzip, constants as zlibConstants } from 'zlib'; import { Heap } from 'heap-js'; @@ -39,22 +40,38 @@ if (options.prefix) { if (!/^[a-z0-9_]+$/i.test(options.prefix)) { throw new Error( - 'Prefix must only contain letters, numbers, and underscores' + 'Prefix must only contain letters, numbers, and underscores', ); } } this._options = options; } - private async deleteFile(path: string): Promise<void> { + private async deleteFile(filePath: string): Promise<void> { try { - 
await fs.unlink(path); + await fs.unlink(filePath); // eslint-disable-next-line @typescript-eslint/no-explicit-any } catch (err: any) { if (err?.code !== 'ENOENT') { - this._options.onerror(err as Error, path); + this._options.onerror(err as Error, filePath); } } + + let basePath = path.dirname(filePath); + while (basePath !== this._options.directory) { + // Delete empty directories up to the base directory + try { + await fs.rmdir(basePath); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } catch (err: any) { + // If the directory is not empty, we stop deleting. + if (err?.code === 'ENOTEMPTY' || err?.code === 'ENOENT') { + break; + } + } + + basePath = path.dirname(basePath); + } } private get prefix() { @@ -62,19 +79,24 @@ } /** Clean up log files older than `retentionDays`. */ - async cleanupOldLogFiles( + async cleanupOldLogFiles({ maxDurationMs = 5_000, - remainingRetries = 1 - ): Promise<void> { + remainingRetries = 1, + recursive = false, + }: { + maxDurationMs?: number; + remainingRetries?: number; + recursive?: boolean; + } = {}): Promise<void> { const deletionStartTimestamp = Date.now(); // Delete files older than N days const deletionCutoffTimestamp = deletionStartTimestamp - this._options.retentionDays * 86400 * 1000; const dir = this._options.directory; - let dirHandle; + let dirHandle: Dir; try { - dirHandle = await fs.opendir(dir); + dirHandle = await fs.opendir(dir, { recursive }); } catch { return; } @@ -101,18 +123,25 @@ // a large number of machines using a shared network-mounted $HOME directory // where lots and lots of log files end up and filesystem operations happen // with network latency. 
- if (Date.now() - deletionStartTimestamp > maxDurationMs) break; + if (Date.now() - deletionStartTimestamp > maxDurationMs) { + break; + } + + if (!dirent.isFile()) { + continue; + } - if (!dirent.isFile()) continue; const logRegExp = new RegExp( `^${this.prefix}(?<id>[a-f0-9]{24})_log(\\.gz)?$`, - 'i' + 'i', ); const { id } = logRegExp.exec(dirent.name)?.groups ?? {}; - if (!id) continue; + if (!id) { + continue; + } const fileTimestamp = +new ObjectId(id).getTimestamp(); - const fullPath = path.join(dir, dirent.name); + const fullPath = path.join(dirent.parentPath ?? dir, dirent.name); // If the file is older than expected, delete it. If the file is recent, // add it to the list of seen files, and if that list is too large, remove @@ -143,22 +172,34 @@ leastRecentFileHeap.size() > this._options.maxLogFileCount ) { const toDelete = leastRecentFileHeap.pop(); - if (!toDelete) continue; + if (!toDelete) { + continue; + } + await this.deleteFile(toDelete.fullPath); usedStorageSize -= toDelete.fileSize ?? 0; } } - } catch (statErr: any) { + } catch (err: unknown) { // Multiple processes may attempt to clean up log files in parallel. // A situation can arise where one process tries to read a file // that another process has already unlinked (see MONGOSH-1914). // To handle such scenarios, we will catch lstat errors and retry cleaning up // to let different processes reach out to different log files. 
- if (statErr.code === 'ENOENT' && remainingRetries > 0) { - await this.cleanupOldLogFiles( - maxDurationMs - (Date.now() - deletionStartTimestamp), - remainingRetries - 1 - ); + if ( + typeof err === 'object' && + err !== null && + 'code' in err && + err.code === 'ENOENT' && + remainingRetries > 0 + ) { + await this.cleanupOldLogFiles({ + maxDurationMs: maxDurationMs - (Date.now() - deletionStartTimestamp), + remainingRetries: remainingRetries - 1, + recursive, + }); + } else { + this._options.onerror(err as Error, dir); } } @@ -184,7 +225,7 @@ export class MongoLogManager { const doGzip = !!this._options.gzip; const logFilePath = path.join( this._options.directory, - `${this.prefix}${logId}_log${doGzip ? '.gz' : ''}` + `${this.prefix}${logId}_log${doGzip ? '.gz' : ''}`, ); let originalTarget: Writable;