@@ -1,7 +1,7 @@
 import { MongoLogManager, mongoLogId } from '.';
 import { ObjectId } from 'bson';
 import { once } from 'events';
-import type { Stats } from 'fs';
+import type { Stats, Dir } from 'fs';
 import { promises as fs } from 'fs';
 import path from 'path';
 import os from 'os';
@@ -178,7 +178,7 @@ describe('MongoLogManager', function () {
     expect(leftoverFiles).deep.equals([faultyFile, ...validFiles.slice(3)]);
   });
 
-  it('cleans up least recent log files when requested with a storage limit', async function () {
+  it('cleans up least recent log files when over a storage limit', async function () {
     const manager = new MongoLogManager({
       directory,
       retentionDays,
@@ -207,6 +207,172 @@ describe('MongoLogManager', function () {
     expect(await getFilesState(paths)).to.equal('0000111111');
   });
 
+  describe('with a random file order', function () {
+    let paths: string[] = [];
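+    // File ages in seconds; deliberately not in sorted order.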
+    const times = [92, 90, 1, 2, 3, 91];
+
+    beforeEach(async function () {
+      const fileNames: string[] = [];
+      paths = [];
+      const offset = Math.floor(Date.now() / 1000);
+
+      for (const time of times) {
+        const fileName =
+          ObjectId.createFromTime(offset - time).toHexString() + '_log';
+        const fullPath = path.join(directory, fileName);
+        await fs.writeFile(fullPath, '0'.repeat(1024));
+        fileNames.push(fileName);
+        paths.push(fullPath);
+      }
+
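+      // Stub fs.opendir so the directory listing yields the files in the
+      // fixed, unsorted order above instead of whatever order the OS returns.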
+      sinon.replace(fs, 'opendir', async () =>
+        Promise.resolve({
+          [Symbol.asyncIterator]: function* () {
+            for (const fileName of fileNames) {
+              yield {
+                name: fileName,
+                isFile: () => true,
+              };
+            }
+          },
+        } as unknown as Dir)
+      );
+    });
+
+    it('cleans up in the expected order with maxLogFileCount', async function () {
+      const manager = new MongoLogManager({
+        directory,
+        retentionDays,
+        maxLogFileCount: 3,
+        onwarn,
+        onerror,
+      });
+
+      expect(await getFilesState(paths)).to.equal('111111');
+
+      await manager.cleanupOldLogFiles();
+
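+      // Only the three newest files (ages 1, 2, and 3 seconds) survive;
+      // they sit at indices 2-4 of the unsorted `times` array.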
+      expect(await getFilesState(paths)).to.equal('001110');
+    });
+
+    it('cleans up in the expected order with retentionGB', async function () {
+      const manager = new MongoLogManager({
+        directory,
+        retentionDays,
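+        // 3 KB, so 3 files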
+        retentionGB: 3 / 1024 / 1024,
+        onwarn,
+        onerror,
+      });
+
+      expect(await getFilesState(paths)).to.equal('111111');
+
+      await manager.cleanupOldLogFiles();
+
+      expect(await getFilesState(paths)).to.equal('001110');
+    });
+  });
+
+  describe('with multiple log retention settings', function () {
+    it('with retention days, file count, and max size maintains all conditions', async function () {
+      const manager = new MongoLogManager({
+        directory,
+        retentionDays: 1,
+        maxLogFileCount: 3,
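+        // 2 KB, so 2 files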
+        retentionGB: 2 / 1024 / 1024,
+        onwarn,
+        onerror,
+      });
+
+      const paths: string[] = [];
+
+      // Create 4 files which are all older than 1 day and 4 which are from today.
+      for (let i = 0; i < 4; i++) {
+        const today = Math.floor(Date.now() / 1000);
+        const yesterday = today - 25 * 60 * 60;
+        const todayFile = path.join(
+          directory,
+          ObjectId.createFromTime(today - i).toHexString() + '_log'
+        );
+        await fs.writeFile(todayFile, '0'.repeat(1024));
+
+        const yesterdayFile = path.join(
+          directory,
+          ObjectId.createFromTime(yesterday - i).toHexString() + '_log'
+        );
+        await fs.writeFile(yesterdayFile, '0'.repeat(1024));
+
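+        // Prepend so that, in `paths`, each yesterday file sits right
+        // before the today file created in the same iteration.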
+        paths.unshift(todayFile);
+        paths.unshift(yesterdayFile);
+      }
+
+      expect(await getFilesState(paths)).to.equal('11111111');
+
+      await manager.cleanupOldLogFiles();
+
+      // All of yesterday's files and 2 of today's files should be deleted
+      // (because of the file count and file size limits).
+      expect(await getFilesState(paths)).to.equal('00000101');
+    });
+
+    it('with low GB but high file count maintains both conditions', async function () {
+      const manager = new MongoLogManager({
+        directory,
+        retentionDays,
+        maxLogFileCount: 3,
+        // 2 KB, so 2 files
+        retentionGB: 2 / 1024 / 1024,
+        onwarn,
+        onerror,
+      });
+
+      const paths: string[] = [];
+      const offset = Math.floor(Date.now() / 1000);
+
+      // Create 10 files of 1 KB each.
+      for (let i = 0; i < 10; i++) {
+        const filename = path.join(
+          directory,
+          ObjectId.createFromTime(offset - i).toHexString() + '_log'
+        );
+        await fs.writeFile(filename, '0'.repeat(1024));
+        paths.unshift(filename);
+      }
+
+      expect(await getFilesState(paths)).to.equal('1111111111');
+      await manager.cleanupOldLogFiles();
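+      // The 2 KB size limit is stricter than the 3-file count limit,
+      // so only the 2 newest files remain.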
+      expect(await getFilesState(paths)).to.equal('0000000011');
+    });
+
+    it('with high GB but low file count maintains both conditions', async function () {
+      const manager = new MongoLogManager({
+        directory,
+        retentionDays,
+        maxLogFileCount: 2,
+        // 3 KB, so 3 files
+        retentionGB: 3 / 1024 / 1024,
+        onwarn,
+        onerror,
+      });
+
+      const paths: string[] = [];
+      const offset = Math.floor(Date.now() / 1000);
+
+      // Create 10 files of 1 KB each.
+      for (let i = 0; i < 10; i++) {
+        const filename = path.join(
+          directory,
+          ObjectId.createFromTime(offset - i).toHexString() + '_log'
+        );
+        await fs.writeFile(filename, '0'.repeat(1024));
+        paths.unshift(filename);
+      }
+
+      expect(await getFilesState(paths)).to.equal('1111111111');
+      await manager.cleanupOldLogFiles();
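+      // Here the 2-file count limit is stricter than the 3 KB size limit,
+      // so again only the 2 newest files remain.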
+      expect(await getFilesState(paths)).to.equal('0000000011');
+    });
+  });
+
   it('cleaning up old log files is a no-op by default', async function () {
     const manager = new MongoLogManager({
       directory: path.join('directory', 'nonexistent'),