|
| 1 | +//FIXME: after refactor, move to the correct path |
| 2 | +import bytes from 'bytes'; |
| 3 | +import fs from 'node:fs/promises'; |
| 4 | +import path from 'node:path'; |
| 5 | +import { createHash } from 'node:crypto'; |
| 6 | +import { txEnv } from './globalData'; |
| 7 | + |
| 8 | +//Hash test |
| 9 | +const hashFile = async (filePath: string) => { |
| 10 | + const rawFile = await fs.readFile(filePath, 'utf8') |
| 11 | + const normalized = rawFile.normalize('NFKC') |
| 12 | + .replace(/\r\n/g, '\n') |
| 13 | + .replace(/^\uFEFF/, ''); |
| 14 | + return createHash('sha1').update(normalized).digest('hex'); |
| 15 | +} |
| 16 | + |
| 17 | +// Limits |
| 18 | +const MAX_FILES = 300; |
| 19 | +const MAX_TOTAL_SIZE = bytes('50MB'); |
| 20 | +const MAX_FILE_SIZE = bytes('20MB'); |
| 21 | +const MAX_DEPTH = 10; |
| 22 | +const MAX_EXECUTION_TIME = 30 * 1000; |
| 23 | +const IGNORED_FOLDERS = [ |
| 24 | + 'db', |
| 25 | + 'cache', |
| 26 | + 'dist', |
| 27 | + '.reports', |
| 28 | + 'license_report', |
| 29 | + 'tmp_core_tsc', |
| 30 | + 'node_modules', |
| 31 | + 'txData', |
| 32 | +]; |
| 33 | + |
| 34 | + |
/**
 * Checksum record for one scanned file.
 * `path` is relative to the scan root, uses posix separators, and is
 * prefixed with './'; `size` is in bytes; `hash` is the SHA1 hex digest
 * of the canonicalized file contents (see hashFile).
 */
type ContentFileType = {
    path: string;
    size: number;
    hash: string;
}
| 40 | + |
| 41 | +export default async function checksumMonitorFolder() { |
| 42 | + const rootPath = txEnv.txAdminResourcePath; |
| 43 | + const allFiles: ContentFileType[] = []; |
| 44 | + let totalFiles = 0; |
| 45 | + let totalSize = 0; |
| 46 | + |
| 47 | + try { |
| 48 | + const tsStart = Date.now(); |
| 49 | + const scanDir = async (dir: string, depth: number = 0) => { |
| 50 | + if (depth > MAX_DEPTH) { |
| 51 | + throw new Error('MAX_DEPTH'); |
| 52 | + } |
| 53 | + |
| 54 | + let filesFound = 0; |
| 55 | + const entries = await fs.readdir(dir, { withFileTypes: true }); |
| 56 | + for (const entry of entries) { |
| 57 | + if (totalFiles >= MAX_FILES) { |
| 58 | + throw new Error('MAX_FILES'); |
| 59 | + } else if (totalSize >= MAX_TOTAL_SIZE) { |
| 60 | + throw new Error('MAX_TOTAL_SIZE'); |
| 61 | + } else if (Date.now() - tsStart > MAX_EXECUTION_TIME) { |
| 62 | + throw new Error('MAX_EXECUTION_TIME'); |
| 63 | + } |
| 64 | + |
| 65 | + const entryPath = path.join(dir, entry.name); |
| 66 | + let relativeEntryPath = path.relative(rootPath, entryPath); |
| 67 | + relativeEntryPath = './' + relativeEntryPath.split(path.sep).join(path.posix.sep); |
| 68 | + |
| 69 | + if (entry.isDirectory()) { |
| 70 | + if (IGNORED_FOLDERS.includes(entry.name)) { |
| 71 | + continue; |
| 72 | + } |
| 73 | + await scanDir(entryPath, depth + 1); |
| 74 | + } else if (entry.isFile()) { |
| 75 | + const stats = await fs.stat(entryPath); |
| 76 | + if (stats.size > MAX_FILE_SIZE) { |
| 77 | + throw new Error('MAX_SIZE'); |
| 78 | + } |
| 79 | + |
| 80 | + allFiles.push({ |
| 81 | + path: relativeEntryPath, |
| 82 | + size: stats.size, |
| 83 | + hash: await hashFile(entryPath), |
| 84 | + }); |
| 85 | + filesFound++; |
| 86 | + totalFiles++; |
| 87 | + totalSize += stats.size; |
| 88 | + } |
| 89 | + } |
| 90 | + return filesFound; |
| 91 | + }; |
| 92 | + await scanDir(rootPath); |
| 93 | + allFiles.sort((a, b) => a.path.localeCompare(b.path)); |
| 94 | + return { |
| 95 | + totalFiles, |
| 96 | + totalSize, |
| 97 | + allFiles, |
| 98 | + }; |
| 99 | + } catch (error) { |
| 100 | + //At least saving the progress |
| 101 | + return { |
| 102 | + error: (error as any).message, |
| 103 | + totalFiles, |
| 104 | + totalSize, |
| 105 | + allFiles, |
| 106 | + }; |
| 107 | + } |
| 108 | +} |
0 commit comments