diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 100755 index af481ec..0000000 --- a/.husky/pre-commit +++ /dev/null @@ -1 +0,0 @@ -npm run validate:ids diff --git a/lib/__tests__/data-id-validator.test.ts b/lib/__tests__/data-id-validator.test.ts new file mode 100644 index 0000000..1b500ca --- /dev/null +++ b/lib/__tests__/data-id-validator.test.ts @@ -0,0 +1,16 @@ +import { scanDataIds } from '../data-id-validator' + +describe('data ID validation', () => { + test('ensures IDs across data files remain unique', () => { + const { duplicates } = scanDataIds() + + if (duplicates.length > 0) { + const summary = duplicates + .map(({ id, files }) => `- ${id}: ${files.join(', ')}`) + .join('\n') + + throw new Error(`Duplicate IDs detected:\n${summary}`) + } + }) +}) + diff --git a/lib/data-id-validator.ts b/lib/data-id-validator.ts new file mode 100644 index 0000000..4ee1b35 --- /dev/null +++ b/lib/data-id-validator.ts @@ -0,0 +1,112 @@ +import fs from 'fs' +import path from 'path' + +export type DuplicateIdRecord = { + id: string + files: string[] +} + +export type IdScanResult = { + totalIds: number + duplicates: DuplicateIdRecord[] +} + +export function scanDataIds(dataDir = path.join(process.cwd(), 'data')): IdScanResult { + const jsonFiles = collectJsonFiles(dataDir) + const idOrigins = new Map<string, string>() + const duplicateSources = new Map<string, Set<string>>() + + for (const filePath of jsonFiles) { + const relativePath = path.relative(dataDir, filePath) + const content = fs.readFileSync(filePath, 'utf8') + let parsedContent: unknown + + try { + parsedContent = JSON.parse(content) + } catch (error) { + throw new Error(`Failed to parse ${relativePath}: ${(error as Error).message}`) + } + + collectIds(parsedContent, relativePath, idOrigins, duplicateSources) + } + + const duplicates: DuplicateIdRecord[] = Array.from(duplicateSources.entries()).map(([id, files]) => ({ + id, + files: Array.from(files), + })) + + duplicates.sort((a, b) => a.id.localeCompare(b.id)) + + return { 
totalIds: idOrigins.size, + duplicates, + } +} + +function collectJsonFiles(targetDir: string): string[] { + const directoryEntries = fs.readdirSync(targetDir, { withFileTypes: true }) + const collected: string[] = [] + + for (const entry of directoryEntries) { + const entryPath = path.join(targetDir, entry.name) + + if (entry.isDirectory()) { + collected.push(...collectJsonFiles(entryPath)) + } else if (entry.isFile() && entry.name.endsWith('.json')) { + collected.push(entryPath) + } + } + + return collected +} + +function collectIds( + node: unknown, + filePath: string, + idOrigins: Map<string, string>, + duplicateSources: Map<string, Set<string>> +) { + if (Array.isArray(node)) { + for (const value of node) { + collectIds(value, filePath, idOrigins, duplicateSources) + } + return + } + + if (node === null || typeof node !== 'object') { + return + } + + const record = node as Record<string, unknown> + + if (typeof record.id === 'string' && record.id.trim().length > 0) { + registerId(record.id, filePath, idOrigins, duplicateSources) + } + + for (const value of Object.values(record)) { + collectIds(value, filePath, idOrigins, duplicateSources) + } +} + +function registerId( + id: string, + filePath: string, + idOrigins: Map<string, string>, + duplicateSources: Map<string, Set<string>> +) { + const trimmedId = id.trim() + + if (trimmedId.length === 0) { + return + } + + if (!idOrigins.has(trimmedId)) { + idOrigins.set(trimmedId, filePath) + return + } + + const origin = idOrigins.get(trimmedId) as string + const sources = duplicateSources.get(trimmedId) ?? 
new Set([origin]) + sources.add(filePath) + duplicateSources.set(trimmedId, sources) +} diff --git a/package-lock.json b/package-lock.json index 2839897..a3a52fd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -33,7 +33,6 @@ "eslint-config-next": "15.5.3", "eslint-config-prettier": "^10.1.8", "eslint-plugin-prettier": "^5.5.4", - "husky": "^9.1.7", "jsdom": "^27.0.0", "prettier": "^3.6.2", "tailwindcss": "^4", @@ -5746,22 +5745,6 @@ "node": ">= 14" } }, - "node_modules/husky": { - "version": "9.1.7", - "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz", - "integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==", - "dev": true, - "license": "MIT", - "bin": { - "husky": "bin.js" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/typicode" - } - }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", diff --git a/package.json b/package.json index 8381459..9d11d79 100644 --- a/package.json +++ b/package.json @@ -10,8 +10,7 @@ "test": "vitest", "test:ui": "vitest --ui", "test:run": "vitest run", - "validate:ids": "tsx scripts/validate-question-ids.ts", - "prepare": "husky" + "validate:ids": "tsx scripts/validate-question-ids.ts" }, "dependencies": { "@radix-ui/react-slot": "^1.2.3", @@ -39,7 +38,6 @@ "eslint-config-next": "15.5.3", "eslint-config-prettier": "^10.1.8", "eslint-plugin-prettier": "^5.5.4", - "husky": "^9.1.7", "jsdom": "^27.0.0", "prettier": "^3.6.2", "tailwindcss": "^4", @@ -48,4 +46,4 @@ "typescript": "^5", "vitest": "^3.2.4" } -} \ No newline at end of file +} diff --git a/scripts/validate-question-ids.ts b/scripts/validate-question-ids.ts index 6cd00e8..04dc744 100644 --- a/scripts/validate-question-ids.ts +++ b/scripts/validate-question-ids.ts @@ -1,104 +1,25 @@ -import fs from 'fs'; -import path from 'path'; +import path from 'path' -interface Question { - id: 
string; - question: string; - answers: Array<{ - value: string; - label: string; - example?: string; - docs?: string; - }>; -} - -function validateQuestionIds() { - const dataDir = path.join(process.cwd(), 'data'); - const questionsDir = path.join(dataDir, 'questions'); - - const allIds = new Set<string>(); - const duplicates: string[] = []; - const fileIdMap = new Map<string, string>(); +import { scanDataIds } from '../lib/data-id-validator' - // Check main data files - const mainFiles = [ - 'general.json', - 'architecture.json', - 'performance.json', - 'security.json', - 'commits.json', - 'files.json' - ]; - - for (const file of mainFiles) { - const filePath = path.join(dataDir, file); - if (fs.existsSync(filePath)) { - checkFileForDuplicates(filePath, file, allIds, duplicates, fileIdMap); - } - } - - // Check framework-specific question files - if (fs.existsSync(questionsDir)) { - const questionFiles = fs.readdirSync(questionsDir) - .filter(file => file.endsWith('.json')); - - for (const file of questionFiles) { - const filePath = path.join(questionsDir, file); - checkFileForDuplicates(filePath, `questions/${file}`, allIds, duplicates, fileIdMap); - } - } +export function validateQuestionIds() { + const dataDir = path.join(process.cwd(), 'data') + const { duplicates, totalIds } = scanDataIds(dataDir) if (duplicates.length > 0) { - console.error('❌ Duplicate question IDs found:'); - duplicates.forEach(id => { - console.error(` - "${id}" in ${fileIdMap.get(id)}`); - }); - process.exit(1); - } - - console.log('✅ All question IDs are unique across all files'); - console.log(`📊 Total unique question IDs: ${allIds.size}`); -} + console.error('❌ Duplicate IDs found across data files:') -function checkFileForDuplicates( - filePath: string, - fileName: string, - allIds: Set<string>, - duplicates: string[], - fileIdMap: Map<string, string> -) { - try { - const content = fs.readFileSync(filePath, 'utf-8'); - const questions: Question[] = JSON.parse(content); - - if (!Array.isArray(questions)) { - console.warn(`⚠️ 
Skipping ${fileName}: not an array of questions`); - return; + for (const { id, files } of duplicates) { + console.error(` - "${id}" in ${files.join(', ')}`) } - questions.forEach((question, index) => { - if (!question.id) { - console.warn(`⚠️ Question at index ${index} in ${fileName} has no ID`); - return; - } - - if (allIds.has(question.id)) { - duplicates.push(question.id); - const existingFile = fileIdMap.get(question.id); - fileIdMap.set(question.id, `${existingFile} and ${fileName}`); - } else { - allIds.add(question.id); - fileIdMap.set(question.id, fileName); - } - }); - } catch (error) { - console.error(`❌ Error reading ${fileName}:`, error); - process.exit(1); + process.exit(1) } + + console.log('✅ All IDs are unique across data files') + console.log(`📊 Total unique IDs: ${totalIds}`) } if (require.main === module) { - validateQuestionIds(); + validateQuestionIds() } - -export { validateQuestionIds }; \ No newline at end of file