Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .husky/pre-commit

This file was deleted.

16 changes: 16 additions & 0 deletions lib/__tests__/data-id-validator.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import { scanDataIds } from '../data-id-validator'

// Regression guard: every `id` across all JSON data files must be globally
// unique; a collision fails the suite with a per-ID file listing.
describe('data ID validation', () => {
  test('ensures IDs across data files remain unique', () => {
    const { duplicates } = scanDataIds()

    if (duplicates.length > 0) {
      const lines: string[] = []
      for (const { id, files } of duplicates) {
        lines.push(`- ${id}: ${files.join(', ')}`)
      }
      throw new Error(`Duplicate IDs detected:\n${lines.join('\n')}`)
    }
  })
})

112 changes: 112 additions & 0 deletions lib/data-id-validator.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import fs from 'fs'
import path from 'path'

/** One ID that appears in more than one file, with every file that declares it. */
export type DuplicateIdRecord = {
  id: string
  files: string[]
}

/** Aggregate result of scanning a data directory for `id` fields. */
export type IdScanResult = {
  totalIds: number
  duplicates: DuplicateIdRecord[]
}

/**
 * Reads every `.json` file under `dataDir` and records each string `id`
 * property found anywhere in the parsed document tree.
 *
 * @param dataDir directory to scan; defaults to `<cwd>/data`
 * @returns the count of distinct IDs plus any IDs seen in multiple files,
 *          sorted by ID
 * @throws Error when a JSON file fails to parse (message names the file)
 */
export function scanDataIds(dataDir = path.join(process.cwd(), 'data')): IdScanResult {
  const idOrigins = new Map<string, string>()
  const duplicateSources = new Map<string, Set<string>>()

  for (const absolutePath of collectJsonFiles(dataDir)) {
    const relativePath = path.relative(dataDir, absolutePath)
    const raw = fs.readFileSync(absolutePath, 'utf8')
    let parsedContent: unknown

    try {
      parsedContent = JSON.parse(raw)
    } catch (error) {
      // Re-throw with the offending file name so the caller can act on it.
      throw new Error(`Failed to parse ${relativePath}: ${(error as Error).message}`)
    }

    collectIds(parsedContent, relativePath, idOrigins, duplicateSources)
  }

  const duplicates: DuplicateIdRecord[] = [...duplicateSources.entries()]
    .map(([id, files]) => ({ id, files: [...files] }))
    .sort((a, b) => a.id.localeCompare(b.id))

  return { totalIds: idOrigins.size, duplicates }
}

/**
 * Recursively gathers the paths of all `.json` files under `targetDir`,
 * in directory-listing order (subdirectories expanded in place).
 */
function collectJsonFiles(targetDir: string): string[] {
  const found: string[] = []

  for (const entry of fs.readdirSync(targetDir, { withFileTypes: true })) {
    const fullPath = path.join(targetDir, entry.name)

    if (entry.isDirectory()) {
      for (const nested of collectJsonFiles(fullPath)) {
        found.push(nested)
      }
    } else if (entry.isFile() && entry.name.endsWith('.json')) {
      found.push(fullPath)
    }
  }

  return found
}

/**
 * Depth-first walk of a parsed JSON value. Any object carrying a non-blank
 * string `id` property is registered; arrays and nested objects are
 * descended into, primitives and null are ignored.
 */
function collectIds(
  node: unknown,
  filePath: string,
  idOrigins: Map<string, string>,
  duplicateSources: Map<string, Set<string>>
) {
  if (Array.isArray(node)) {
    node.forEach(item => collectIds(item, filePath, idOrigins, duplicateSources))
    return
  }

  if (node === null || typeof node !== 'object') {
    return
  }

  const record = node as Record<string, unknown>
  const candidate = record.id

  if (typeof candidate === 'string' && candidate.trim().length > 0) {
    registerId(candidate, filePath, idOrigins, duplicateSources)
  }

  Object.values(record).forEach(child => collectIds(child, filePath, idOrigins, duplicateSources))
}

/**
 * Records where `id` (trimmed) was seen. The first file to declare an ID
 * becomes its origin; any later file creates or extends a duplicate record
 * whose file set always includes the origin.
 *
 * Fix: replaces the original `has()` + `get(...) as string` double lookup
 * (an unchecked type assertion) with a single `get` narrowed against
 * `undefined`, and only `set`s the duplicate Set when it is first created.
 */
function registerId(
  id: string,
  filePath: string,
  idOrigins: Map<string, string>,
  duplicateSources: Map<string, Set<string>>
) {
  const trimmedId = id.trim()

  // Defensive: callers already skip blank IDs, but keep the guard local.
  if (trimmedId.length === 0) {
    return
  }

  const origin = idOrigins.get(trimmedId)

  if (origin === undefined) {
    // First sighting: remember which file owns this ID.
    idOrigins.set(trimmedId, filePath)
    return
  }

  // Duplicate sighting: accumulate every file involved, origin first.
  let sources = duplicateSources.get(trimmedId)
  if (sources === undefined) {
    sources = new Set([origin])
    duplicateSources.set(trimmedId, sources)
  }
  sources.add(filePath)
}
17 changes: 0 additions & 17 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 2 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@
"test": "vitest",
"test:ui": "vitest --ui",
"test:run": "vitest run",
"validate:ids": "tsx scripts/validate-question-ids.ts",
"prepare": "husky"
"validate:ids": "tsx scripts/validate-question-ids.ts"
},
"dependencies": {
"@radix-ui/react-slot": "^1.2.3",
Expand Down Expand Up @@ -39,7 +38,6 @@
"eslint-config-next": "15.5.3",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.5.4",
"husky": "^9.1.7",
"jsdom": "^27.0.0",
"prettier": "^3.6.2",
"tailwindcss": "^4",
Expand All @@ -48,4 +46,4 @@
"typescript": "^5",
"vitest": "^3.2.4"
}
}
}
105 changes: 13 additions & 92 deletions scripts/validate-question-ids.ts
Original file line number Diff line number Diff line change
@@ -1,104 +1,25 @@
import fs from 'fs';
import path from 'path';
import path from 'path'

interface Question {
id: string;
question: string;
answers: Array<{
value: string;
label: string;
example?: string;
docs?: string;
}>;
}

function validateQuestionIds() {
const dataDir = path.join(process.cwd(), 'data');
const questionsDir = path.join(dataDir, 'questions');

const allIds = new Set<string>();
const duplicates: string[] = [];
const fileIdMap = new Map<string, string>();
import { scanDataIds } from '../lib/data-id-validator'

// Check main data files
const mainFiles = [
'general.json',
'architecture.json',
'performance.json',
'security.json',
'commits.json',
'files.json'
];

for (const file of mainFiles) {
const filePath = path.join(dataDir, file);
if (fs.existsSync(filePath)) {
checkFileForDuplicates(filePath, file, allIds, duplicates, fileIdMap);
}
}

// Check framework-specific question files
if (fs.existsSync(questionsDir)) {
const questionFiles = fs.readdirSync(questionsDir)
.filter(file => file.endsWith('.json'));

for (const file of questionFiles) {
const filePath = path.join(questionsDir, file);
checkFileForDuplicates(filePath, `questions/${file}`, allIds, duplicates, fileIdMap);
}
}
export function validateQuestionIds() {
const dataDir = path.join(process.cwd(), 'data')
const { duplicates, totalIds } = scanDataIds(dataDir)

if (duplicates.length > 0) {
console.error('❌ Duplicate question IDs found:');
duplicates.forEach(id => {
console.error(` - "${id}" in ${fileIdMap.get(id)}`);
});
process.exit(1);
}

console.log('✅ All question IDs are unique across all files');
console.log(`📊 Total unique question IDs: ${allIds.size}`);
}
console.error('❌ Duplicate IDs found across data files:')

function checkFileForDuplicates(
filePath: string,
fileName: string,
allIds: Set<string>,
duplicates: string[],
fileIdMap: Map<string, string>
) {
try {
const content = fs.readFileSync(filePath, 'utf-8');
const questions: Question[] = JSON.parse(content);

if (!Array.isArray(questions)) {
console.warn(`⚠️ Skipping ${fileName}: not an array of questions`);
return;
for (const { id, files } of duplicates) {
console.error(` - "${id}" in ${files.join(', ')}`)
}

questions.forEach((question, index) => {
if (!question.id) {
console.warn(`⚠️ Question at index ${index} in ${fileName} has no ID`);
return;
}

if (allIds.has(question.id)) {
duplicates.push(question.id);
const existingFile = fileIdMap.get(question.id);
fileIdMap.set(question.id, `${existingFile} and ${fileName}`);
} else {
allIds.add(question.id);
fileIdMap.set(question.id, fileName);
}
});
} catch (error) {
console.error(`❌ Error reading ${fileName}:`, error);
process.exit(1);
process.exit(1)
}

console.log('✅ All IDs are unique across data files')
console.log(`📊 Total unique IDs: ${totalIds}`)
}

if (require.main === module) {
validateQuestionIds();
validateQuestionIds()
}

export { validateQuestionIds };