Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ jobs:
working-directory: ./website
run: npx playwright install --with-deps chromium

- name: Generate llms.txt and all-anchors.adoc
run: node scripts/generate-llms-txt.js

- name: Copy documentation files to public directory
run: |
mkdir -p website/public/docs
Expand Down
156 changes: 156 additions & 0 deletions docs/all-anchors.adoc
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
= Semantic Anchors — Complete Reference
:toc:
:toc-placement: preamble
:toclevels: 2

include::about.adoc[leveloffset=+1]

<<<

== Communication & Presentation

include::anchors/bluf.adoc[leveloffset=+2]

include::anchors/chatham-house-rule.adoc[leveloffset=+2]

include::anchors/mece.adoc[leveloffset=+2]

include::anchors/pyramid-principle.adoc[leveloffset=+2]

<<<

== Design Principles & Patterns

include::anchors/dry-principle.adoc[leveloffset=+2]

include::anchors/fowler-patterns.adoc[leveloffset=+2]

include::anchors/solid-principles.adoc[leveloffset=+2]

include::anchors/spot-principle.adoc[leveloffset=+2]

include::anchors/ssot-principle.adoc[leveloffset=+2]

<<<

== Development Workflow

include::anchors/bem-methodology.adoc[leveloffset=+2]

include::anchors/conventional-commits.adoc[leveloffset=+2]

include::anchors/mental-model-according-to-naur.adoc[leveloffset=+2]

include::anchors/semantic-versioning.adoc[leveloffset=+2]

include::anchors/sota.adoc[leveloffset=+2]

include::anchors/timtowtdi.adoc[leveloffset=+2]

include::anchors/todotxt-flavoured-markdown.adoc[leveloffset=+2]

<<<

== Dialogue Interaction

include::anchors/socratic-method.adoc[leveloffset=+2]

<<<

== Documentation

include::anchors/diataxis-framework.adoc[leveloffset=+2]

include::anchors/docs-as-code.adoc[leveloffset=+2]

<<<

== Meta

include::anchors/what-qualifies-as-a-semantic-anchor.adoc[leveloffset=+2]

<<<

== Problem Solving

include::anchors/chain-of-thought.adoc[leveloffset=+2]

include::anchors/devils-advocate.adoc[leveloffset=+2]

include::anchors/feynman-technique.adoc[leveloffset=+2]

include::anchors/five-whys.adoc[leveloffset=+2]

include::anchors/morphological-box.adoc[leveloffset=+2]

include::anchors/rubber-duck-debugging.adoc[leveloffset=+2]

<<<

== Requirements Engineering

include::anchors/ears-requirements.adoc[leveloffset=+2]

include::anchors/problem-space-nvc.adoc[leveloffset=+2]

include::anchors/user-story-mapping.adoc[leveloffset=+2]

<<<

== Software Architecture

include::anchors/adr-according-to-nygard.adoc[leveloffset=+2]

include::anchors/arc42.adoc[leveloffset=+2]

include::anchors/c4-diagrams.adoc[leveloffset=+2]

include::anchors/clean-architecture.adoc[leveloffset=+2]

include::anchors/domain-driven-design.adoc[leveloffset=+2]

include::anchors/hexagonal-architecture.adoc[leveloffset=+2]

include::anchors/madr.adoc[leveloffset=+2]

<<<

== Statistical Methods & Process Monitoring

include::anchors/control-chart-shewhart.adoc[leveloffset=+2]

include::anchors/nelson-rules.adoc[leveloffset=+2]

include::anchors/spc.adoc[leveloffset=+2]

<<<

== Strategic Planning

include::anchors/cynefin-framework.adoc[leveloffset=+2]

include::anchors/impact-mapping.adoc[leveloffset=+2]

include::anchors/jobs-to-be-done.adoc[leveloffset=+2]

include::anchors/pugh-matrix.adoc[leveloffset=+2]

include::anchors/wardley-mapping.adoc[leveloffset=+2]

<<<

== Testing & Quality Practices

include::anchors/iec-61508-sil-levels.adoc[leveloffset=+2]

include::anchors/mutation-testing.adoc[leveloffset=+2]

include::anchors/property-based-testing.adoc[leveloffset=+2]

include::anchors/tdd-chicago-school.adoc[leveloffset=+2]

include::anchors/tdd-london-school.adoc[leveloffset=+2]

include::anchors/testing-pyramid.adoc[leveloffset=+2]

<<<
16 changes: 15 additions & 1 deletion scripts/extract-metadata.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,20 @@
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
}

/**
* Decode HTML entities to plain Unicode characters
*/
/**
 * Decode common HTML entities to plain Unicode characters.
 *
 * Handles decimal (`&#8212;`) and hexadecimal (`&#x27;`) numeric character
 * references plus the five predefined XML entities. `&amp;` is decoded LAST
 * so that doubly-escaped input such as `&amp;lt;` correctly yields `&lt;`
 * rather than being double-decoded to `<`.
 *
 * @param {string} str - Text possibly containing HTML entities.
 * @returns {string} The decoded text; falsy input is returned unchanged.
 */
function decodeHtmlEntities(str) {
  if (!str) return str;
  // Decode a numeric reference; code points beyond U+10FFFF are invalid,
  // so leave the original entity text untouched in that case.
  const decodeNumeric = (match, digits, radix) => {
    const codePoint = parseInt(digits, radix);
    // fromCodePoint (not fromCharCode) so astral-plane characters like
    // &#128512; (emoji) decode correctly instead of producing a lone surrogate.
    return codePoint <= 0x10ffff ? String.fromCodePoint(codePoint) : match;
  };
  return str
    .replace(/&#x([0-9a-f]+);/gi, (m, hex) => decodeNumeric(m, hex, 16))
    .replace(/&#(\d+);/g, (m, dec) => decodeNumeric(m, dec, 10))
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&apos;/g, "'")
    .replace(/&amp;/g, '&'); // must be last: avoids double-decoding "&amp;lt;"
}

/**
* Parse a single anchor file
*/
Expand All @@ -31,7 +45,7 @@
// Extract attributes
const attributes = doc.getAttributes();
const id = path.basename(filePath, '.adoc');
const title = doc.getDocumentTitle();
const title = decodeHtmlEntities(doc.getDocumentTitle());

// Parse comma-separated attributes
const parseList = (attr) => {
Expand Down
175 changes: 175 additions & 0 deletions scripts/generate-llms-txt.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
#!/usr/bin/env node
/**
* Generate docs/all-anchors.adoc and website/public/llms.txt
*
* all-anchors.adoc: AsciiDoc include-based full reference document
* llms.txt: Clean Markdown for LLM consumption
*
* Usage: node scripts/generate-llms-txt.js
*/

const fs = require('fs')
const path = require('path')

// Repository root — scripts/ sits one level below it.
const ROOT = path.join(__dirname, '..')

// Category definitions (name + ordered anchor ids) driving both generated
// documents; loaded once at module start from the website's data file.
const categories = JSON.parse(
  fs.readFileSync(path.join(ROOT, 'website/public/data/categories.json'), 'utf-8')
)

// ─── AsciiDoc → Markdown converter ──────────────────────────────────────────

/**
 * Convert a subset of AsciiDoc markup to Markdown via a fixed sequence of
 * regex passes. The pass ORDER is load-bearing: e.g. the `[source,lang]`
 * fence pass must run before the bare `----` pass, and the `<<id,text>>`
 * xref pass before the bare `<<id>>` pass. Do not reorder.
 *
 * NOTE(review): passes are line-oriented and do not track block context, so
 * AsciiDoc-looking text INSIDE source blocks (e.g. a line starting with `=`)
 * would also be transformed — presumably acceptable for these docs; confirm
 * if source samples ever contain such lines.
 *
 * @param {string} adoc - Raw AsciiDoc source text.
 * @returns {string} Markdown text, trimmed and with blank runs collapsed.
 */
function adocToMarkdown(adoc) {
  let md = adoc

  // Remove document attributes (:key: value)
  md = md.replace(/^:[a-z][a-z0-9-]*:.*$/gm, '')

  // Headings: = → #, == → ##, etc.
  md = md.replace(/^(=+) (.+)$/gm, (_, eq, title) => '#'.repeat(eq.length) + ' ' + title)

  // [source,lang] + ---- → ```lang / ```
  // First pass opens the fence (with optional language); second pass turns
  // every remaining standalone ---- (including closers) into a bare fence.
  md = md.replace(/\[source(?:,([^\]]*))?\]\s*\n----/g, (_, lang) => '```' + (lang ? lang.trim() : ''))
  md = md.replace(/^----\s*$/gm, '```')

  // [quote] block: [quote]\n____\ntext\n____ → > text
  md = md.replace(/\[quote[^\]]*\]\s*\n_{4}\s*\n([\s\S]*?)\n_{4}/g, (_, body) =>
    body.trim().split('\n').map((l) => '> ' + l).join('\n')
  )

  // Sidebar blocks **** → remove delimiters
  md = md.replace(/^\*{4}\s*$/gm, '')

  // Collapsible: [%collapsible] + ==== delimiters → remove markers, keep content
  md = md.replace(/^\[%collapsible\]\s*$/gm, '')
  md = md.replace(/^====\s*$/gm, '')

  // Tables |=== → remove delimiters
  md = md.replace(/^\|===\s*$/gm, '')

  // Table rows: |cell content → keep, clean up leading pipe
  // Splits on every pipe, so cells containing a literal | would be split
  // further — empty cells are also dropped by filter(Boolean).
  md = md.replace(/^\|(.+)$/gm, (_, row) => {
    const cells = row.split('|').map((c) => c.trim()).filter(Boolean)
    return '| ' + cells.join(' | ') + ' |'
  })

  // Remove block attribute lines
  md = md.replace(/^\[(?:horizontal|sidebar|cols[^\]]*|options[^\]]*|%\w+[^\]]*)\]\s*$/gm, '')

  // Definition lists: term:: description → **term**: description
  // Leading-char exclusion ([^:\n|#`>]) keeps already-converted headings,
  // table rows, code fences, and quotes from matching as terms.
  md = md.replace(/^([^:\n|#`>]+)::\s*(.*)$/gm, (_, term, desc) =>
    desc.trim() ? `**${term.trim()}**: ${desc.trim()}` : `**${term.trim()}**`
  )

  // Links: link:url[text] → [text](url)
  md = md.replace(/link:([^\[]+)\[([^\]]*)\]/g, '[$2]($1)')

  // Cross-references: <<id,text>> → text, <<id>> → `id`
  md = md.replace(/<<([^,>]+),([^>]+)>>/g, '$2')
  md = md.replace(/<<([^>]+)>>/g, '`$1`')

  // Bold: **text** stays, *text* → **text**
  // Lookarounds exclude * or word chars on either side so existing ** pairs
  // and mid-word asterisks are left alone.
  md = md.replace(/(?<![*\w])\*([^*\n]+)\*(?![*\w])/g, '**$1**')

  // Ordered list items: ". item" → "1. item"
  md = md.replace(/^\. /gm, '1. ')

  // Trailing whitespace and normalize blank lines
  md = md.replace(/[ \t]+$/gm, '')
  md = md.replace(/\n{3,}/g, '\n\n')

  return md.trim()
}

// ─── Generate docs/all-anchors.adoc ─────────────────────────────────────────

/**
 * Write docs/all-anchors.adoc: an include-based AsciiDoc document with a
 * fixed header (title, TOC attributes, about.adoc include) followed by one
 * `==` section per category, each listing includes for the anchor files
 * that actually exist on disk, separated by `<<<` page breaks.
 */
function generateAllAnchorsAdoc() {
  const header = [
    '= Semantic Anchors — Complete Reference',
    ':toc:',
    ':toc-placement: preamble',
    ':toclevels: 2',
    '',
    'include::about.adoc[leveloffset=+1]',
    '',
    '<<<',
    '',
  ]

  // One section per category; silently skip anchor ids with no .adoc file.
  const sections = categories.flatMap((category) => {
    const includes = category.anchors
      .filter((anchorId) => fs.existsSync(path.join(ROOT, 'docs/anchors', `${anchorId}.adoc`)))
      .flatMap((anchorId) => [`include::anchors/${anchorId}.adoc[leveloffset=+2]`, ''])
    return [`== ${category.name}`, '', ...includes, '<<<', '']
  })

  fs.writeFileSync(path.join(ROOT, 'docs/all-anchors.adoc'), [...header, ...sections].join('\n'), 'utf-8')
  console.warn(`Generated: docs/all-anchors.adoc (${categories.length} categories)`)
}

// ─── Generate website/public/llms.txt ───────────────────────────────────────

/**
 * Write website/public/llms.txt: a single Markdown document for LLM
 * consumption — a short preamble, the converted about.adoc, then every
 * existing anchor file grouped under its category heading, with `---`
 * separators between sections.
 */
function generateLlmsTxt() {
  // Total across all categories, used in the preamble and the final log line.
  const totalAnchors = categories.reduce((n, c) => n + c.anchors.length, 0)
  const lines = [
    '# Semantic Anchors — Complete Reference',
    '',
    `> ${totalAnchors} well-defined terms, methodologies, and frameworks`,
    '> that serve as precision reference points when communicating with LLMs.',
    '> Source: https://github.com/LLM-Coding/Semantic-Anchors',
    '> Website: https://llm-coding.github.io/Semantic-Anchors/',
    '',
    '---',
    '',
  ]

  // Introductory content from about.adoc
  const aboutAdoc = fs.readFileSync(path.join(ROOT, 'docs/about.adoc'), 'utf-8')
  lines.push(adocToMarkdown(aboutAdoc))
  lines.push('')
  lines.push('---')
  lines.push('')

  // Anchors by category
  for (const category of categories) {
    lines.push(`## ${category.name}`)
    lines.push('')

    for (const anchorId of category.anchors) {
      const filepath = path.join(ROOT, 'docs/anchors', `${anchorId}.adoc`)
      // Skip anchor ids with no corresponding .adoc file (mirrors the
      // existsSync guard in generateAllAnchorsAdoc).
      if (!fs.existsSync(filepath)) continue

      const raw = fs.readFileSync(filepath, 'utf-8')
      // Document title is the first `= ` line; fall back to the id if absent.
      const titleMatch = raw.match(/^= (.+)$/m)
      const title = titleMatch ? titleMatch[1] : anchorId

      lines.push(`### ${title}`)
      lines.push('')

      // Drop the title line (no /m flag: only strips it when it is the very
      // first line) before converting the rest of the document.
      const body = raw.replace(/^= .+\n/, '')
      lines.push(adocToMarkdown(body))
      lines.push('')
    }

    lines.push('---')
    lines.push('')
  }

  const output = lines.join('\n')
  fs.writeFileSync(path.join(ROOT, 'website/public/llms.txt'), output, 'utf-8')
  const kb = Math.round(Buffer.byteLength(output, 'utf-8') / 1024)
  // console.warn goes to stderr — presumably to keep stdout clean for
  // pipeline use; confirm before changing to console.log.
  console.warn(`Generated: website/public/llms.txt (${totalAnchors} anchors, ~${kb} KB)`)
}

// ─── Main ────────────────────────────────────────────────────────────────────

// Build both derived artifacts from categories.json + docs/ sources.
generateAllAnchorsAdoc()
generateLlmsTxt()
3 changes: 2 additions & 1 deletion scripts/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
"description": "Build scripts for Semantic Anchors project",
"scripts": {
"extract-metadata": "node extract-metadata.js",
"split-readme": "node split-readme.js"
"split-readme": "node split-readme.js",
"generate-llms-txt": "node generate-llms-txt.js"
},
"dependencies": {
"@asciidoctor/core": "^3.0.4",
Expand Down
Loading
Loading