Merged · Changes from 16 commits
5 changes: 5 additions & 0 deletions .changeset/smooth-loops-wear.md
@@ -0,0 +1,5 @@
---
'@pmndrs/docs': minor
---

llms.txt
11 changes: 11 additions & 0 deletions docs/getting-started/introduction.mdx
@@ -283,3 +283,14 @@ $ (
```

Then go to: http://localhost:3000

## Agents

For agents and LLM tooling, we also provide the documentation as plain text: [llms.txt](/llms.txt) (an index of all pages) and [llms-full.txt](/llms-full.txt) (the full content).

These URLs are advertised in the HTML `<head>` as:

```html
<link rel="alternate" type="text/plain" href="/llms.txt" />
<link rel="alternate" type="text/plain" href="/llms-full.txt" />
```
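
For context, a minimal sketch of consuming these endpoints against the local dev server mentioned above (the base URL is an assumption; this snippet is illustrative, not part of the diff):

```ts
// Sketch only: assumes the dev server from the section above is running.
const base = 'http://localhost:3000'

for (const path of ['/llms.txt', '/llms-full.txt']) {
  const res = await fetch(new URL(path, base))
  console.log(path, res.headers.get('content-type')) // text/plain; charset=utf-8
  console.log((await res.text()).slice(0, 200)) // preview of the index / full dump
}
```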
3 changes: 3 additions & 0 deletions package.json
Member: @copilot still necessary (you added this in a previous commit, I think)

Member: @copilot if not, don't forget to update the lock file

Contributor (author): Removed in d408942. The remark, remark-mdx, and remark-stringify dependencies were added in commit fad20a3 for JSX stripping but are no longer used after we simplified to keep all JSX tags as-is (commit def4a47). The current implementation only uses basic string operations, so these dependencies are unnecessary.

Contributor (author): Already done in d408942. The commit removed the dependencies from package.json and updated pnpm-lock.yaml (21 lines removed). Both files were committed together.

@@ -70,7 +70,10 @@
"react-mcu": "^1.1.0",
"rehype-github-alerts": "^3.0.0",
"rehype-prism-plus": "^2.0.0",
"remark": "^15.0.1",
"remark-gfm": "^4.0.1",
"remark-mdx": "^3.1.1",
"remark-stringify": "^11.0.0",
"sanitize-html": "^2.17.0",
"tailwind-merge": "^3.4.0",
"unist-util-visit": "^5.0.0"
21 changes: 21 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions src/app/layout.tsx
@@ -125,6 +125,8 @@ export default function RootLayout({
className={`${inter.variable} ${inconsolata.variable}`}
>
<head>
<link rel="alternate" type="text/plain" href="/llms.txt" />
<link rel="alternate" type="text/plain" href="/llms-full.txt" />
<SandpackCSS />
</head>
<body className="wrap-break-word bg-surface text-on-surface">
51 changes: 51 additions & 0 deletions src/app/llms-full.txt/route.ts
@@ -0,0 +1,51 @@
import { parseDocsMetadata } from '@/utils/docs'

export const dynamic = 'force-static'

/**
* Basic cleanup of markdown content
*/
function cleanMarkdown(content: string): string {
// Just do basic cleanup - keep JSX tags as-is
return content
.replace(/\n{3,}/g, '\n\n') // Clean up multiple empty lines
.trim()
}

export async function GET() {
const { MDX, NEXT_PUBLIC_LIBNAME, NEXT_PUBLIC_URL } = process.env
if (!MDX) throw new Error('MDX env var not set')
if (!NEXT_PUBLIC_LIBNAME) throw new Error('NEXT_PUBLIC_LIBNAME env var not set')

const docs = await parseDocsMetadata(MDX)

const baseUrl = NEXT_PUBLIC_URL || ''

// Generate llms-full.txt content
const header = `${NEXT_PUBLIC_LIBNAME}

Full documentation content.

`

const fullContent =
header +
docs
.map((doc) => {
const url = baseUrl ? `${baseUrl}${doc.url}` : doc.url
return `---

${doc.title}
URL: ${url}
${doc.description ? `Description: ${doc.description}\n` : ''}
${cleanMarkdown(doc.content)}
`
})
.join('\n')

return new Response(fullContent, {
headers: {
'Content-Type': 'text/plain; charset=utf-8',
},
})
}
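
Tracing the template above, the generated llms-full.txt comes out roughly like this (library name, URLs, and entries are placeholders, not real output; entries without a description simply omit that line):

```text
my-library

Full documentation content.

---

Introduction
URL: https://example.com/getting-started/introduction
Description: Get started in minutes

...cleaned markdown body of the page...

---

Installation
URL: https://example.com/getting-started/installation

...cleaned markdown body of the page...
```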
36 changes: 36 additions & 0 deletions src/app/llms.txt/route.ts
@@ -0,0 +1,36 @@
import { parseDocsMetadata } from '@/utils/docs'

export const dynamic = 'force-static'

export async function GET() {
const { MDX, NEXT_PUBLIC_LIBNAME, NEXT_PUBLIC_URL } = process.env
if (!MDX) throw new Error('MDX env var not set')
if (!NEXT_PUBLIC_LIBNAME) throw new Error('NEXT_PUBLIC_LIBNAME env var not set')

const docs = await parseDocsMetadata(MDX)

const baseUrl = NEXT_PUBLIC_URL || ''

// Generate llms.txt content following standard format
const content = `# ${NEXT_PUBLIC_LIBNAME}

## Documentation

${docs
.map((doc) => {
const url = baseUrl ? `${baseUrl}${doc.url}` : doc.url
return `- [${doc.title}](${url})${doc.description ? `: ${doc.description}` : ''}`
})
.join('\n')}

---

For full documentation content, see ${baseUrl}/llms-full.txt
`

return new Response(content, {
headers: {
'Content-Type': 'text/plain; charset=utf-8',
},
})
}
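
For comparison, the index route produces something like this (again with placeholder values):

```text
# my-library

## Documentation

- [Introduction](https://example.com/getting-started/introduction): Get started in minutes
- [Installation](https://example.com/getting-started/installation)

---

For full documentation content, see https://example.com/llms-full.txt
```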
75 changes: 48 additions & 27 deletions src/utils/docs.tsx
@@ -77,6 +77,46 @@ export async function crawl(dir: string, filter?: (dir: string) => boolean, file
return files
}

/**
* Parses docs metadata from a given root directory.
*/
export async function parseDocsMetadata(root: string) {
const files = await crawl(
root,
(dir) => !dir.includes('node_modules') && MARKDOWN_REGEX.test(dir),
)

const docs = await Promise.all(
files.map(async (file) => {
const path = file.replace(`${root}/`, '')
const slug = [...path.replace(MARKDOWN_REGEX, '').toLowerCase().split('/')]
const url = `/${slug.join('/')}`

const str = await fs.promises.readFile(file, { encoding: 'utf-8' })
const compiled = matter(str)
const frontmatter = compiled.data
const content = compiled.content

const title: string = frontmatter.title?.trim() ?? slug[slug.length - 1].replace(/\-/g, ' ')
const description: string = frontmatter.description ?? ''
const nav: number = frontmatter.nav ?? Infinity

return {
file,
url,
slug,
title,
description,
nav,
content,
frontmatter,
}
}),
)

return docs.sort((a, b) => a.nav - b.nav)
}

/**
* Fetches all docs, filters to a lib if specified.
*
@@ -91,44 +131,25 @@ async function _getDocs(
slugOfInterest: string[] | null,
slugOnly = false,
): Promise<Doc[]> {
const files = await crawl(
root,
(dir) => !dir.includes('node_modules') && MARKDOWN_REGEX.test(dir),
)
// console.log('files', files)

//
// 1st pass for `entries`
// 1st pass for `entries` - using shared parseDocsMetadata
//

const entries = await Promise.all(
files.map(async (file) => {
// Get slug from local path
const path = file.replace(`${root}/`, '')
const slug = [...path.replace(MARKDOWN_REGEX, '').toLowerCase().split('/')]

const url = `/${slug.join('/')}`

//
// frontmatter
//
const parsedDocs = await parseDocsMetadata(root)

const str = await fs.promises.readFile(file, { encoding: 'utf-8' })
const compiled = matter(str)
const frontmatter = compiled.data

const _lastSegment = slug[slug.length - 1]
const title: string = frontmatter.title.trim() ?? _lastSegment.replace(/\-/g, ' ')
const entries = await Promise.all(
parsedDocs.map(async (parsed) => {
const { file, slug, url, title, frontmatter, content } = parsed

const boxes: string[] = []

// Sanitize markdown
let content = compiled.content
const sanitizedContent = content
// Remove inline link syntax
.replace(INLINE_LINK_REGEX, '$1')

await compileMDX({
source: content,
source: sanitizedContent,
options: {
mdxOptions: {
rehypePlugins: [
Expand All @@ -145,7 +166,7 @@ async function _getDocs(
boxes,
//
file,
content,
content: sanitizedContent,
frontmatter,
}
}),
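
Both routes consume the new helper the same way; a minimal usage sketch, assuming the MDX env var points at the docs root as the routes require:

```ts
import { parseDocsMetadata } from '@/utils/docs'

// Entries come back sorted ascending by frontmatter `nav`.
const docs = await parseDocsMetadata(process.env.MDX!)

for (const { nav, title, url, description } of docs) {
  console.log(nav, title, url, description)
}
```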