15 changes: 15 additions & 0 deletions playground/content.config.ts
@@ -66,6 +66,21 @@ const pages = defineCollection({
})

const collections = {
people: defineCollection({
type: 'data',
source: 'org/people.csv',
schema: z.object({
name: z.string(),
email: z.string().email(),
}),
}),
org: defineCollection({
type: 'data',
source: 'org/**.csv',
schema: z.object({
body: z.array(z.any()),
}),
}),
hackernews,
content,
data,
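These two collections exercise both CSV shapes: `people` targets a single file and validates each row against a flat schema, while `org` globs every CSV under `org/` and keeps the parsed rows nested under `body`. A rough usage sketch, assuming the standard `queryCollection`/`useAsyncData` composables and the `Jane Smith` row from the fixture added below; the async-data keys are made up for the example:

```ts
// Row-level collection: each `people` document should be one validated CSV row
const { data: person } = await useAsyncData('person-jane', () =>
  queryCollection('people').where('name', '=', 'Jane Smith').first(),
)

// File-level collection: `org` documents keep all rows of a file under `body`
const { data: files } = await useAsyncData('org-files', () =>
  queryCollection('org').all(),
)
```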
11 changes: 11 additions & 0 deletions playground/content/org/people.csv
@@ -0,0 +1,11 @@
name,email
John Doe,[email protected]
Jane Smith,[email protected]
Bob Johnson,[email protected]
Alice Brown,[email protected]
Charlie Wilson,[email protected]
Diana Lee,[email protected]
Eve Davis,[email protected]
Frank Miller,[email protected]
Grace Taylor,[email protected]
Henry Anderson,[email protected]
10 changes: 10 additions & 0 deletions playground/pages/org/data.vue
@@ -0,0 +1,10 @@
<script setup lang="ts">
const { data } = await useAsyncData('org-data', () => queryCollection('org').all())
</script>

<template>
<div>
<h1>Org data</h1>
<pre>{{ data }}</pre>
</div>
</template>
10 changes: 10 additions & 0 deletions playground/pages/org/people.vue
@@ -0,0 +1,10 @@
<script setup lang="ts">
const { data: people } = await useAsyncData('people', () => queryCollection('people').all())
</script>

<template>
<div>
<h1>People</h1>
<pre>{{ people }}</pre>
</div>
</template>
4 changes: 3 additions & 1 deletion src/utils/content/index.ts
@@ -111,7 +111,7 @@ async function _getHighlightPlugin(key: string, options: HighlighterOptions) {
export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt) {
const nuxtOptions = nuxt?.options as unknown as { content: ModuleOptions, mdc: MDCModuleOptions }
const mdcOptions = nuxtOptions?.mdc || {}
const { pathMeta = {}, markdown = {}, transformers = [] } = nuxtOptions?.content?.build || {}
const { pathMeta = {}, markdown = {}, transformers = [], csv = {}, yaml = {} } = nuxtOptions?.content?.build || {}

const rehypeHighlightPlugin = markdown.highlight !== false
? await getHighlightPluginInstance(defu(markdown.highlight as HighlighterOptions, mdcOptions.highlight, { compress: true }))
@@ -149,6 +149,8 @@ export async function createParser(collection: ResolvedCollection, nuxt?: Nuxt)
},
highlight: undefined,
},
csv,
yaml,
}

return async function parse(file: ContentFile) {
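With `csv` and `yaml` now read from `content.build` and forwarded into the parser options, both parsers become configurable from the app. A minimal sketch of where those options would live, assuming the module's existing `delimiter`/`json` csv build options; the exact values are placeholders, not something this diff introduces:

```ts
// nuxt.config.ts — illustrative only
export default defineNuxtConfig({
  content: {
    build: {
      csv: { delimiter: ',', json: true },
      yaml: {},
    },
  },
})
```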
7 changes: 7 additions & 0 deletions src/utils/content/transformers/csv/index.ts
@@ -53,6 +53,13 @@ export default defineTransformer({
})
const { result } = await stream.process(file.body)

if (Array.isArray(result) && result.length === 1) {
return {
id: file.id,
...result[0],
}
}

return {
id: file.id,
body: result,
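This branch changes the output shape when a parse yields exactly one data row, which is what the per-row CSV source added below produces. The two shapes, inferred from the transformer logic rather than captured output (the `id` values are assumptions):

```ts
// Whole-file parse: rows stay nested under `body`
const wholeFile = {
  id: 'org/people.csv',
  body: [
    { name: 'John Doe', email: '…' },
    { name: 'Jane Smith', email: '…' },
  ],
}

// Single-row parse (one data line per item): the row's fields are spread onto the document
const singleRow = {
  id: 'org/people.csv#l1',
  name: 'John Doe',
  email: '…',
}
```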
2 changes: 1 addition & 1 deletion src/utils/schema/index.ts
@@ -106,7 +106,7 @@ export function detectSchemaVendor(schema: ContentStandardSchemaV1) {
}

export function replaceComponentSchemas<T = Draft07Definition | Draft07DefinitionProperty>(property: T): T {
if ((property as Draft07DefinitionProperty).type === 'array') {
if ((property as Draft07DefinitionProperty).type === 'array' && (property as Draft07DefinitionProperty).items) {
(property as Draft07DefinitionProperty).items = replaceComponentSchemas((property as Draft07DefinitionProperty).items as Draft07DefinitionProperty) as Draft07DefinitionProperty
}

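The added `items` guard matters for array properties that serialize without an item schema, such as the playground's `body: z.array(z.any())`. A minimal sketch of the failure mode, using a stand-in type rather than the module's real `Draft07DefinitionProperty`:

```ts
// Stand-in type, for illustration only
type ArrayProp = { type?: string, items?: ArrayProp }

const withoutItems: ArrayProp = { type: 'array' } // no `items` at all
// Before the guard, the recursion was handed `undefined` and crashed reading `.type`;
// with the guard, properties like this are returned unchanged.
```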
59 changes: 59 additions & 0 deletions src/utils/source.ts
@@ -1,4 +1,5 @@
import { readFile } from 'node:fs/promises'
import { createReadStream } from 'node:fs'
import { join, normalize } from 'pathe'
import { withLeadingSlash, withoutTrailingSlash } from 'ufo'
import { glob } from 'tinyglobby'
@@ -19,6 +20,12 @@ export function defineLocalSource(source: CollectionSource | ResolvedCollectionS
logger.warn('Collection source should not start with `./` or `../`.')
source.include = source.include.replace(/^(\.\/|\.\.\/|\/)*/, '')
}

// If the source points at a single CSV file (no glob pattern), resolve it as a CSV source so each data row can be exposed as its own item
if (source.include.endsWith('.csv') && !source.include.includes('*')) {
return defineCSVSource(source)
}

const { fixed } = parseSourceBase(source)
const resolvedSource: ResolvedCollectionSource = {
_resolved: true,
@@ -105,6 +112,58 @@ export function defineBitbucketSource(
return resolvedSource
}

export function defineCSVSource(source: CollectionSource): ResolvedCollectionSource {
const { fixed } = parseSourceBase(source)

const resolvedSource: ResolvedCollectionSource = {
_resolved: true,
prefix: withoutTrailingSlash(withLeadingSlash(fixed)),
prepare: async ({ rootDir }) => {
resolvedSource.cwd = source.cwd
? String(normalize(source.cwd)).replace(/^~~\//, rootDir)
: join(rootDir, 'content')
},
getKeys: async () => {
const _keys = await glob(source.include, { cwd: resolvedSource.cwd, ignore: getExcludedSourcePaths(source), dot: true, expandDirectories: false })
.catch((): [] => [])
const keys = _keys.map(key => key.substring(fixed.length))
if (keys.length !== 1) {
return keys
}

return new Promise((resolve) => {
const csvKeys: string[] = []
let count = 0
createReadStream(join(resolvedSource.cwd, fixed, keys[0]!))
.on('data', function (chunk) {
// Count newline bytes (0x0A): emit one key per data row of the file
for (let i = 0; i < chunk.length; i += 1) {
if (chunk[i] === 10) {
csvKeys.push(`${keys[0]}#l${count}`)
count += 1
}
}
})
.on('end', () => resolve(csvKeys))
})
},
getItem: async (key) => {
const [csvKey, csvIndex] = key.split('#')
const fullPath = join(resolvedSource.cwd, fixed, csvKey!)
const content = await readFile(fullPath, 'utf8')

if (key.includes('#')) {
// Keys look like `org/people.csv#l3`: strip the `l` prefix to recover the line index,
// then return the header line together with that row
const lines = content.split('\n')
return lines[0] + '\n' + lines[Number((csvIndex || 'l0').slice(1))]!
}

return content
},
...source,
include: source.include,
cwd: '',
}
return resolvedSource
}

export function parseSourceBase(source: CollectionSource) {
const [fixPart, ...rest] = source.include.includes('*') ? source.include.split('*') : ['', source.include]
return {
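For a single-file CSV source, `getKeys` emits one key per newline in the file and `getItem` rebuilds a two-line CSV (header plus the requested row) for each key. A rough consumption sketch; the root dir, key strings and optional chaining are assumptions inferred from the code above, not behaviour asserted by this PR:

```ts
// Illustrative only — paths and ids are placeholders
const source = defineCSVSource({ include: 'org/people.csv' })
await source.prepare?.({ rootDir: '/path/to/playground' })

const keys = await source.getKeys?.()
// e.g. ['org/people.csv#l0', 'org/people.csv#l1', …]

const item = await source.getItem?.('org/people.csv#l2')
// header line plus that row, e.g. 'name,email\nJane Smith,…'
```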