@@ -8,7 +8,7 @@ function normalize(s) {
   return s.replace(/\\/g, path.win32.sep.repeat(2));
 }
 
-async function gatherPartials(ast, schemaDir) {
+async function gatherPartials(ast, schemaDir, tokenizer) {
   let partials = {};
 
   for (const node of ast.walk()) {
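This hunk threads a caller-supplied `Markdoc.Tokenizer` through `gatherPartials` rather than relying on the default tokenizer that `Markdoc.parse` uses internally when handed a raw string. The two-step pipeline is equivalent to the one-step call when the tokenizer is built without options; a minimal sketch (`allowComments` is a documented `Tokenizer` option, used here only as an example):

```js
const Markdoc = require('@markdoc/markdoc');

// One-step: Markdoc.parse tokenizes internally with default settings.
const astDefault = Markdoc.parse('# Heading');

// Two-step: an explicit tokenizer leaves room for custom options,
// e.g. treating <!-- ... --> as comments instead of literal text.
const tokenizer = new Markdoc.Tokenizer({allowComments: true});
const ast = Markdoc.parse(tokenizer.tokenize('# Heading <!-- draft -->'));
```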
@@ -25,11 +25,12 @@ async function gatherPartials(ast, schemaDir) {
       const content = await fs.promises.readFile(filepath, {encoding: 'utf8'});
 
       if (content) {
-        const ast = Markdoc.parse(content);
+        const tokens = tokenizer.tokenize(content);
+        const ast = Markdoc.parse(tokens);
         partials = {
           ...partials,
           [file]: content,
-          ...(await gatherPartials.call(this, ast, schemaDir)),
+          ...(await gatherPartials.call(this, ast, schemaDir, tokenizer)),
         };
       }
     }
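Each partial file is read as a raw string and now tokenized with the same shared tokenizer before the recursive walk, so nested partials honor whatever options the page itself was parsed with. The walk looks for Markdoc's partial tag; a self-contained sketch of what it matches:

```js
const Markdoc = require('@markdoc/markdoc');

const tokenizer = new Markdoc.Tokenizer();
// A page referencing a partial (Markdoc's documented partial syntax).
const source = '{% partial file="header.md" /%}';
const ast = Markdoc.parse(tokenizer.tokenize(source));

for (const node of ast.walk()) {
  if (node.type === 'tag' && node.tag === 'partial') {
    console.log(node.attributes.file); // "header.md"
  }
}
```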
@@ -51,10 +52,14 @@ async function load(source) {
     dir, // Root directory from Next.js (contains next.config.js)
     mode = 'static',
     schemaPath = DEFAULT_SCHEMA_PATH,
+    tokenizerOptions = undefined,
   } = this.getOptions() || {};
 
+  const tokenizer = new Markdoc.Tokenizer(tokenizerOptions);
+
   const schemaDir = path.resolve(dir, schemaPath || DEFAULT_SCHEMA_PATH);
-  const ast = Markdoc.parse(source);
+  const tokens = tokenizer.tokenize(source);
+  const ast = Markdoc.parse(tokens);
 
   // Grabs the path of the file relative to the `/pages` directory
   // to pass into the app props later.
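`load` now accepts a `tokenizerOptions` loader option alongside `mode` and `schemaPath`, and builds one tokenizer up front for both the page source and its partials. Assuming `withMarkdoc` forwards its plugin options to the loader, configuration would look roughly like this in `next.config.js` (a sketch; `mode` and `schemaPath` are the existing options named above):

```js
// next.config.js
const withMarkdoc = require('@markdoc/next.js');

module.exports = withMarkdoc({
  mode: 'static',
  schemaPath: './markdoc',
  tokenizerOptions: {allowComments: true}, // assumed to reach the loader
})({
  pageExtensions: ['js', 'jsx', 'md', 'mdoc'],
});
```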
@@ -66,7 +71,8 @@ async function load(source) {
   const partials = await gatherPartials.call(
     this,
     ast,
-    path.resolve(schemaDir, 'partials')
+    path.resolve(schemaDir, 'partials'),
+    tokenizer
   );
 
   // IDEA: consider making this an option per-page
@@ -124,12 +130,17 @@ import {getSchema, defaultObject} from '${normalize(
  */
 ${schemaCode}
 
+const tokenizer = new Markdoc.Tokenizer(${
+  tokenizerOptions ? JSON.stringify(tokenizerOptions) : ''
+});
+
 /**
  * Source will never change at runtime, so parse happens at the file root
  */
 const source = ${JSON.stringify(source)};
 const filepath = ${JSON.stringify(filepath)};
-const ast = Markdoc.parse(source);
+const tokens = tokenizer.tokenize(source);
+const ast = Markdoc.parse(tokens);
 
 /**
  * Like the AST, frontmatter won't change at runtime, so it is loaded at file root.
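Inside the generated module, the options are inlined with `JSON.stringify`; when `tokenizerOptions` is undefined the interpolation collapses to an empty string, so the emitted call is simply `new Markdoc.Tokenizer()`. For example (hypothetical emitted output):

```js
// Emitted when tokenizerOptions is {allowComments: true}:
const tokenizer = new Markdoc.Tokenizer({"allowComments":true});

// Emitted when tokenizerOptions is undefined (empty interpolation):
// const tokenizer = new Markdoc.Tokenizer();
```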
@@ -146,7 +157,8 @@ export async function ${dataFetchingFunction}(context) {
 
   // Ensure Node.transformChildren is available
   Object.keys(partials).forEach((key) => {
-    partials[key] = Markdoc.parse(partials[key]);
+    const tokens = tokenizer.tokenize(partials[key]);
+    partials[key] = Markdoc.parse(tokens);
   });
 
   const cfg = {
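Because partials are shipped to the data-fetching function as raw strings, they are re-tokenized at runtime with the same generated tokenizer, keeping build-time and runtime parses consistent. Without this, a page parsed with custom options would still parse its partials with the defaults; a sketch of the divergence (assuming `allowComments` changes how `<!-- -->` is tokenized):

```js
const Markdoc = require('@markdoc/markdoc');

const tokenizer = new Markdoc.Tokenizer({allowComments: true});
const partialSource = 'Hello <!-- internal note -->';

const withOptions = Markdoc.parse(tokenizer.tokenize(partialSource));
const withDefaults = Markdoc.parse(partialSource); // default tokenizer
// The ASTs differ: only the first treats the annotation as a comment.
```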