5 changes: 2 additions & 3 deletions .gitignore
@@ -4,9 +4,6 @@ npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
 
-# Ignore generated export markdown files
-/public/md-exports/
-
 # Runtime data
 pids
 *.pid
@@ -96,6 +93,8 @@ public/page-data
 # tsbuildinfo file generated by CI
 tsconfig.tsbuildinfo
 
+# Ignore generated files
+/public/md-exports/
 public/mdx-images/*
 
 # yalc
2 changes: 1 addition & 1 deletion scripts/generate-md-exports.mjs
@@ -109,7 +109,7 @@ async function createWork() {
       })
     );
     continuationToken = response.NextContinuationToken;
-    for (const {Key, ETag} of response.Contents) {
+    for (const {Key, ETag} of response.Contents || []) {
      existingFilesOnR2.set(Key, ETag.slice(1, -1)); // Remove quotes from ETag
    }
   } while (continuationToken);
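A note on the one-line change above: S3's ListObjectsV2 omits the Contents field entirely when a page of results has no objects (for example, an empty bucket or prefix), so iterating it unguarded throws a TypeError ("not iterable") and the export run fails. A minimal sketch of the pagination pattern with the guard in place; the client setup and bucket name are illustrative, not taken from the script:

// Sketch, not part of the diff: paginating ListObjectsV2 when Contents may be absent.
import {ListObjectsV2Command, S3Client} from '@aws-sdk/client-s3';

const s3Client = new S3Client({region: 'auto'}); // hypothetical client configuration

async function listAllETags(bucket) {
  const etagsByKey = new Map();
  let continuationToken;
  do {
    const response = await s3Client.send(
      new ListObjectsV2Command({Bucket: bucket, ContinuationToken: continuationToken})
    );
    continuationToken = response.NextContinuationToken;
    // Contents is undefined when the listing page is empty, hence the fallback.
    for (const {Key, ETag} of response.Contents || []) {
      etagsByKey.set(Key, ETag.slice(1, -1)); // strip the surrounding quotes from the ETag
    }
  } while (continuationToken);
  return etagsByKey;
}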
93 changes: 89 additions & 4 deletions src/mdx.ts
@@ -1,10 +1,20 @@
-import {cache} from 'react';
 import matter from 'gray-matter';
 import {s} from 'hastscript';
 import yaml from 'js-yaml';
 import {bundleMDX} from 'mdx-bundler';
-import {access, opendir, readFile} from 'node:fs/promises';
+import {BinaryLike, createHash} from 'node:crypto';
+import {createReadStream, createWriteStream, mkdirSync} from 'node:fs';
+import {access, cp, mkdir, opendir, readFile} from 'node:fs/promises';
 import path from 'node:path';
+// @ts-expect-error ts(2305) -- For some reason "compose" is not recognized in the types
+import {compose, Readable} from 'node:stream';
+import {json} from 'node:stream/consumers';
+import {pipeline} from 'node:stream/promises';
+import {
+  constants as zlibConstants,
+  createBrotliCompress,
+  createBrotliDecompress,
+} from 'node:zlib';
 import {limitFunction} from 'p-limit';
 import rehypeAutolinkHeadings from 'rehype-autolink-headings';
 import rehypePresetMinify from 'rehype-preset-minify';
@@ -48,6 +58,33 @@ const root = process.cwd();
 // Functions which looks like AWS Lambda and we get `EMFILE` errors when trying to open
 // so many files at once.
 const FILE_CONCURRENCY_LIMIT = 200;
+const CACHE_COMPRESS_LEVEL = 4;
+const CACHE_DIR = path.join(root, '.next', 'cache', 'mdx-bundler');
+mkdirSync(CACHE_DIR, {recursive: true});
+
+const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');
+
+async function readCacheFile<T>(file: string): Promise<T> {
+  const reader = createReadStream(file);
+  const decompressor = createBrotliDecompress();
+
+  return (await json(compose(reader, decompressor))) as T;
+}
+
+async function writeCacheFile(file: string, data: string) {
+  await pipeline(
+    Readable.from(data),
+    createBrotliCompress({
+      chunkSize: 32 * 1024,
+      params: {
+        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
+        [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL,
+        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length,
+      },
+    }),
+    createWriteStream(file)
+  );
+}
 
 function formatSlug(slug: string) {
   return slug.replace(/\.(mdx|md)/, '');
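Taken together, the two helpers above implement a small compressed JSON cache: writeCacheFile pipes a JSON string through a Brotli compressor (text mode, quality 4, with a size hint) into a file, and readCacheFile streams the file back through a decompressor and parses it with json() from node:stream/consumers. A hedged round-trip sketch; the entry type here is hypothetical and only stands in for the real SlugFile shape:

// Sketch, not part of the diff: round-tripping a value through the cache helpers above.
interface DemoCacheEntry {
  mdxSource: string;
  frontMatter: Record<string, unknown>;
}

async function demoRoundTrip(file: string, entry: DemoCacheEntry): Promise<boolean> {
  // Serialize once so BROTLI_PARAM_SIZE_HINT (data.length) matches what is actually written.
  await writeCacheFile(file, JSON.stringify(entry));

  // json() consumes the decompressed stream and parses it in a single step.
  const restored = await readCacheFile<DemoCacheEntry>(file);
  return restored.mdxSource === entry.mdxSource;
}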
@@ -484,6 +521,36 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     );
   }
 
+  let cacheKey: string | null = null;
+  let cacheFile: string | null = null;
+  let assetsCacheDir: string | null = null;
+  const outdir = path.join(root, 'public', 'mdx-images');
+  await mkdir(outdir, {recursive: true});
+
+  if (process.env.CI) {
+    cacheKey = md5(source);
+    cacheFile = path.join(CACHE_DIR, `${cacheKey}.br`);
+    assetsCacheDir = path.join(CACHE_DIR, cacheKey);
+
+    try {
+      const [cached, _] = await Promise.all([
+        readCacheFile<SlugFile>(cacheFile),
+        cp(assetsCacheDir, outdir, {recursive: true}),
+      ]);
+      return cached;
+    } catch (err) {
+      if (
+        err.code !== 'ENOENT' &&
+        err.code !== 'ABORT_ERR' &&
+        err.code !== 'Z_BUF_ERROR'
+      ) {
+        // If cache is corrupted, ignore and proceed
+        // eslint-disable-next-line no-console
+        console.warn(`Failed to read MDX cache: ${cacheFile}`, err);
+      }
+    }
+  }
+
   process.env.ESBUILD_BINARY_PATH = path.join(
     root,
     'node_modules',
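One detail of the block above: the compressed bundle and its copied image assets are treated as a single cache entry. Because the read and the cp run inside one Promise.all, a missing assets directory (ENOENT), an aborted stream (ABORT_ERR), or a truncated Brotli file (Z_BUF_ERROR) rejects the whole lookup and the function falls through to a normal rebuild; only other errors are logged. A sketch of that miss-versus-corruption split, using a hypothetical helper name:

// Sketch, not part of the diff: the same "expected miss vs. real problem" triage as the catch block above.
const EXPECTED_CACHE_MISS_CODES = new Set(['ENOENT', 'ABORT_ERR', 'Z_BUF_ERROR']);

function isExpectedCacheMiss(err: unknown): boolean {
  const code = (err as NodeJS.ErrnoException | undefined)?.code;
  return code !== undefined && EXPECTED_CACHE_MISS_CODES.has(code);
}

// Only unexpected failures deserve a warning; plain misses just trigger a rebuild.
// if (!isExpectedCacheMiss(err)) console.warn(`Failed to read MDX cache: ${cacheFile}`, err);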
@@ -579,7 +646,7 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
      };
      // Set the `outdir` to a public location for this bundle.
      // this where this images will be copied
-      options.outdir = path.join(root, 'public', 'mdx-images');
+      options.outdir = assetsCacheDir || outdir;
 
      // Set write to true so that esbuild will output the files.
      options.write = true;
@@ -609,12 +676,30 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     },
   };
 
+  if (assetsCacheDir && cacheFile) {
+    await cp(assetsCacheDir, outdir, {recursive: true});
+    writeCacheFile(cacheFile, JSON.stringify(resultObj)).catch(e => {
+      // eslint-disable-next-line no-console
+      console.warn(`Failed to write MDX cache: ${cacheFile}`, e);
+    });
+  }
+
   return resultObj;
 }
 
+const fileBySlugCache = new Map<string, Promise<SlugFile>>();
+
 /**
  * Cache the result of {@link getFileBySlug}.
  *
  * This is useful for performance when rendering the same file multiple times.
  */
-export const getFileBySlugWithCache = cache(getFileBySlug);
+export function getFileBySlugWithCache(slug: string): Promise<SlugFile> {
+  let cached = fileBySlugCache.get(slug);
+  if (!cached) {
+    cached = getFileBySlug(slug);
+    fileBySlugCache.set(slug, cached);
+  }
+
+  return cached;
+}
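The final hunk swaps React's per-render cache() for a module-level Map keyed by slug. Two effects worth noting: the memo now lives for the whole build process rather than a single render pass, and because the Promise itself is stored, concurrent requests for the same slug share one in-flight bundling call instead of starting a second. A generic sketch of the pattern (names are illustrative, not from the PR); one trade-off of this shape is that a rejected promise stays cached until the process exits:

// Sketch, not part of the diff: promise memoization keyed by a single argument.
function memoizeAsync<A, R>(fn: (arg: A) => Promise<R>): (arg: A) => Promise<R> {
  const memo = new Map<A, Promise<R>>();
  return arg => {
    let pending = memo.get(arg);
    if (!pending) {
      pending = fn(arg); // first caller kicks off the work
      memo.set(arg, pending); // later callers await the same in-flight promise
    }
    return pending;
  };
}

// e.g. const getFileBySlugMemoized = memoizeAsync(getFileBySlug);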