Merged
24 changes: 20 additions & 4 deletions next.config.ts
@@ -5,22 +5,38 @@ import {redirects} from './redirects.js';

 const outputFileTracingExcludes = process.env.NEXT_PUBLIC_DEVELOPER_DOCS
   ? {
-      '/**/*': ['./.git/**/*', './apps/**/*', 'docs/**/*'],
+      '/**/*': [
+        '**/*.map',
+        './.git/**/*',
+        './apps/**/*',
+        './.next/cache/mdx-bundler/**/*',
+        './.next/cache/md-exports/**/*',
+        'docs/**/*',
+      ],
     }
   : {
       '/**/*': [
+        '**/*.map',
         './.git/**/*',
+        './.next/cache/mdx-bundler/**/*',
+        './.next/cache/md-exports/**/*',
         './apps/**/*',
         'develop-docs/**/*',
-        'node_modules/@esbuild/darwin-arm64',
+        'node_modules/@esbuild/*',
       ],
-      '/platform-redirect': ['**/*.gif', 'public/mdx-images/**/*', '*.pdf'],
+      '/platform-redirect': ['**/*.gif', 'public/mdx-images/**/*', '**/*.pdf'],
       '\\[\\[\\.\\.\\.path\\]\\]': [
Contributor:
@BYK - in my PR, when I was looking at this, I had some indications from Claude that this wasn't actually a path - just want to double-check whether you see this one as correct? I refactored it out in mine, but if yours is good, then I'm good with it.

Contributor:
specifically '\\[\\[\\.\\.\\.path\\]\\]'

Member Author:
I think it's this one: https://docs.sentry.io/platform-redirect/?

It also broke on one of my earlier PRs 😅

Member Author:
Oh, you mean the grep?

Contributor:
Yeah - looks like it isn't breaking anything now; if it comes up, we can revert and figure it out.

Member:
\\[\\[\\.\\.\\.path\\]\\] maps to [[...path]], which is the catchall route for every docs content page 👍

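For illustration, the key under discussion is the literal catch-all segment [[...path]] with its special characters backslash-escaped so the matcher does not interpret them. A minimal sketch of deriving that key (an editorial example, not code from the PR; it assumes regex-style escaping of brackets and dots):

const routeSegment = '[[...path]]';
// Escape regex-special characters so the literal segment can be used as a
// matcher key; '\\$&' prefixes each matched character with a backslash.
const escapedKey = routeSegment.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
console.log(escapedKey); // \[\[\.\.\.path\]\] - written '\\[\\[\\.\\.\\.path\\]\\]' as a string literal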
         'docs/**/*',
         'node_modules/prettier/plugins',
         'node_modules/rollup/dist',
       ],
-      'sitemap.xml': ['docs/**/*', 'public/mdx-images/**/*', '*.gif', '*.pdf', '*.png'],
+      'sitemap.xml': [
+        'docs/**/*',
+        'public/mdx-images/**/*',
+        '**/*.gif',
+        '**/*.pdf',
+        '**/*.png',
+      ],
     };
 
 if (
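For readers outside the PR: outputFileTracingExcludes maps route matchers to glob patterns that Next.js drops from each route's traced server output, which is what keeps caches, source maps, and docs sources out of the deployed bundle. A minimal sketch of how such an object is wired into the config (assuming a Next.js version where the option is top-level; older releases nest it under experimental):

import type {NextConfig} from 'next';

// Editorial sketch: keys match routes, values are globs excluded from the
// traced output of those routes.
const nextConfig: NextConfig = {
  outputFileTracingExcludes: {
    '/**/*': ['**/*.map', './.git/**/*'],
  },
};

export default nextConfig;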
6 changes: 2 additions & 4 deletions scripts/generate-md-exports.mjs
@@ -283,9 +283,7 @@ async function processTaskList({id, tasks, cacheDir, noCache}) {
       const fileHash = md5(data);
       if (r2Hash !== fileHash) {
         r2CacheMisses.push(relativePath);
-        console.log(
-          `📤 Worker[${id}]: Uploading ${relativePath} to R2, hash mismatch: ${r2Hash} !== ${fileHash}`
-        );
+
         await uploadToCFR2(s3Client, relativePath, data);
       }
     }
@@ -296,7 +294,7 @@ async function processTaskList({id, tasks, cacheDir, noCache}) {
   const success = tasks.length - failedTasks.length;
   if (r2CacheMisses.length / tasks.length > 0.1) {
     console.warn(
-      `⚠️ Worker[${id}]: More than 10% of files had a different hash on R2, this might indicate a problem with the cache or the generation process.`
+      `⚠️ Worker[${id}]: More than 10% of files had a different hash on R2, this might indicate a problem with the generation process.`
     );
   } else if (r2CacheMisses.length > 0) {
     console.log(
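The hunk above rests on two small pieces of logic: an md5 content hash compared against the hash stored in R2, and a warning once more than 10% of a batch misses. A hedged sketch of that logic in isolation (isR2CacheMiss and missRatioLooksSuspicious are illustrative names; the script itself inlines these checks):

import {createHash, type BinaryLike} from 'node:crypto';

const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');

// Upload only when the remote copy's hash differs from the freshly built file.
function isR2CacheMiss(r2Hash: string | undefined, data: Buffer): boolean {
  return r2Hash !== md5(data);
}

// A miss ratio above 10% hints at a systemic cache or generation problem
// rather than a handful of genuinely changed files.
function missRatioLooksSuspicious(misses: number, total: number): boolean {
  return total > 0 && misses / total > 0.1;
}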
58 changes: 0 additions & 58 deletions src/mdx.ts
@@ -1,22 +1,10 @@
-import {BinaryLike, createHash} from 'crypto';
-
 import {cache} from 'react';
 import matter from 'gray-matter';
 import {s} from 'hastscript';
 import yaml from 'js-yaml';
 import {bundleMDX} from 'mdx-bundler';
-import {createReadStream, createWriteStream, mkdirSync} from 'node:fs';
 import {access, opendir, readFile} from 'node:fs/promises';
 import path from 'node:path';
-// @ts-expect-error ts(2305) -- For some reason "compose" is not recognized in the types
-import {compose, Readable} from 'node:stream';
-import {json} from 'node:stream/consumers';
-import {pipeline} from 'node:stream/promises';
-import {
-  constants as zlibConstants,
-  createBrotliCompress,
-  createBrotliDecompress,
-} from 'node:zlib';
 import {limitFunction} from 'p-limit';
 import rehypeAutolinkHeadings from 'rehype-autolink-headings';
 import rehypePresetMinify from 'rehype-preset-minify';
@@ -60,33 +48,6 @@ const root = process.cwd();
 // Functions which looks like AWS Lambda and we get `EMFILE` errors when trying to open
 // so many files at once.
 const FILE_CONCURRENCY_LIMIT = 200;
-const CACHE_COMPRESS_LEVEL = 4;
-const CACHE_DIR = path.join(root, '.next', 'cache', 'mdx-bundler');
-mkdirSync(CACHE_DIR, {recursive: true});
-
-const md5 = (data: BinaryLike) => createHash('md5').update(data).digest('hex');
-
-async function readCacheFile<T>(file: string): Promise<T> {
-  const reader = createReadStream(file);
-  const decompressor = createBrotliDecompress();
-
-  return (await json(compose(reader, decompressor))) as T;
-}
-
-async function writeCacheFile(file: string, data: string) {
-  await pipeline(
-    Readable.from(data),
-    createBrotliCompress({
-      chunkSize: 32 * 1024,
-      params: {
-        [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
-        [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL,
-        [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length,
-      },
-    }),
-    createWriteStream(file)
-  );
-}
-
 function formatSlug(slug: string) {
   return slug.replace(/\.(mdx|md)/, '');
@@ -523,20 +484,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     );
   }
 
-  const cacheKey = md5(source);
-  const cacheFile = path.join(CACHE_DIR, cacheKey);
-
-  try {
-    const cached = await readCacheFile<SlugFile>(cacheFile);
-    return cached;
-  } catch (err) {
-    if (err.code !== 'ENOENT' && err.code !== 'ABORT_ERR') {
-      // If cache is corrupted, ignore and proceed
-      // eslint-disable-next-line no-console
-      console.warn(`Failed to read MDX cache: ${cacheFile}`, err);
-    }
-  }
-
   process.env.ESBUILD_BINARY_PATH = path.join(
     root,
     'node_modules',
@@ -662,11 +609,6 @@ export async function getFileBySlug(slug: string): Promise<SlugFile> {
     },
   };
 
-  writeCacheFile(cacheFile, JSON.stringify(resultObj)).catch(e => {
-    // eslint-disable-next-line no-console
-    console.warn(`Failed to write MDX cache: ${cacheFile}`, e);
-  });
-
   return resultObj;
 }
