Merged

27 commits
25baf2b
ci(build): Try to cache mdx bundler
BYK Jun 23, 2025
ac13fc3
asyncify for speed?
BYK Jun 23, 2025
8779295
Merge branch 'master' into byk/ci/mdx-cache
BYK Jun 23, 2025
ce8368a
fix broken imports
BYK Jun 23, 2025
8bf9f4b
[getsentry/action-github-commit] Auto commit
getsantry[bot] Jun 23, 2025
871e0fc
don't swallow all errors from access()
BYK Jun 23, 2025
61c73a3
brotli to the rescue!
BYK Jun 23, 2025
3862de8
withResolvers shim
BYK Jun 23, 2025
f529f6a
[getsentry/action-github-commit] Auto commit
getsantry[bot] Jun 23, 2025
39f9869
revert shim as we are on n22 now
BYK Jun 23, 2025
0673c54
try without a cache version
BYK Jun 24, 2025
07062bb
Merge branch 'master' into byk/ci/mdx-cache
BYK Jun 24, 2025
6c1a7fc
Merge branch 'master' into byk/ci/mdx-cache
BYK Jun 24, 2025
54fd866
add some cache diagnostics
BYK Jun 24, 2025
eaf2bcf
finally, cache hits!
BYK Jun 24, 2025
7211307
case-insensitive script tag removal
BYK Jun 24, 2025
08df8fc
do streams correctlyO
BYK Jun 24, 2025
3887c43
Merge branch 'master' into byk/ci/mdx-cache
BYK Jun 24, 2025
a42bbba
limit caching to CI
BYK Jun 24, 2025
18d6ea6
fix incorrect import
BYK Jun 24, 2025
2c2dcc9
remove all caching logic from mdx
BYK Jun 24, 2025
6a1e895
simplification
BYK Jun 24, 2025
49c83e1
Revert "remove all caching logic from mdx"
BYK Jun 24, 2025
9ce08af
limit concurrency and try again
BYK Jun 24, 2025
b85ef27
[getsentry/action-github-commit] Auto commit
getsantry[bot] Jun 24, 2025
a93fa94
add crucial comment
BYK Jun 24, 2025
fcc9bac
mo cache mo free time
BYK Jun 25, 2025
16 changes: 0 additions & 16 deletions .babelrc.js.bak

This file was deleted.

2 changes: 1 addition & 1 deletion app/sitemap.ts
@@ -5,7 +5,7 @@ import {getDevDocsFrontMatter, getDocsFrontMatter} from 'sentry-docs/mdx';

export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
  if (isDeveloperDocs) {
-    const docs = getDevDocsFrontMatter();
+    const docs = await getDevDocsFrontMatter();
    const baseUrl = 'https://develop.sentry.dev';
    return docsToSitemap(docs, baseUrl);
  }
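Context for this one-word diff: `getDevDocsFrontMatter` became async in this PR, since its directory walk now goes through the promise-based `readdir` in `src/files.ts` (see the last file below), so every call site gains an `await`. A rough sketch of the new shape — the body here is illustrative, not the actual implementation in `sentry-docs/mdx`:

```js
// Illustrative sketch only; the real function lives in sentry-docs/mdx.
async function getDevDocsFrontMatter() {
  // The directory walk is now promise-based, hence the async signature.
  const files = await getAllFilesRecursively('develop-docs');
  // readFrontMatter is a hypothetical stand-in for the actual MDX parsing.
  return Promise.all(files.filter(f => f.endsWith('.mdx')).map(readFrontMatter));
}
```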
2 changes: 0 additions & 2 deletions docs/product/explore/session-replay/web/index.mdx
@@ -4,8 +4,6 @@ sidebar_order: 10
description: "Learn about Session Replay and its video-like reproductions of user interactions, which can help you see when users are frustrated and build a better web experience."
---

-<Include name="feature-stage-beta-session-replay.mdx" />
-
Author comment (BYK): This file did not exist at all.

Session Replay allows you to see video-like reproductions of user sessions which can help you understand what happened before, during, and after an error or performance issue occurred. You'll be able to gain deeper debugging context into issues so that you can reproduce and resolve problems faster without the guesswork. As you play back each session, you'll be able to see every user interaction in relation to network requests, DOM events, and console messages. It’s effectively like having [DevTools](https://developer.chrome.com/docs/devtools/overview/) active in your production user sessions.

Replays are integrated with other parts of the Sentry product so you can see how the user experience is impacted by errors and slow transactions. You'll see session replays associated with error events on the [Issue Details](/product/issues/issue-details/) page, and those associated with slow transactions on the [Transaction Summary](/product/insights/overview/transaction-summary/) page. For [backend error replays](/product/explore/session-replay/web/getting-started/#replays-for-backend-errors), any contributing backend errors will be included in the replay's timeline, [breadcrumbs](https://docs.sentry.io/product/issues/issue-details/breadcrumbs/), and errors.
2 changes: 0 additions & 2 deletions docs/product/sentry-basics/performance-monitoring.mdx
@@ -4,8 +4,6 @@ sidebar_order: 1
description: "Understand and monitor how your application performs in production. Track key metrics, analyze bottlenecks, and resolve performance issues with distributed tracing, detailed transaction data, and automated issue detection."
---

-<Include name="performance-moving.mdx" />
-
Author comment (BYK): This file did not exist at all.

In many tools, Performance Monitoring is just about tracking a few key metrics on your web pages. Sentry takes a different approach. By setting up [Tracing](/concepts/key-terms/tracing/), Sentry captures detailed performance data for every transaction in your entire application stack and automatically presents it in a variety of easy-to-use but powerful features so you can rapidly identify and resolve performance issues as they happen - all in one place.

<Alert>
8 changes: 5 additions & 3 deletions package.json
@@ -78,6 +78,7 @@
    "next-plausible": "^3.12.4",
    "next-themes": "^0.3.0",
    "nextjs-toploader": "^1.6.6",
+    "p-limit": "^6.2.0",
    "platformicons": "^8.0.4",
    "prism-sentry": "^1.0.2",
    "query-string": "^6.13.1",
@@ -116,7 +117,7 @@
    "@tailwindcss/forms": "^0.5.7",
    "@tailwindcss/typography": "^0.5.10",
    "@types/dompurify": "3.0.5",
-    "@types/node": "^20",
+    "@types/node": "^22",
    "@types/react": "18.3.12",
    "@types/react-dom": "18.3.1",
    "@types/ws": "^8.5.10",
@@ -140,10 +141,11 @@
  },
  "resolutions": {
    "dompurify": "3.2.4",
-    "@types/dompurify": "3.0.5"
+    "@types/dompurify": "3.0.5",
+    "@types/node": "^22"
  },
  "volta": {
-    "node": "20.11.0",
+    "node": "22.16.0",
    "yarn": "1.22.22"
  }
}
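Two of these dependency changes carry behavior, not just housekeeping: `p-limit` backs the "limit concurrency and try again" commit, and the Node 20 → 22 bump (volta plus `@types/node`) is what allowed the `Promise.withResolvers` shim to be reverted, since Node 22 ships it natively. For reference, a minimal sketch of how a `p-limit` concurrency cap is typically wired up — `convert` and `tasks` are illustrative stand-ins, not code from this PR:

```js
import pLimit from 'p-limit';

// Cap the number of in-flight conversions at 8.
const limit = pLimit(8);

// tasks: [{sourcePath, targetPath}, ...]; convert() stands in for the
// per-file HTML-to-markdown work done in scripts/generate-md-exports.mjs.
const results = await Promise.all(tasks.map(task => limit(() => convert(task))));
```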
@@ -19,9 +19,7 @@ If you want to configure source maps to upload manually, follow the guide for yo

### Guides for Source Maps

-- <PlatformLink to="/sourcemaps/uploading/typescript/">
-    TypeScript (tsc)
-  </PlatformLink>
+- <PlatformLink to="/sourcemaps/uploading/typescript/">TypeScript (tsc)</PlatformLink>

<Alert>
  If you're using a bundler like Webpack, Vite, Rollup, or Esbuild, use the
@@ -7,6 +7,6 @@ If you can't find the tool of your choice in the list below, we recommend you ch

</Alert>

-<Include name="sourcemaps/overview/javascript.cloudflare.mdx" />
+<Include name="../platform-includes/sourcemaps/overview/javascript.cloudflare.mdx" />

Author comment (BYK): This include was simply being ignored as it could not be found. With the new system, this causes a build error, so I fixed it. (Check current vs fixed.)

<PageGrid />
4 changes: 2 additions & 2 deletions scripts/algolia.ts
@@ -64,9 +64,9 @@ indexAndUpload();
async function indexAndUpload() {
  // the page front matters are the source of truth for the static doc routes
  // as they are used directly by generateStaticParams() on [[..path]] page
-  const pageFrontMatters = isDeveloperDocs
+  const pageFrontMatters = await (isDeveloperDocs
    ? getDevDocsFrontMatter()
-    : await getDocsFrontMatter();
+    : getDocsFrontMatter());
  const records = await generateAlogliaRecords(pageFrontMatters);
  console.log('🔥 Generated %d new Algolia records.', records.length);
  const existingRecordIds = await fetchExistingRecordIds(index);
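The parenthesization matters here: before, only `getDocsFrontMatter()` returned a Promise, so the `await` lived on one branch of the ternary; now both functions are async, so a single `await` wraps the whole expression. Desugared, the new form reads:

```js
// Equivalent, more explicit version of the awaited ternary above.
let pageFrontMatters;
if (isDeveloperDocs) {
  pageFrontMatters = await getDevDocsFrontMatter();
} else {
  pageFrontMatters = await getDocsFrontMatter();
}
```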
148 changes: 98 additions & 50 deletions scripts/generate-md-exports.mjs
@@ -1,28 +1,40 @@
#!/usr/bin/env node

/* eslint-disable no-console */
import {selectAll} from 'hast-util-select';
import {createHash} from 'node:crypto';
-import {constants as fsConstants, existsSync} from 'node:fs';
-import {copyFile, mkdir, opendir, readFile, rm, writeFile} from 'node:fs/promises';
+import {createReadStream, createWriteStream, existsSync} from 'node:fs';
+import {mkdir, opendir, readFile, rm} from 'node:fs/promises';
import {cpus} from 'node:os';
import * as path from 'node:path';
+import {Readable} from 'node:stream';
+import {pipeline} from 'node:stream/promises';
import {fileURLToPath} from 'node:url';
import {isMainThread, parentPort, Worker, workerData} from 'node:worker_threads';
+import {
+  constants as zlibConstants,
+  createBrotliCompress,
+  createBrotliDecompress,
+} from 'node:zlib';
import rehypeParse from 'rehype-parse';
import rehypeRemark from 'rehype-remark';
import remarkGfm from 'remark-gfm';
import remarkStringify from 'remark-stringify';
import {unified} from 'unified';
import {remove} from 'unist-util-remove';

+const CACHE_COMPRESS_LEVEL = 4;
+
function taskFinishHandler(data) {
  if (data.failedTasks.length === 0) {
-    console.log(`✅ Worker[${data.id}]: ${data.success} files successfully.`);
-  } else {
-    hasErrors = true;
-    console.error(`❌ Worker[${data.id}]: ${data.failedTasks.length} files failed:`);
-    console.error(data.failedTasks);
+    console.log(
+      `💰 Worker[${data.id}]: Cache hits: ${data.cacheHits} (${Math.round((data.cacheHits / data.success) * 100)}%)`
+    );
+    console.log(`✅ Worker[${data.id}]: converted ${data.success} files successfully.`);
+    return false;
  }
+  console.error(`❌ Worker[${data.id}]: ${data.failedTasks.length} files failed:`);
+  console.error(data.failedTasks);
+  return true;
}

@@ -37,20 +49,21 @@ async function createWork() {
  const INPUT_DIR = path.join(root, '.next', 'server', 'app');
  const OUTPUT_DIR = path.join(root, 'public', 'md-exports');

-  const CACHE_VERSION = 1;
-  const CACHE_DIR = path.join(root, '.next', 'cache', 'md-exports', `v${CACHE_VERSION}`);
-  const noCache = !existsSync(CACHE_DIR);
-  if (noCache) {
-    await mkdir(CACHE_DIR, {recursive: true});
-  }
-
  console.log(`🚀 Starting markdown generation from: ${INPUT_DIR}`);
  console.log(`📁 Output directory: ${OUTPUT_DIR}`);

  // Clear output directory
  await rm(OUTPUT_DIR, {recursive: true, force: true});
  await mkdir(OUTPUT_DIR, {recursive: true});

+  const CACHE_DIR = path.join(root, '.next', 'cache', 'md-exports');
+  console.log(`💰 Cache directory: ${CACHE_DIR}`);
+  const noCache = !existsSync(CACHE_DIR);
+  if (noCache) {
+    console.log(`ℹ️ No cache directory found, this will take a while...`);
+    await mkdir(CACHE_DIR, {recursive: true});
+  }
+
  // On a 16-core machine, 8 workers were optimal (and slightly faster than 16)
  const numWorkers = Math.max(Math.floor(cpus().length / 2), 2);
  const workerTasks = new Array(numWorkers).fill(null).map(() => []);
@@ -86,7 +99,7 @@ async function createWork() {
      workerData: {id, noCache, cacheDir: CACHE_DIR, tasks: workerTasks[id]},
    });
    let hasErrors = false;
-    worker.on('message', taskFinishHandler);
+    worker.on('message', data => (hasErrors = taskFinishHandler(data)));
    worker.on('error', reject);
    worker.on('exit', code => {
      if (code !== 0) {
@@ -104,7 +117,11 @@ async function createWork() {
      cacheDir: CACHE_DIR,
      tasks: workerTasks[workerTasks.length - 1],
      id: workerTasks.length - 1,
-    }).then(taskFinishHandler)
+    }).then(data => {
+      if (taskFinishHandler(data)) {
+        throw new Error(`Worker[${data.id}] had some errors.`);
+      }
+    })
  );

  await Promise.all(workerPromises);
@@ -116,62 +133,93 @@
const md5 = data => createHash('md5').update(data).digest('hex');

async function genMDFromHTML(source, target, {cacheDir, noCache}) {
-  const text = await readFile(source, {encoding: 'utf8'});
+  const text = (await readFile(source, {encoding: 'utf8'}))
+    // Remove all script tags, as they are not needed in markdown
+    // and they are not stable across builds, causing cache misses
+    .replace(/<script[^>]*>[\s\S]*?<\/script>/gi, '');
  const hash = md5(text);
  const cacheFile = path.join(cacheDir, hash);
  if (!noCache) {
    try {
-      await copyFile(cacheFile, target, fsConstants.COPYFILE_FICLONE);
-      return;
+      await pipeline(
+        createReadStream(cacheFile),
+        createBrotliDecompress(),
+        createWriteStream(target, {
+          encoding: 'utf8',
+        })
+      );
+
+      return true;
    } catch {
      // pass
    }
  }

-  await writeFile(
-    target,
-    String(
-      await unified()
-        .use(rehypeParse)
-        // Need the `main div > hgroup` selector for the headers
-        .use(() => tree => selectAll('main div > hgroup, div#main', tree))
-        // If we don't do this wrapping, rehypeRemark just returns an empty string -- yeah WTF?
-        .use(() => tree => ({
-          type: 'element',
-          tagName: 'div',
-          properties: {},
-          children: tree,
-        }))
-        .use(rehypeRemark, {
-          document: false,
-          handlers: {
-            // Remove buttons as they usually get confusing in markdown, especially since we use them as tab headers
-            button() {},
-          },
-        })
-        // We end up with empty inline code blocks, probably from some tab logic in the HTML, remove them
-        .use(() => tree => remove(tree, {type: 'inlineCode', value: ''}))
-        .use(remarkGfm)
-        .use(remarkStringify)
-        .process(text)
-    )
-  );
+  const data = String(
+    await unified()
+      .use(rehypeParse)
+      // Need the `main div > hgroup` selector for the headers
+      .use(() => tree => selectAll('main div > hgroup, div#main', tree))
+      // If we don't do this wrapping, rehypeRemark just returns an empty string -- yeah WTF?
+      .use(() => tree => ({
+        type: 'element',
+        tagName: 'div',
+        properties: {},
+        children: tree,
+      }))
+      .use(rehypeRemark, {
+        document: false,
+        handlers: {
+          // Remove buttons as they usually get confusing in markdown, especially since we use them as tab headers
+          button() {},
+        },
+      })
+      // We end up with empty inline code blocks, probably from some tab logic in the HTML, remove them
+      .use(() => tree => remove(tree, {type: 'inlineCode', value: ''}))
+      .use(remarkGfm)
+      .use(remarkStringify)
+      .process(text)
+  );
-  await copyFile(target, cacheFile, fsConstants.COPYFILE_FICLONE);
+  const reader = Readable.from(data);
+
+  await Promise.all([
+    pipeline(
+      reader,
+      createWriteStream(target, {
+        encoding: 'utf8',
+      })
+    ),
+    pipeline(
+      reader,
+      createBrotliCompress({
+        chunkSize: 32 * 1024,
+        params: {
+          [zlibConstants.BROTLI_PARAM_MODE]: zlibConstants.BROTLI_MODE_TEXT,
+          [zlibConstants.BROTLI_PARAM_QUALITY]: CACHE_COMPRESS_LEVEL,
+          [zlibConstants.BROTLI_PARAM_SIZE_HINT]: data.length,
+        },
+      }),
+      createWriteStream(cacheFile)
+    ).catch(err => console.warn('Error writing cache file:', err)),
+  ]);
+
+  return false;
}

async function processTaskList({id, tasks, cacheDir, noCache}) {
  const failedTasks = [];
+  let cacheHits = 0;
  for (const {sourcePath, targetPath} of tasks) {
    try {
-      await genMDFromHTML(sourcePath, targetPath, {
+      cacheHits += await genMDFromHTML(sourcePath, targetPath, {
        cacheDir,
        noCache,
      });
    } catch (error) {
      failedTasks.push({sourcePath, targetPath, error});
    }
  }
-  return {id, success: tasks.length - failedTasks.length, failedTasks};
+  return {id, success: tasks.length - failedTasks.length, failedTasks, cacheHits};
}
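The caching scheme distilled from this diff: the cache key is an md5 hash of the page HTML with `<script>` tags stripped (script contents change between otherwise-identical builds and would poison the key), and the cache value is the converted markdown, brotli-compressed at a deliberately low quality level to trade disk for CPU. A self-contained sketch of that round-trip, using only Node built-ins (file names are illustrative):

```js
import {createReadStream, createWriteStream} from 'node:fs';
import {Readable} from 'node:stream';
import {pipeline} from 'node:stream/promises';
import {constants as zlib, createBrotliCompress, createBrotliDecompress} from 'node:zlib';

// Store: compress a markdown string into a cache entry.
async function writeCacheEntry(cacheFile, markdown) {
  await pipeline(
    Readable.from(markdown),
    createBrotliCompress({
      params: {
        [zlib.BROTLI_PARAM_MODE]: zlib.BROTLI_MODE_TEXT, // tune for text input
        [zlib.BROTLI_PARAM_QUALITY]: 4, // fast; text still compresses well
        [zlib.BROTLI_PARAM_SIZE_HINT]: markdown.length,
      },
    }),
    createWriteStream(cacheFile)
  );
}

// Restore: decompress a cache entry straight into the output file.
async function readCacheEntry(cacheFile, target) {
  await pipeline(createReadStream(cacheFile), createBrotliDecompress(), createWriteStream(target));
}
```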
2 changes: 1 addition & 1 deletion src/docTree.ts
@@ -40,7 +40,7 @@ export function getDocsRootNode(): Promise<DocNode> {

async function getDocsRootNodeUncached(): Promise<DocNode> {
  return frontmatterToTree(
-    isDeveloperDocs ? getDevDocsFrontMatter() : await getDocsFrontMatter()
+    await (isDeveloperDocs ? getDevDocsFrontMatter() : getDocsFrontMatter())
  );
}
34 changes: 5 additions & 29 deletions src/files.ts
@@ -1,37 +1,13 @@
-import fs from 'fs';
+import {readdir} from 'fs/promises';
import path from 'path';

-// pipe two functions together
-function pipe<T, U, V>(f: (x: T) => U, g: (y: U) => V): (x: T) => V;
-// pipe three functions
-function pipe<T, U, V, W>(f: (x: T) => U, g: (y: U) => V, h: (z: V) => W): (x: T) => W;
-function pipe(...fns: Function[]) {
-  return x => fns.reduce((v, f) => f(v), x);
-}
-
-const map =
-  <T, U>(fn: (a: T) => U) =>
-  (input: T[]) =>
-    input.map(fn);
-
-const walkDir = (fullPath: string) => {
-  return fs.statSync(fullPath).isFile() ? fullPath : getAllFilesRecursively(fullPath);
-};
-
-const pathJoinPrefix = (prefix: string) => (extraPath: string) =>
-  path.join(prefix, extraPath);
-
/**
 * @returns Array of file paths
 */
-const getAllFilesRecursively = (folder: string): [string] => {
-  return pipe(
-    // yes, this arrow function is necessary to narrow down the readdirSync overload
-    (x: string) => fs.readdirSync(x),
-    map(pipe(pathJoinPrefix(folder), walkDir)),
-    // flattenArray
-    x => x.flat(Infinity)
-  )(folder) as [string];
+const getAllFilesRecursively = async (folder: string): Promise<string[]> => {
+  return (await readdir(folder, {withFileTypes: true, recursive: true}))
+    .filter(dirent => dirent.isFile())
+    .map(dirent => path.join(dirent.parentPath || dirent.path, dirent.name));
};

export default getAllFilesRecursively;
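Note the `dirent.parentPath || dirent.path` fallback: `parentPath` only landed in newer Node releases, while the older `path` property is deprecated but still populated, so the fallback keeps the walk working across Node versions. Callers now get a Promise instead of an array. A hypothetical call site, to show the new async contract — the import specifier is illustrative, not from this PR:

```js
// Hypothetical usage; the import path is a stand-in for the real alias.
import getAllFilesRecursively from 'sentry-docs/files';

const files = await getAllFilesRecursively('docs');
const mdxFiles = files.filter(file => file.endsWith('.mdx'));
console.log(`Found ${mdxFiles.length} MDX files`);
```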