diff --git a/Dockerfile b/Dockerfile
index c8abbbc3a30c..96da219a32d3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,7 +8,7 @@
 # ---------------------------------------------------------------
 # To update the sha:
 # https://github.com/github/gh-base-image/pkgs/container/gh-base-image%2Fgh-base-noble
-FROM ghcr.io/github/gh-base-image/gh-base-noble:20250805-204228-g50c20871f AS base
+FROM ghcr.io/github/gh-base-image/gh-base-noble:20250911-223345-ge6d335835 AS base
 
 # Install curl for Node install and determining the early access branch
 # Install git for cloning docs-early-access & translations repos
diff --git a/data/reusables/actions/larger-runners-table.md b/data/reusables/actions/larger-runners-table.md
index a23a1dc0edaf..51082c35cbcc 100644
--- a/data/reusables/actions/larger-runners-table.md
+++ b/data/reusables/actions/larger-runners-table.md
@@ -1,4 +1,4 @@
 | Runner Size | Architecture| Processor (CPU)| Memory (RAM) | Storage (SSD) | Workflow label |
 | ------------| ------------| -------------- | ------------- | ------------- |--------------------------------------------------------------------------------------------------------------------------------------------------|
-| Large | Intel | 12 | 30 GB | 14 GB | macos-latest-large, macos-13-large, macos-14-large [latest], macos-15-large |
-| XLarge | arm64 (M2) | 5 (+ 8 GPU hardware acceleration) | 14 GB | 14 GB | macos-latest-xlarge, macos-13-xlarge, macos-14-xlarge [latest], macos-15-xlarge |
+| Large | Intel | 12 | 30 GB | 14 GB | macos-latest-large, macos-13-large, macos-14-large, macos-15-large (latest) |
+| XLarge | arm64 (M2) | 5 (+ 8 GPU hardware acceleration) | 14 GB | 14 GB | macos-latest-xlarge, macos-13-xlarge, macos-14-xlarge, macos-15-xlarge (latest), macos-26-xlarge ({% data variables.release-phases.public_preview %}) |
diff --git a/data/reusables/actions/supported-github-runners.md b/data/reusables/actions/supported-github-runners.md
index 4de203d4268c..4709fd2132f6 100644
--- a/data/reusables/actions/supported-github-runners.md
+++ b/data/reusables/actions/supported-github-runners.md
@@ -76,9 +76,10 @@ For public repositories, jobs using the workflow labels shown in the table below
         14 GB
       </td>
       <td>
         arm64
       </td>
       <td>
-        <code>macos-latest</code>,
-        <code>macos-14</code>,
-        <code>macos-15</code>
+        <code>macos-latest</code>,
+        <code>macos-14</code>,
+        <code>macos-15</code>,
+        <code>macos-26</code> ({% data variables.release-phases.public_preview %})
@@ -142,9 +143,10 @@ For {% ifversion ghec %}internal and{% endif %} private repositories, jobs using
         14 GB
       </td>
       <td>
         arm64
       </td>
       <td>
-        <code>macos-latest</code>,
-        <code>macos-14</code>,
-        <code>macos-15</code>
+        <code>macos-latest</code>,
+        <code>macos-14</code>,
+        <code>macos-15</code>,
+        <code>macos-26</code> ({% data variables.release-phases.public_preview %})
diff --git a/data/reusables/copilot/grok-promo-period.md b/data/reusables/copilot/grok-promo-period.md
index 90a0ffc2783e..fbda965f2191 100644
--- a/data/reusables/copilot/grok-promo-period.md
+++ b/data/reusables/copilot/grok-promo-period.md
@@ -1 +1 @@
-> [!Important] Complimentary access for {% data variables.copilot.copilot_grok_code %} is available until 12 p.m. PDT on Wednesday, September 10, 2025. [Regular pricing](/copilot/reference/ai-models/supported-models#model-multipliers) applies after that point.
+> [!Important] Complimentary access to {% data variables.copilot.copilot_grok_code %} is continuing past the previously announced end time. A new end date has not been set. We may update or conclude this promotion at any time. [Regular pricing](/copilot/reference/ai-models/supported-models#model-multipliers) applies after the extension ends.
diff --git a/src/links/scripts/rendered-content-link-checker.ts b/src/links/scripts/rendered-content-link-checker.ts
index dacfe7009718..3218a712d14a 100755
--- a/src/links/scripts/rendered-content-link-checker.ts
+++ b/src/links/scripts/rendered-content-link-checker.ts
@@ -76,8 +76,12 @@ type Options = {
   bail?: boolean
   commentLimitToExternalLinks?: boolean
   actionContext?: any
+  concurrency?: number
 }
 
+// Default concurrency limit for URL requests
+const DEFAULT_CONCURRENCY_LIMIT = 3
+
 const STATIC_PREFIXES: Record<string, string> = {
   assets: path.resolve('assets'),
   public: path.resolve(path.join('src', 'graphql', 'data')),
@@ -114,6 +118,32 @@ const externalLinkCheckerDB = await JSONFilePreset(EXTERNAL_LINK_CHECKER_D
 
 type DBType = typeof externalLinkCheckerDB
 
+// Simple concurrency limiter
+async function limitConcurrency<T, R>(
+  items: T[],
+  asyncFn: (item: T) => Promise<R>,
+  limit: number = 3,
+): Promise<R[]> {
+  const results: Promise<R>[] = []
+  const executing = new Set<Promise<R>>()
+
+  for (const item of items) {
+    const promise = asyncFn(item).then((result) => {
+      executing.delete(promise)
+      return result
+    })
+
+    results.push(promise)
+    executing.add(promise)
+
+    if (executing.size >= limit) {
+      await Promise.race(executing)
+    }
+  }
+
+  return Promise.all(results)
+}
+
 // Given a number and a percentage, return the same number with a *percentage*
 // max change of making a bit larger or smaller.
 // E.g. `jitter(55, 10)` will return a value between `[55 - 55/10: 55 + 55/10]`
@@ -156,6 +186,7 @@ if (import.meta.url.endsWith(process.argv[1])) {
     REPORT_LABEL,
     EXTERNAL_SERVER_ERRORS_AS_WARNINGS,
     CHECK_ANCHORS,
+    CONCURRENCY,
   } = process.env
 
   const octokit = github()
@@ -193,6 +224,7 @@ if (import.meta.url.endsWith(process.argv[1])) {
     reportAuthor: REPORT_AUTHOR,
     actionContext: getActionContext(),
     externalServerErrorsAsWarning: EXTERNAL_SERVER_ERRORS_AS_WARNINGS,
+    concurrency: CONCURRENCY ? parseInt(CONCURRENCY, 10) : DEFAULT_CONCURRENCY_LIMIT,
   }
 
   if (opts.shouldComment || opts.createReport) {
@@ -238,6 +270,7 @@
  * externalServerErrorsAsWarning {boolean} - Treat >=500 errors or temporary request errors as warning
  * filter {Array} - strings to match the pages' relativePath
  * versions {Array} - only certain pages' versions (e.g. )
+ * concurrency {number} - Maximum number of concurrent URL requests (default: 3, env: CONCURRENCY)
  *
  */
 
@@ -263,6 +296,7 @@ async function main(
     reportRepository = 'github/docs-content',
     reportAuthor = 'docs-bot',
     reportLabel = 'broken link report',
+    concurrency = DEFAULT_CONCURRENCY_LIMIT,
   } = opts
 
   // Note! The reason we're using `warmServer()` in this script,
@@ -337,8 +371,9 @@ async function main(
   debugTimeStart(core, 'processPages')
   const t0 = new Date().getTime()
 
-  const flawsGroups = await Promise.all(
-    pages.map((page: Page) =>
+  const flawsGroups = await limitConcurrency(
+    pages,
+    (page: Page) =>
       processPage(
         core,
         page,
@@ -348,7 +383,7 @@
         externalLinkCheckerDB,
         versions as string[],
       ),
-    ),
+    concurrency, // Limit concurrent page checks
   )
   const t1 = new Date().getTime()
   debugTimeEnd(core, 'processPages')
@@ -653,14 +688,13 @@ async function processPage(
   versions: string[],
 ) {
   const { verbose, verboseUrl, bail } = opts
-  const allFlawsEach = await Promise.all(
-    page.permalinks
-      .filter((permalink) => {
-        return !versions.length || versions.includes(permalink.pageVersion)
-      })
-      .map((permalink) => {
-        return processPermalink(core, permalink, page, pageMap, redirects, opts, db)
-      }),
+  const filteredPermalinks = page.permalinks.filter((permalink) => {
+    return !versions.length || versions.includes(permalink.pageVersion)
+  })
+  const allFlawsEach = await limitConcurrency(
+    filteredPermalinks,
+    (permalink) => processPermalink(core, permalink, page, pageMap, redirects, opts, db),
+    opts.concurrency || DEFAULT_CONCURRENCY_LIMIT, // Limit concurrent permalink checks per page
   )
 
   const allFlaws = allFlawsEach.flat()
@@ -714,8 +748,9 @@ async function processPermalink(
   $('a[href]').each((i, link) => {
     links.push(link)
   })
-  const newFlaws: LinkFlaw[] = await Promise.all(
-    links.map(async (link) => {
+  const newFlaws: LinkFlaw[] = await limitConcurrency(
+    links,
+    async (link) => {
       const { href } = (link as cheerio.TagElement).attribs
 
       // The global cache can't be used for anchor links because they
@@ -756,7 +791,8 @@
           globalHrefCheckCache.set(href, flaw)
         }
       }
-    }),
+    },
+    opts.concurrency || DEFAULT_CONCURRENCY_LIMIT, // Limit concurrent link checks per permalink
   )
 
   for (const flaw of newFlaws) {
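For reviewers: a minimal standalone sketch of how the new `limitConcurrency` helper behaves. The helper body is copied from the patch above; the `fakeFetch` harness and URL list are hypothetical and exist only to show that at most `limit` tasks are in flight at once while results keep `Promise.all` ordering.

```ts
// Sketch only: the helper is verbatim from the patch; the demo harness is hypothetical.
async function limitConcurrency<T, R>(
  items: T[],
  asyncFn: (item: T) => Promise<R>,
  limit: number = 3,
): Promise<R[]> {
  const results: Promise<R>[] = []
  const executing = new Set<Promise<R>>()

  for (const item of items) {
    // Start the task; drop it from the in-flight set once it settles
    const promise = asyncFn(item).then((result) => {
      executing.delete(promise)
      return result
    })

    results.push(promise)
    executing.add(promise)

    // With `limit` tasks in flight, pause until any one of them finishes
    if (executing.size >= limit) {
      await Promise.race(executing)
    }
  }

  // Same result ordering as Promise.all over the original items
  return Promise.all(results)
}

// Hypothetical harness: 6 simulated requests, at most 2 in flight at a time
let inFlight = 0
let peak = 0

async function fakeFetch(url: string): Promise<string> {
  inFlight += 1
  peak = Math.max(peak, inFlight)
  await new Promise((resolve) => setTimeout(resolve, 25))
  inFlight -= 1
  return `checked ${url}`
}

const urls = ['a', 'b', 'c', 'd', 'e', 'f'].map((p) => `https://example.com/${p}`)
const checked = await limitConcurrency(urls, fakeFetch, 2)
console.log(checked.length, 'results; peak in-flight =', peak) // peak stays at 2
```

One design note: a rejection from `asyncFn` still propagates to the caller (via `Promise.race` or the final `Promise.all`), so error behavior is unchanged relative to the previous plain `Promise.all`; the patch only caps how many page, permalink, and link checks run simultaneously, defaulting to 3 and overridable through the `CONCURRENCY` environment variable.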