Skip to content

Commit f85983a

Browse files
author
Sarah Edwards
authored
Merge branch 'main' into ske-dep-api
2 parents 83c1d43 + df4fb53 commit f85983a

File tree

4,336 files changed

+252271
-38157
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

4,336 files changed

+252271
-38157
lines changed

.eslintrc.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ module.exports = {
1313
babelOptions: { configFile: './.babelrc' },
1414
sourceType: 'module',
1515
},
16-
ignorePatterns: ['tmp/*', '!/.*', '/.next/', 'script/bookmarklets/*', 'lib/sigsci.js'],
16+
ignorePatterns: ['tmp/*', '!/.*', '/.next/', 'script/bookmarklets/*'],
1717
rules: {
1818
'import/no-extraneous-dependencies': ['error', { packageDir: '.' }],
1919
},

.github/actions-scripts/content-changes-table-comment.js

Lines changed: 112 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,11 @@ if (!APP_URL) {
2020
throw new Error(`APP_URL environment variable not set`)
2121
}
2222

23+
// the max size of the comment (in bytes)
24+
// the action we use to post the comment caps out at about 144kb
25+
// see docs-engineering#1849 for more info
26+
const MAX_COMMENT_SIZE = 125000
27+
2328
const PROD_URL = 'https://docs.github.com'
2429
const octokit = github.getOctokit(GITHUB_TOKEN)
2530

@@ -39,78 +44,117 @@ const pathPrefix = 'content/'
3944
const articleFiles = files.filter(
4045
({ filename }) => filename.startsWith(pathPrefix) && !filename.endsWith('/index.md')
4146
)
42-
for (const file of articleFiles) {
43-
const sourceUrl = file.blob_url
44-
const fileName = file.filename.slice(pathPrefix.length)
45-
const fileUrl = fileName.slice(0, fileName.lastIndexOf('.'))
46-
47-
// get the file contents and decode them
48-
// this script is called from the main branch, so we need the API call to get the contents from the branch, instead
49-
const fileContents = await getContents(
50-
context.repo.owner,
51-
context.payload.repository.name,
52-
context.payload.pull_request.head.sha,
53-
file.filename
54-
)
55-
56-
// parse the frontmatter
57-
const { data } = parse(fileContents)
58-
59-
let contentCell = ''
60-
let previewCell = ''
61-
let prodCell = ''
62-
63-
if (file.status === 'added') contentCell = `New file: `
64-
contentCell += `[\`${fileName}\`](${sourceUrl})`
65-
66-
try {
67-
// the try/catch is needed because getApplicableVersions() returns either [] or throws an error when it can't parse the versions frontmatter
68-
// try/catch can be removed if docs-engineering#1821 is resolved
69-
// i.e. for feature based versioning, like ghae: 'issue-6337'
70-
const fileVersions = getApplicableVersions(data.versions)
71-
72-
for (const plan in allVersionShortnames) {
73-
// plan is the shortName (i.e., fpt)
74-
// allVersionShortNames[plan] is the planName (i.e., free-pro-team)
75-
76-
// walk by the plan names since we generate links differently for most plans
77-
const versions = fileVersions.filter((fileVersion) =>
78-
fileVersion.includes(allVersionShortnames[plan])
79-
)
80-
81-
if (versions.length === 1) {
82-
// for fpt, ghec, and ghae
8347

84-
if (versions.toString() === nonEnterpriseDefaultVersion) {
85-
// omit version from fpt url
86-
87-
previewCell += `[${plan}](${APP_URL}/${fileUrl})<br>`
88-
prodCell += `[${plan}](${PROD_URL}/${fileUrl})<br>`
89-
} else {
90-
// for non-versioned releases (ghae, ghec) use full url
48+
const lines = await Promise.all(
49+
articleFiles.map(async (file) => {
50+
const sourceUrl = file.blob_url
51+
const fileName = file.filename.slice(pathPrefix.length)
52+
const fileUrl = fileName.slice(0, fileName.lastIndexOf('.'))
53+
54+
// get the file contents and decode them
55+
// this script is called from the main branch, so we need the API call to get the contents from the branch, instead
56+
const fileContents = await getContents(
57+
context.repo.owner,
58+
context.payload.repository.name,
59+
// Can't get its content if it no longer exists.
60+
// Meaning, you'd get a 404 on the `getContents()` utility function.
61+
// So, to be able to get necessary meta data about what it *was*,
62+
// if it was removed, fall back to the 'base'.
63+
file.status === 'removed'
64+
? context.payload.pull_request.base.sha
65+
: context.payload.pull_request.head.sha,
66+
file.filename
67+
)
9168

92-
previewCell += `[${plan}](${APP_URL}/${versions}/${fileUrl})<br>`
93-
prodCell += `[${plan}](${PROD_URL}/${versions}/${fileUrl})<br>`
69+
// parse the frontmatter
70+
const { data } = parse(fileContents)
71+
72+
let contentCell = ''
73+
let previewCell = ''
74+
let prodCell = ''
75+
76+
if (file.status === 'added') contentCell = 'New file: '
77+
else if (file.status === 'removed') contentCell = 'Removed: '
78+
contentCell += `[\`${fileName}\`](${sourceUrl})`
79+
80+
try {
81+
// the try/catch is needed because getApplicableVersions() returns either [] or throws an error when it can't parse the versions frontmatter
82+
// try/catch can be removed if docs-engineering#1821 is resolved
83+
// i.e. for feature based versioning, like ghae: 'issue-6337'
84+
const fileVersions = getApplicableVersions(data.versions)
85+
86+
for (const plan in allVersionShortnames) {
87+
// plan is the shortName (i.e., fpt)
88+
// allVersionShortNames[plan] is the planName (i.e., free-pro-team)
89+
90+
// walk by the plan names since we generate links differently for most plans
91+
const versions = fileVersions.filter((fileVersion) =>
92+
fileVersion.includes(allVersionShortnames[plan])
93+
)
94+
95+
if (versions.length === 1) {
96+
// for fpt, ghec, and ghae
97+
98+
if (versions.toString() === nonEnterpriseDefaultVersion) {
99+
// omit version from fpt url
100+
101+
previewCell += `[${plan}](${APP_URL}/${fileUrl})<br>`
102+
prodCell += `[${plan}](${PROD_URL}/${fileUrl})<br>`
103+
} else {
104+
// for non-versioned releases (ghae, ghec) use full url
105+
106+
previewCell += `[${plan}](${APP_URL}/${versions}/${fileUrl})<br>`
107+
prodCell += `[${plan}](${PROD_URL}/${versions}/${fileUrl})<br>`
108+
}
109+
} else if (versions.length) {
110+
// for ghes releases, link each version
111+
112+
previewCell += `${plan}@ `
113+
prodCell += `${plan}@ `
114+
115+
versions.forEach((version) => {
116+
previewCell += `[${version.split('@')[1]}](${APP_URL}/${version}/${fileUrl}) `
117+
prodCell += `[${version.split('@')[1]}](${PROD_URL}/${version}/${fileUrl}) `
118+
})
119+
previewCell += '<br>'
120+
prodCell += '<br>'
94121
}
95-
} else if (versions.length) {
96-
// for ghes releases, link each version
97-
98-
previewCell += `${plan}@ `
99-
prodCell += `${plan}@ `
100-
101-
versions.forEach((version) => {
102-
previewCell += `[${version.split('@')[1]}](${APP_URL}/${version}/${fileUrl}) `
103-
prodCell += `[${version.split('@')[1]}](${PROD_URL}/${version}/${fileUrl}) `
104-
})
105-
previewCell += '<br>'
106-
prodCell += '<br>'
107122
}
123+
} catch (e) {
124+
console.error(
125+
`Version information for ${file.filename} couldn't be determined from its frontmatter.`
126+
)
108127
}
109-
} catch (e) {
110-
console.error(
111-
`Version information for ${file.filename} couldn't be determined from its frontmatter.`
112-
)
128+
let note = ''
129+
if (file.status === 'removed') {
130+
note = 'removed'
131+
// If the file was removed, the `previewCell` no longer makes sense
132+
// since it was based on looking at the base sha.
133+
previewCell = 'n/a'
134+
}
135+
136+
return `| ${contentCell} | ${previewCell} | ${prodCell} | ${note} |`
137+
})
138+
)
139+
140+
// this section limits the size of the comment
141+
const cappedLines = []
142+
let underMax = true
143+
144+
lines.reduce((previous, current, index, array) => {
145+
if (underMax) {
146+
if (previous + current.length > MAX_COMMENT_SIZE) {
147+
underMax = false
148+
cappedLines.push('**Note** There are more changes in this PR than we can show.')
149+
return previous
150+
}
151+
152+
cappedLines.push(array[index])
153+
return previous + current.length
113154
}
114-
markdownTable += `| ${contentCell} | ${previewCell} | ${prodCell} | |\n`
115-
}
155+
return previous
156+
}, markdownTable.length)
157+
158+
markdownTable += cappedLines.join('\n')
159+
116160
setOutput('changesTable', markdownTable)

.github/actions-scripts/enable-automerge.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ async function main() {
1717
const github = getOctokit(token)
1818
const pull = await github.rest.pulls.get({
1919
owner: org,
20-
repo: repo,
20+
repo,
2121
pull_number: parseInt(prNumber),
2222
})
2323

.github/actions-scripts/fr-add-docs-reviewers-requests.js

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -191,16 +191,16 @@ async function run() {
191191

192192
await graphql(updateProjectNextItemMutation, {
193193
project: projectID,
194-
statusID: statusID,
194+
statusID,
195195
statusValueID: readyForReviewID,
196-
datePostedID: datePostedID,
197-
reviewDueDateID: reviewDueDateID,
198-
contributorTypeID: contributorTypeID,
199-
contributorType: contributorType,
200-
sizeTypeID: sizeTypeID,
196+
datePostedID,
197+
reviewDueDateID,
198+
contributorTypeID,
199+
contributorType,
200+
sizeTypeID,
201201
sizeType: '', // Although we aren't populating size, we are passing the variable so that we can use the shared mutation function
202-
featureID: featureID,
203-
authorID: authorID,
202+
featureID,
203+
authorID,
204204
headers: {
205205
authorization: `token ${process.env.TOKEN}`,
206206
'GraphQL-Features': 'projects_next_graphql',
Lines changed: 142 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
#!/usr/bin/env node
2+
3+
import fs from 'fs'
4+
import github from '@actions/github'
5+
6+
const OPTIONS = Object.fromEntries(
7+
['BASE', 'BODY_FILE', 'GITHUB_TOKEN', 'HEAD', 'LANGUAGE', 'TITLE', 'GITHUB_REPOSITORY'].map(
8+
(envVarName) => {
9+
const envVarValue = process.env[envVarName]
10+
if (!envVarValue) {
11+
throw new Error(`You must supply a ${envVarName} environment variable`)
12+
}
13+
return [envVarName, envVarValue]
14+
}
15+
)
16+
)
17+
18+
if (!process.env.GITHUB_REPOSITORY) {
19+
throw new Error('GITHUB_REPOSITORY environment variable not set')
20+
}
21+
22+
const RETRY_STATUSES = [
23+
422, // Retry the operation if the PR already exists
24+
502, // Retry the operation if the API responds with a `502 Bad Gateway` error.
25+
]
26+
const RETRY_ATTEMPTS = 3
27+
const {
28+
// One of the default environment variables provided by Actions.
29+
GITHUB_REPOSITORY,
30+
31+
// These are passed in from the step in the workflow file.
32+
TITLE,
33+
BASE,
34+
HEAD,
35+
LANGUAGE,
36+
BODY_FILE,
37+
GITHUB_TOKEN,
38+
} = OPTIONS
39+
const [OWNER, REPO] = GITHUB_REPOSITORY.split('/')
40+
41+
const octokit = github.getOctokit(GITHUB_TOKEN)
42+
43+
/**
44+
* @param {object} config Configuration options for finding the PR.
45+
* @returns {Promise<number | undefined>} The PR number.
46+
*/
47+
async function findPullRequestNumber(config) {
48+
// Get a list of PRs and see if one already exists.
49+
const { data: listOfPullRequests } = await octokit.rest.pulls.list({
50+
owner: config.owner,
51+
repo: config.repo,
52+
head: `${config.owner}:${config.head}`,
53+
})
54+
55+
return listOfPullRequests[0]?.number
56+
}
57+
58+
/**
59+
* When this file was first created, we only introduced support for creating a pull request for some translation batch.
60+
* However, some of our first workflow runs failed during the pull request creation due to a timeout error.
61+
* There have been cases where, despite the timeout error, the pull request gets created _anyway_.
62+
* To accommodate this reality, we created this function to look for an existing pull request before a new one is created.
63+
* Although the "find" check is redundant in the first "cycle", it's designed this way to recursively call the function again via its retry mechanism should that be necessary.
64+
*
65+
* @param {object} config Configuration options for creating the pull request.
66+
* @returns {Promise<number>} The PR number.
67+
*/
68+
async function findOrCreatePullRequest(config) {
69+
const found = await findPullRequestNumber(config)
70+
71+
if (found) {
72+
return found
73+
}
74+
75+
try {
76+
const { data: pullRequest } = await octokit.rest.pulls.create({
77+
owner: config.owner,
78+
repo: config.repo,
79+
base: config.base,
80+
head: config.head,
81+
title: config.title,
82+
body: config.body,
83+
draft: false,
84+
})
85+
86+
return pullRequest.number
87+
} catch (error) {
88+
if (!error.response || !config.retryCount) {
89+
throw error
90+
}
91+
92+
if (!config.retryStatuses.includes(error.response.status)) {
93+
throw error
94+
}
95+
96+
console.error(`Error creating pull request: ${error.message}`)
97+
console.warn(`Retrying in 5 seconds...`)
98+
await new Promise((resolve) => setTimeout(resolve, 5000))
99+
100+
config.retryCount -= 1
101+
102+
return findOrCreatePullRequest(config)
103+
}
104+
}
105+
106+
/**
107+
* @param {object} config Configuration options for labeling the PR
108+
* @returns {Promise<undefined>}
109+
*/
110+
// async function labelPullRequest(config) {
111+
// await octokit.rest.issues.update({
112+
// owner: config.owner,
113+
// repo: config.repo,
114+
// issue_number: config.issue_number,
115+
// labels: config.labels,
116+
// })
117+
// }
118+
119+
async function main() {
120+
const options = {
121+
title: TITLE,
122+
base: BASE,
123+
head: HEAD,
124+
body: fs.readFileSync(BODY_FILE, 'utf8'),
125+
labels: ['translation-batch', `translation-batch-${LANGUAGE}`],
126+
owner: OWNER,
127+
repo: REPO,
128+
retryStatuses: RETRY_STATUSES,
129+
retryCount: RETRY_ATTEMPTS,
130+
}
131+
132+
options.issue_number = await findOrCreatePullRequest(options)
133+
const pr = `${GITHUB_REPOSITORY}#${options.issue_number}`
134+
console.log(`Created PR ${pr}`)
135+
136+
// metadata parameters aren't currently available in `github.rest.pulls.create`,
137+
// but they are in `github.rest.issues.update`.
138+
// await labelPullRequest(options)
139+
// console.log(`Updated ${pr} with these labels: ${options.labels.join(', ')}`)
140+
}
141+
142+
main()

0 commit comments

Comments
 (0)