
Commit fde819b

Merge branch 'main' into patch-1
2 parents: 03caaff + 532facc

980 files changed: +28,488 / -19,071 lines


.ci/metrics/metrics.py

Lines changed: 2 additions & 201 deletions
@@ -1,12 +1,9 @@
 import collections
 import datetime
-import dateutil
 import github
-import json
 import logging
 import os
 import requests
-import sys
 import time

 from dataclasses import dataclass
@@ -55,18 +52,6 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120

-# Lists the BuildKite jobs we want to track. Maps the BuildKite job name to
-# the metric name in Grafana. This is important not to lose metrics history
-# if the workflow name changes.
-BUILDKITE_WORKFLOW_TO_TRACK = {
-    ":linux: Linux x64": "buildkite_linux",
-    ":windows: Windows x64": "buildkite_windows",
-}
-
-# Number of builds to fetch per page. Since we scrape regularly, this can
-# remain small.
-BUILDKITE_GRAPHQL_BUILDS_PER_PAGE = 50
-

 @dataclass
 class JobMetrics:
@@ -86,181 +71,6 @@ class GaugeMetric:
     time_ns: int


-def buildkite_fetch_page_build_list(
-    buildkite_token: str, after_cursor: str = None
-) -> list[dict[str, str]]:
-    """Fetches a page of the build list using the GraphQL BuildKite API.
-
-    Returns the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE last running/queued builds,
-    or the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE running/queued builds
-    older than the one pointer by |after_cursor| if provided.
-    The |after_cursor| value is taken from the previous page returned by the
-    API.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      after_cursor: cursor after which to start the page fetch.
-
-    Returns:
-      The most recent builds after cursor (if set) with the following format:
-        [
-          {
-            "cursor": <value>,
-            "number": <build-number>,
-          }
-        ]
-    """
-
-    BUILDKITE_GRAPHQL_QUERY = """
-  query OrganizationShowQuery {{
-    organization(slug: "llvm-project") {{
-      pipelines(search: "Github pull requests", first: 1) {{
-        edges {{
-          node {{
-            builds (state: [CANCELING, CREATING, FAILING, RUNNING], first: {PAGE_SIZE}, after: {AFTER}) {{
-              edges {{
-                cursor
-                node {{
-                  number
-                }}
-              }}
-            }}
-          }}
-        }}
-      }}
-    }}
-  }}
-  """
-    query = BUILDKITE_GRAPHQL_QUERY.format(
-        PAGE_SIZE=BUILDKITE_GRAPHQL_BUILDS_PER_PAGE,
-        AFTER="null" if after_cursor is None else '"{}"'.format(after_cursor),
-    )
-    query = json.dumps({"query": query})
-    url = "https://graphql.buildkite.com/v1"
-    headers = {
-        "Authorization": "Bearer " + buildkite_token,
-        "Content-Type": "application/json",
-    }
-    data = requests.post(url, data=query, headers=headers).json()
-    # De-nest the build list.
-    if "errors" in data:
-        logging.info("Failed to fetch BuildKite jobs: {}".format(data["errors"]))
-        return []
-    builds = data["data"]["organization"]["pipelines"]["edges"][0]["node"]["builds"][
-        "edges"
-    ]
-    # Fold cursor info into the node dictionnary.
-    return [{**x["node"], "cursor": x["cursor"]} for x in builds]
-
-
-def buildkite_get_build_info(build_number: str) -> dict:
-    """Returns all the info associated with the provided build number.
-
-    Note: for unknown reasons, graphql returns no jobs for a given build,
-    while this endpoint does, hence why this uses this API instead of graphql.
-
-    Args:
-      build_number: which build number to fetch info for.
-
-    Returns:
-      The info for the target build, a JSON dictionnary.
-    """
-
-    URL = "https://buildkite.com/llvm-project/github-pull-requests/builds/{}.json"
-    return requests.get(URL.format(build_number)).json()
-
-
-def buildkite_get_incomplete_tasks(buildkite_token: str) -> list:
-    """Returns all the running/pending BuildKite builds.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      last_cursor: the cursor to stop at if set. If None, a full page is fetched.
-    """
-    output = []
-    cursor = None
-    while True:
-        page = buildkite_fetch_page_build_list(buildkite_token, cursor)
-        if len(page) == 0:
-            break
-        cursor = page[-1]["cursor"]
-        output += page
-    return output
-
-
-def buildkite_get_metrics(
-    buildkite_token: str, previously_incomplete: set[int]
-) -> (list[JobMetrics], set[int]):
-    """Returns a tuple with:
-
-    - the metrics recorded for newly completed workflow jobs.
-    - the set of workflow still running now.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      previously_incomplete: the set of running workflows the last time this
-        function was called.
-    """
-
-    running_builds = buildkite_get_incomplete_tasks(buildkite_token)
-    incomplete_now = set([x["number"] for x in running_builds])
-    output = []
-
-    for build_id in previously_incomplete:
-        if build_id in incomplete_now:
-            continue
-
-        info = buildkite_get_build_info(build_id)
-        metric_timestamp = dateutil.parser.isoparse(info["finished_at"])
-        for job in info["jobs"]:
-            # This workflow is not interesting to us.
-            if job["name"] not in BUILDKITE_WORKFLOW_TO_TRACK:
-                continue
-
-            # Don't count canceled jobs.
-            if job["canceled_at"]:
-                continue
-
-            created_at = dateutil.parser.isoparse(job["created_at"])
-            scheduled_at = dateutil.parser.isoparse(job["scheduled_at"])
-            started_at = dateutil.parser.isoparse(job["started_at"])
-            finished_at = dateutil.parser.isoparse(job["finished_at"])
-
-            job_name = BUILDKITE_WORKFLOW_TO_TRACK[job["name"]]
-            queue_time = (started_at - scheduled_at).seconds
-            run_time = (finished_at - started_at).seconds
-            status = bool(job["passed"])
-
-            # Grafana will refuse to ingest metrics older than ~2 hours, so we
-            # should avoid sending historical data.
-            metric_age_mn = (
-                datetime.datetime.now(datetime.timezone.utc) - metric_timestamp
-            ).total_seconds() / 60
-            if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN:
-                logging.warning(
-                    f"Job {job['name']} from workflow {build_id} dropped due"
-                    + f" to staleness: {metric_age_mn}mn old."
-                )
-                continue
-
-            metric_timestamp_ns = int(metric_timestamp.timestamp()) * 10**9
-            workflow_id = build_id
-            workflow_name = "Github pull requests"
-            output.append(
-                JobMetrics(
-                    job_name,
-                    queue_time,
-                    run_time,
-                    status,
-                    metric_timestamp_ns,
-                    workflow_id,
-                    workflow_name,
-                )
-            )
-
-    return output, incomplete_now
-
-
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:
@@ -478,17 +288,13 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
 def main():
     # Authenticate with Github
     github_auth = Auth.Token(os.environ["GITHUB_TOKEN"])
-    buildkite_token = os.environ["BUILDKITE_TOKEN"]
     grafana_api_key = os.environ["GRAFANA_API_KEY"]
     grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"]

     # The last workflow this script processed.
     # Because the Github queries are broken, we'll simply log a 'processed'
     # bit for the last COUNT_TO_PROCESS workflows.
     gh_last_workflows_seen_as_completed = set()
-    # Stores the list of pending/running builds in BuildKite we need to check
-    # at the next iteration.
-    bk_incomplete = set()

     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
@@ -500,13 +306,8 @@ def main():
             github_repo, gh_last_workflows_seen_as_completed
         )

-        bk_metrics, bk_incomplete = buildkite_get_metrics(
-            buildkite_token, bk_incomplete
-        )
-
-        metrics = gh_metrics + bk_metrics
-        upload_metrics(metrics, grafana_metrics_userid, grafana_api_key)
-        logging.info(f"Uploaded {len(metrics)} metrics")
+        upload_metrics(gh_metrics, grafana_metrics_userid, grafana_api_key)
+        logging.info(f"Uploaded {len(gh_metrics)} metrics")

         time.sleep(SCRAPE_INTERVAL_SECONDS)

bolt/include/bolt/Core/MCPlusBuilder.h

Lines changed: 1 addition & 1 deletion
@@ -405,7 +405,7 @@ class MCPlusBuilder {

   bool equals(const MCExpr &A, const MCExpr &B, CompFuncTy Comp) const;

-  virtual bool equals(const MCTargetExpr &A, const MCTargetExpr &B,
+  virtual bool equals(const MCSpecifierExpr &A, const MCSpecifierExpr &B,
                       CompFuncTy Comp) const;

   virtual bool isBranch(const MCInst &Inst) const {

bolt/lib/Core/HashUtilities.cpp

Lines changed: 1 addition & 0 deletions
@@ -67,6 +67,7 @@ std::string hashExpr(BinaryContext &BC, const MCExpr &Expr) {
         .append(hashInteger(BinaryExpr.getOpcode()))
         .append(hashExpr(BC, *BinaryExpr.getRHS()));
   }
+  case MCExpr::Specifier:
   case MCExpr::Target:
     return std::string();
   }

bolt/lib/Core/MCPlusBuilder.cpp

Lines changed: 6 additions & 4 deletions
@@ -114,17 +114,19 @@ bool MCPlusBuilder::equals(const MCExpr &A, const MCExpr &B,
            equals(*BinaryA.getRHS(), *BinaryB.getRHS(), Comp);
   }

-  case MCExpr::Target: {
-    const auto &TargetExprA = cast<MCTargetExpr>(A);
-    const auto &TargetExprB = cast<MCTargetExpr>(B);
+  case MCExpr::Specifier: {
+    const auto &TargetExprA = cast<MCSpecifierExpr>(A);
+    const auto &TargetExprB = cast<MCSpecifierExpr>(B);
     return equals(TargetExprA, TargetExprB, Comp);
   }
+  case MCExpr::Target:
+    llvm_unreachable("Not implemented");
   }

   llvm_unreachable("Invalid expression kind!");
 }

-bool MCPlusBuilder::equals(const MCTargetExpr &A, const MCTargetExpr &B,
+bool MCPlusBuilder::equals(const MCSpecifierExpr &A, const MCSpecifierExpr &B,
                            CompFuncTy Comp) const {
   llvm_unreachable("target-specific expressions are unsupported");
 }

bolt/lib/Target/AArch64/AArch64MCPlusBuilder.cpp

Lines changed: 1 addition & 1 deletion
@@ -177,7 +177,7 @@ class AArch64MCPlusBuilder : public MCPlusBuilder {
     return true;
   }

-  bool equals(const MCTargetExpr &A, const MCTargetExpr &B,
+  bool equals(const MCSpecifierExpr &A, const MCSpecifierExpr &B,
               CompFuncTy Comp) const override {
     const auto &AArch64ExprA = cast<AArch64MCExpr>(A);
     const auto &AArch64ExprB = cast<AArch64MCExpr>(B);

bolt/lib/Target/RISCV/RISCVMCPlusBuilder.cpp

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@ class RISCVMCPlusBuilder : public MCPlusBuilder {
 public:
   using MCPlusBuilder::MCPlusBuilder;

-  bool equals(const MCTargetExpr &A, const MCTargetExpr &B,
+  bool equals(const MCSpecifierExpr &A, const MCSpecifierExpr &B,
               CompFuncTy Comp) const override {
     const auto &RISCVExprA = cast<RISCVMCExpr>(A);
     const auto &RISCVExprB = cast<RISCVMCExpr>(B);

clang-tools-extra/clang-doc/BitcodeReader.cpp

Lines changed: 5 additions & 1 deletion
@@ -315,9 +315,13 @@ static llvm::Error parseRecord(const Record &R, unsigned ID,

 static llvm::Error parseRecord(const Record &R, unsigned ID,
                                llvm::StringRef Blob, CommentInfo *I) {
+  llvm::SmallString<16> KindStr;
   switch (ID) {
   case COMMENT_KIND:
-    return decodeRecord(R, I->Kind, Blob);
+    if (llvm::Error Err = decodeRecord(R, KindStr, Blob))
+      return Err;
+    I->Kind = stringToCommentKind(KindStr);
+    return llvm::Error::success();
   case COMMENT_TEXT:
     return decodeRecord(R, I->Text, Blob);
   case COMMENT_NAME:
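
Note: the reader now decodes the COMMENT_KIND record into a temporary string and converts it with stringToCommentKind, while the writer below stores commentKindToString(I.Kind). Those helpers live elsewhere in this commit and are not shown in this excerpt; the following is only a minimal sketch of what such a mapping could look like, assuming an llvm::StringSwitch-based lookup and the CommentKind::CK_* enumerators visible in the HTMLGenerator.cpp hunk further down. The real enum, spellings, and signatures may differ.

// Hypothetical sketch -- not part of this commit. Names and spellings are
// assumptions based on the identifiers visible in the diff above/below.
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSwitch.h"

enum class CommentKind {
  CK_Unknown,
  CK_FullComment,
  CK_ParagraphComment,
  CK_TextComment,
  // ... the remaining CK_* kinds from the HTMLGenerator switch are elided.
};

// Parse the serialized spelling back into the enum; unknown strings fall
// back to CK_Unknown so stale bitcode does not abort the reader.
static CommentKind stringToCommentKind(llvm::StringRef KindStr) {
  return llvm::StringSwitch<CommentKind>(KindStr)
      .Case("FullComment", CommentKind::CK_FullComment)
      .Case("ParagraphComment", CommentKind::CK_ParagraphComment)
      .Case("TextComment", CommentKind::CK_TextComment)
      .Default(CommentKind::CK_Unknown);
}

// Produce the spelling stored in the COMMENT_KIND record.
static llvm::StringRef commentKindToString(CommentKind Kind) {
  switch (Kind) {
  case CommentKind::CK_FullComment:
    return "FullComment";
  case CommentKind::CK_ParagraphComment:
    return "ParagraphComment";
  case CommentKind::CK_TextComment:
    return "TextComment";
  case CommentKind::CK_Unknown:
    return "Unknown";
  }
  return "Unknown";
}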

clang-tools-extra/clang-doc/BitcodeWriter.cpp

Lines changed: 2 additions & 1 deletion
@@ -484,8 +484,9 @@ void ClangDocBitcodeWriter::emitBlock(const MemberTypeInfo &T) {

 void ClangDocBitcodeWriter::emitBlock(const CommentInfo &I) {
   StreamSubBlockGuard Block(Stream, BI_COMMENT_BLOCK_ID);
+  // Handle Kind (enum) separately, since it is not a string.
+  emitRecord(commentKindToString(I.Kind), COMMENT_KIND);
   for (const auto &L : std::vector<std::pair<llvm::StringRef, RecordId>>{
-           {I.Kind, COMMENT_KIND},
            {I.Text, COMMENT_TEXT},
            {I.Name, COMMENT_NAME},
            {I.Direction, COMMENT_DIRECTION},
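
Since parseRecord converts the stored string back with stringToCommentKind, the writer/reader pair relies on the two helpers being inverses of each other. A purely hypothetical check of that round-trip property, reusing the sketched names above (assumptions, not the commit's code), might look like:

// Hypothetical round-trip check, assuming the helpers sketched earlier.
#include <cassert>
#include <initializer_list>

static void checkCommentKindRoundTrip() {
  for (CommentKind K : {CommentKind::CK_FullComment,
                        CommentKind::CK_ParagraphComment,
                        CommentKind::CK_TextComment}) {
    // Writing a kind as a string and parsing it back should be lossless,
    // otherwise comments would change kind across a bitcode round trip.
    assert(stringToCommentKind(commentKindToString(K)) == K);
  }
}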

clang-tools-extra/clang-doc/HTMLGenerator.cpp

Lines changed: 21 additions & 6 deletions
@@ -635,7 +635,8 @@ genHTML(const Index &Index, StringRef InfoPath, bool IsOutermostList) {
 }

 static std::unique_ptr<HTMLNode> genHTML(const CommentInfo &I) {
-  if (I.Kind == "FullComment") {
+  switch (I.Kind) {
+  case CommentKind::CK_FullComment: {
     auto FullComment = std::make_unique<TagNode>(HTMLTag::TAG_DIV);
     for (const auto &Child : I.Children) {
       std::unique_ptr<HTMLNode> Node = genHTML(*Child);
@@ -645,7 +646,7 @@ static std::unique_ptr<HTMLNode> genHTML(const CommentInfo &I) {
     return std::move(FullComment);
   }

-  if (I.Kind == "ParagraphComment") {
+  case CommentKind::CK_ParagraphComment: {
     auto ParagraphComment = std::make_unique<TagNode>(HTMLTag::TAG_P);
     for (const auto &Child : I.Children) {
       std::unique_ptr<HTMLNode> Node = genHTML(*Child);
@@ -657,7 +658,7 @@ static std::unique_ptr<HTMLNode> genHTML(const CommentInfo &I) {
     return std::move(ParagraphComment);
   }

-  if (I.Kind == "BlockCommandComment") {
+  case CommentKind::CK_BlockCommandComment: {
     auto BlockComment = std::make_unique<TagNode>(HTMLTag::TAG_DIV);
     BlockComment->Children.emplace_back(
         std::make_unique<TagNode>(HTMLTag::TAG_DIV, I.Name));
@@ -670,12 +671,26 @@ static std::unique_ptr<HTMLNode> genHTML(const CommentInfo &I) {
       return nullptr;
     return std::move(BlockComment);
   }
-  if (I.Kind == "TextComment") {
-    if (I.Text == "")
+
+  case CommentKind::CK_TextComment: {
+    if (I.Text.empty())
       return nullptr;
     return std::make_unique<TextNode>(I.Text);
   }
-  return nullptr;
+
+  // For now, return nullptr for unsupported comment kinds
+  case CommentKind::CK_InlineCommandComment:
+  case CommentKind::CK_HTMLStartTagComment:
+  case CommentKind::CK_HTMLEndTagComment:
+  case CommentKind::CK_ParamCommandComment:
+  case CommentKind::CK_TParamCommandComment:
+  case CommentKind::CK_VerbatimBlockComment:
+  case CommentKind::CK_VerbatimBlockLineComment:
+  case CommentKind::CK_VerbatimLineComment:
+  case CommentKind::CK_Unknown:
+    return nullptr;
+  }
+  llvm_unreachable("Unhandled CommentKind");
 }

 static std::unique_ptr<TagNode> genHTML(const std::vector<CommentInfo> &C) {
