
Commit 9e10123

Merge branch 'main' into feat/sink-gep-constant-offset
2 parents: 362bf01 + b93e421

5,657 files changed (+297,270 additions, -173,784 deletions)

.ci/metrics/metrics.py

Lines changed: 3 additions & 214 deletions
@@ -1,12 +1,9 @@
 import collections
 import datetime
-import dateutil
 import github
-import json
 import logging
 import os
 import requests
-import sys
 import time
 
 from dataclasses import dataclass
@@ -55,18 +52,6 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120
 
-# Lists the BuildKite jobs we want to track. Maps the BuildKite job name to
-# the metric name in Grafana. This is important not to lose metrics history
-# if the workflow name changes.
-BUILDKITE_WORKFLOW_TO_TRACK = {
-    ":linux: Linux x64": "buildkite_linux",
-    ":windows: Windows x64": "buildkite_windows",
-}
-
-# Number of builds to fetch per page. Since we scrape regularly, this can
-# remain small.
-BUILDKITE_GRAPHQL_BUILDS_PER_PAGE = 50
-
 
 @dataclass
 class JobMetrics:
@@ -86,181 +71,6 @@ class GaugeMetric:
     time_ns: int
 
 
-def buildkite_fetch_page_build_list(
-    buildkite_token: str, after_cursor: str = None
-) -> list[dict[str, str]]:
-    """Fetches a page of the build list using the GraphQL BuildKite API.
-
-    Returns the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE last running/queued builds,
-    or the BUILDKITE_GRAPHQL_BUILDS_PER_PAGE running/queued builds
-    older than the one pointer by |after_cursor| if provided.
-    The |after_cursor| value is taken from the previous page returned by the
-    API.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      after_cursor: cursor after which to start the page fetch.
-
-    Returns:
-      The most recent builds after cursor (if set) with the following format:
-        [
-          {
-            "cursor": <value>,
-            "number": <build-number>,
-          }
-        ]
-    """
-
-    BUILDKITE_GRAPHQL_QUERY = """
-  query OrganizationShowQuery {{
-    organization(slug: "llvm-project") {{
-      pipelines(search: "Github pull requests", first: 1) {{
-        edges {{
-          node {{
-            builds (state: [CANCELING, CREATING, FAILING, RUNNING], first: {PAGE_SIZE}, after: {AFTER}) {{
-              edges {{
-                cursor
-                node {{
-                  number
-                }}
-              }}
-            }}
-          }}
-        }}
-      }}
-    }}
-  }}
-  """
-    query = BUILDKITE_GRAPHQL_QUERY.format(
-        PAGE_SIZE=BUILDKITE_GRAPHQL_BUILDS_PER_PAGE,
-        AFTER="null" if after_cursor is None else '"{}"'.format(after_cursor),
-    )
-    query = json.dumps({"query": query})
-    url = "https://graphql.buildkite.com/v1"
-    headers = {
-        "Authorization": "Bearer " + buildkite_token,
-        "Content-Type": "application/json",
-    }
-    data = requests.post(url, data=query, headers=headers).json()
-    # De-nest the build list.
-    if "errors" in data:
-        logging.info("Failed to fetch BuildKite jobs: {}".format(data["errors"]))
-        return []
-    builds = data["data"]["organization"]["pipelines"]["edges"][0]["node"]["builds"][
-        "edges"
-    ]
-    # Fold cursor info into the node dictionnary.
-    return [{**x["node"], "cursor": x["cursor"]} for x in builds]
-
-
-def buildkite_get_build_info(build_number: str) -> dict:
-    """Returns all the info associated with the provided build number.
-
-    Note: for unknown reasons, graphql returns no jobs for a given build,
-    while this endpoint does, hence why this uses this API instead of graphql.
-
-    Args:
-      build_number: which build number to fetch info for.
-
-    Returns:
-      The info for the target build, a JSON dictionnary.
-    """
-
-    URL = "https://buildkite.com/llvm-project/github-pull-requests/builds/{}.json"
-    return requests.get(URL.format(build_number)).json()
-
-
-def buildkite_get_incomplete_tasks(buildkite_token: str) -> list:
-    """Returns all the running/pending BuildKite builds.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      last_cursor: the cursor to stop at if set. If None, a full page is fetched.
-    """
-    output = []
-    cursor = None
-    while True:
-        page = buildkite_fetch_page_build_list(buildkite_token, cursor)
-        if len(page) == 0:
-            break
-        cursor = page[-1]["cursor"]
-        output += page
-    return output
-
-
-def buildkite_get_metrics(
-    buildkite_token: str, previously_incomplete: set[int]
-) -> (list[JobMetrics], set[int]):
-    """Returns a tuple with:
-
-    - the metrics recorded for newly completed workflow jobs.
-    - the set of workflow still running now.
-
-    Args:
-      buildkite_token: the secret token to authenticate GraphQL requests.
-      previously_incomplete: the set of running workflows the last time this
-        function was called.
-    """
-
-    running_builds = buildkite_get_incomplete_tasks(buildkite_token)
-    incomplete_now = set([x["number"] for x in running_builds])
-    output = []
-
-    for build_id in previously_incomplete:
-        if build_id in incomplete_now:
-            continue
-
-        info = buildkite_get_build_info(build_id)
-        metric_timestamp = dateutil.parser.isoparse(info["finished_at"])
-        for job in info["jobs"]:
-            # This workflow is not interesting to us.
-            if job["name"] not in BUILDKITE_WORKFLOW_TO_TRACK:
-                continue
-
-            # Don't count canceled jobs.
-            if job["canceled_at"]:
-                continue
-
-            created_at = dateutil.parser.isoparse(job["created_at"])
-            scheduled_at = dateutil.parser.isoparse(job["scheduled_at"])
-            started_at = dateutil.parser.isoparse(job["started_at"])
-            finished_at = dateutil.parser.isoparse(job["finished_at"])
-
-            job_name = BUILDKITE_WORKFLOW_TO_TRACK[job["name"]]
-            queue_time = (started_at - scheduled_at).seconds
-            run_time = (finished_at - started_at).seconds
-            status = bool(job["passed"])
-
-            # Grafana will refuse to ingest metrics older than ~2 hours, so we
-            # should avoid sending historical data.
-            metric_age_mn = (
-                datetime.datetime.now(datetime.timezone.utc) - metric_timestamp
-            ).total_seconds() / 60
-            if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN:
-                logging.warning(
-                    f"Job {job['name']} from workflow {build_id} dropped due"
-                    + f" to staleness: {metric_age_mn}mn old."
-                )
-                continue
-
-            metric_timestamp_ns = int(metric_timestamp.timestamp()) * 10**9
-            workflow_id = build_id
-            workflow_name = "Github pull requests"
-            output.append(
-                JobMetrics(
-                    job_name,
-                    queue_time,
-                    run_time,
-                    status,
-                    metric_timestamp_ns,
-                    workflow_id,
-                    workflow_name,
-                )
-            )
-
-    return output, incomplete_now
-
-
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:
@@ -349,19 +159,7 @@ def github_get_metrics(
             running_count[metric_name] += 1
             continue
 
-            job_result = int(job.conclusion == "success")
-            if job_result:
-                # We still might want to mark the job as a failure if one of the steps
-                # failed. This is required due to use setting continue-on-error in
-                # the premerge pipeline to prevent sending emails while we are
-                # testing the infrastructure.
-                # TODO(boomanaiden154): Remove this once the premerge pipeline is no
-                # longer in a testing state and we can directly assert the workflow
-                # result.
-                for step in job.steps:
-                    if step.conclusion != "success" and step.conclusion != "skipped":
-                        job_result = 0
-                        break
+            job_result = int(job.conclusion == "success" or job.conclusion == "skipped")
 
             created_at = job.created_at
             started_at = job.started_at
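
The hunk above replaces the per-step scan (needed while continue-on-error was masking failures) with a single check: a job now counts as passing when its conclusion is "success" or "skipped". A minimal sketch of that logic, with the helper name invented purely for illustration:

def job_passed(conclusion: str) -> int:
    # Equivalent to the new line in the diff: "success" and "skipped" both count as passing.
    return int(conclusion in ("success", "skipped"))
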
@@ -490,17 +288,13 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
 def main():
     # Authenticate with Github
     github_auth = Auth.Token(os.environ["GITHUB_TOKEN"])
-    buildkite_token = os.environ["BUILDKITE_TOKEN"]
     grafana_api_key = os.environ["GRAFANA_API_KEY"]
     grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"]
 
     # The last workflow this script processed.
     # Because the Github queries are broken, we'll simply log a 'processed'
     # bit for the last COUNT_TO_PROCESS workflows.
     gh_last_workflows_seen_as_completed = set()
-    # Stores the list of pending/running builds in BuildKite we need to check
-    # at the next iteration.
-    bk_incomplete = set()
 
     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
@@ -512,13 +306,8 @@ def main():
             github_repo, gh_last_workflows_seen_as_completed
         )
 
-        bk_metrics, bk_incomplete = buildkite_get_metrics(
-            buildkite_token, bk_incomplete
-        )
-
-        metrics = gh_metrics + bk_metrics
-        upload_metrics(metrics, grafana_metrics_userid, grafana_api_key)
-        logging.info(f"Uploaded {len(metrics)} metrics")
+        upload_metrics(gh_metrics, grafana_metrics_userid, grafana_api_key)
+        logging.info(f"Uploaded {len(gh_metrics)} metrics")
 
         time.sleep(SCRAPE_INTERVAL_SECONDS)
 
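
The BuildKite path is removed, but the deleted code above documents the staleness rule the script still works under: Grafana refuses points older than roughly GRAFANA_METRIC_MAX_AGE_MN (120) minutes, and accepted timestamps are converted to nanoseconds before upload. A small self-contained sketch of that pattern, assuming only the constant from the file (the helper name is illustrative, not part of the commit):

import datetime

GRAFANA_METRIC_MAX_AGE_MN = 120

def grafana_timestamp_ns(finished_at: datetime.datetime) -> int | None:
    # Age of the metric in minutes, relative to now (UTC).
    age_mn = (
        datetime.datetime.now(datetime.timezone.utc) - finished_at
    ).total_seconds() / 60
    if age_mn > GRAFANA_METRIC_MAX_AGE_MN:
        # Grafana would refuse a point this old, so signal the caller to drop it.
        return None
    # Grafana expects nanosecond timestamps.
    return int(finished_at.timestamp()) * 10**9
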

.ci/monolithic-linux.sh

Lines changed: 12 additions & 37 deletions
@@ -38,6 +38,7 @@ function at-exit {
 
   ccache --print-stats > artifacts/ccache_stats.txt
   cp "${BUILD_DIR}"/.ninja_log artifacts/.ninja_log
+  cp "${BUILD_DIR}"/test-results.*.xml artifacts/ || :
 
   # If building fails there will be no results files.
   shopt -s nullglob
@@ -46,7 +47,7 @@ function at-exit {
     python3 "${MONOREPO_ROOT}"/.ci/generate_test_report_buildkite.py ":linux: Linux x64 Test Results" \
       "linux-x64-test-results" $retcode "${BUILD_DIR}"/test-results.*.xml
   else
-    python3 "${MONOREPO_ROOT}"/.ci/generate_test_report_github.py ":linux: Linux x64 Test Results" \
+    python3 "${MONOREPO_ROOT}"/.ci/generate_test_report_github.py ":penguin: Linux x64 Test Results" \
       $retcode "${BUILD_DIR}"/test-results.*.xml >> $GITHUB_STEP_SUMMARY
   fi
 }
@@ -101,51 +102,25 @@ if [[ "${runtimes}" != "" ]]; then
     exit 1
   fi
 
-  echo "--- ninja install-clang"
-
-  ninja -C ${BUILD_DIR} install-clang install-clang-resource-headers
-
-  RUNTIMES_BUILD_DIR="${MONOREPO_ROOT}/build-runtimes"
-  INSTALL_DIR="${BUILD_DIR}/install"
-  mkdir -p ${RUNTIMES_BUILD_DIR}
-
   echo "--- cmake runtimes C++26"
 
-  rm -rf "${RUNTIMES_BUILD_DIR}"
-  cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
-      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
-      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
-      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
-      -D LIBCXX_CXX_ABI=libcxxabi \
-      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
-      -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \
-      -D LIBCXX_TEST_PARAMS="std=c++26" \
-      -D LIBCXXABI_TEST_PARAMS="std=c++26" \
-      -D LLVM_LIT_ARGS="${lit_args}"
+  cmake \
+      -D LIBCXX_TEST_PARAMS="std=c++26" \
+      -D LIBCXXABI_TEST_PARAMS="std=c++26" \
+      "${BUILD_DIR}"
 
   echo "--- ninja runtimes C++26"
 
-  ninja -vC "${RUNTIMES_BUILD_DIR}" ${runtime_targets}
+  ninja -C "${BUILD_DIR}" ${runtime_targets}
 
   echo "--- cmake runtimes clang modules"
 
-  # We don't need to do a clean build of runtimes, because LIBCXX_TEST_PARAMS
-  # and LIBCXXABI_TEST_PARAMS only affect lit configuration, which successfully
-  # propagates without a clean build. Other that those two variables, builds
-  # are supposed to be the same.
-
-  cmake -S "${MONOREPO_ROOT}/runtimes" -B "${RUNTIMES_BUILD_DIR}" -GNinja \
-      -D CMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \
-      -D CMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \
-      -D LLVM_ENABLE_RUNTIMES="${runtimes}" \
-      -D LIBCXX_CXX_ABI=libcxxabi \
-      -D CMAKE_BUILD_TYPE=RelWithDebInfo \
-      -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \
-      -D LIBCXX_TEST_PARAMS="enable_modules=clang" \
-      -D LIBCXXABI_TEST_PARAMS="enable_modules=clang" \
-      -D LLVM_LIT_ARGS="${lit_args}"
+  cmake \
+      -D LIBCXX_TEST_PARAMS="enable_modules=clang" \
+      -D LIBCXXABI_TEST_PARAMS="enable_modules=clang" \
+      "${BUILD_DIR}"
 
   echo "--- ninja runtimes clang modules"
 
-  ninja -vC "${RUNTIMES_BUILD_DIR}" ${runtime_targets}
+  ninja -C "${BUILD_DIR}" ${runtime_targets}
 fi

.ci/monolithic-windows.sh

Lines changed: 2 additions & 1 deletion
@@ -33,6 +33,7 @@ function at-exit {
   mkdir -p artifacts
   sccache --show-stats >> artifacts/sccache_stats.txt
   cp "${BUILD_DIR}"/.ninja_log artifacts/.ninja_log
+  cp "${BUILD_DIR}"/test-results.*.xml artifacts/ || :
 
   # If building fails there will be no results files.
   shopt -s nullglob
@@ -41,7 +42,7 @@ function at-exit {
     python "${MONOREPO_ROOT}"/.ci/generate_test_report_buildkite.py ":windows: Windows x64 Test Results" \
       "windows-x64-test-results" $retcode "${BUILD_DIR}"/test-results.*.xml
   else
-    python "${MONOREPO_ROOT}"/.ci/generate_test_report_github.py ":windows: Windows x64 Test Results" \
+    python "${MONOREPO_ROOT}"/.ci/generate_test_report_github.py ":window: Windows x64 Test Results" \
      $retcode "${BUILD_DIR}"/test-results.*.xml >> $GITHUB_STEP_SUMMARY
   fi
 }

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion
@@ -128,7 +128,7 @@
 /mlir/**/Transforms/SROA.* @moxinilian
 
 # BOLT
-/bolt/ @aaupov @maksfb @rafaelauler @ayermolo @yota9
+/bolt/ @aaupov @maksfb @rafaelauler @ayermolo @yota9 @paschalis-mpeis
 
 # Bazel build system.
 /utils/bazel/ @rupprecht @keith @aaronmondal

.github/new-prs-labeler.yml

Lines changed: 10 additions & 0 deletions
@@ -554,6 +554,12 @@ flang:fir-hlfir:
 flang:codegen:
   - flang/**/CodeGen/**
 
+llvm:codegen:
+  - llvm/lib/CodeGen/*
+  - llvm/lib/CodeGen/MIRParser/*
+  - llvm/lib/CodeGen/LiveDebugValues/*
+  - llvm/lib/CodeGen/AsmPrinter/*
+
 llvm:globalisel:
   - llvm/**/GlobalISel/**
   - llvm/utils/TableGen/GlobalISel*
@@ -703,6 +709,10 @@ mlgo:
   - llvm/test/CodeGen/MLRegAlloc/**
   - llvm/utils/mlgo-utils/**
   - llvm/docs/MLGO.rst
+  - llvm/include/llvm/Analysis/IR2Vec.h
+  - llvm/lib/Analysis/IR2Vec.cpp
+  - llvm/lib/Analysis/models/**
+  - llvm/test/Analysis/IR2Vec/**
 
 tools:llvm-exegesis:
   - llvm/tools/llvm-exegesis/**
