
Commit ea35161

Merge branch 'main' into hgh/libcxx/P0448R4-spanstream-A-strstream-replacement-using-span-charT-as-buffer

2 parents: 3102008 + 8091dce

12,473 files changed: +961,878 / -395,897 lines


.ci/all_requirements.txt

Lines changed: 7 additions & 3 deletions
@@ -194,9 +194,9 @@ ml-dtypes==0.5.1 ; python_version < "3.13" \
     --hash=sha256:d13755f8e8445b3870114e5b6240facaa7cb0c3361e54beba3e07fa912a6e12b \
     --hash=sha256:fd918d4e6a4e0c110e2e05be7a7814d10dc1b95872accbf6512b80a109b71ae1
     # via -r mlir/python/requirements.txt
-nanobind==2.7.0 \
-    --hash=sha256:73b12d0e751d140d6c1bf4b215e18818a8debfdb374f08dc3776ad208d808e74 \
-    --hash=sha256:f9f1b160580c50dcf37b6495a0fd5ec61dc0d95dae5f8004f87dd9ad7eb46b34
+nanobind==2.9.2 \
+    --hash=sha256:c37957ffd5eac7eda349cff3622ecd32e5ee1244ecc912c99b5bc8188bafd16e \
+    --hash=sha256:e7608472de99d375759814cab3e2c94aba3f9ec80e62cfef8ced495ca5c27d6e
     # via -r mlir/python/requirements.txt
 numpy==2.0.2 \
     --hash=sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a \
@@ -383,6 +383,10 @@ swig==4.3.1 \
     --hash=sha256:efec16327029f682f649a26da726bb0305be8800bd0f1fa3e81bf0769cf5b476 \
     --hash=sha256:fc496c0d600cf1bb2d91e28d3d6eae9c4301e5ea7a0dec5a4281b5efed4245a8
     # via -r lldb/test/requirements.txt
+typing-extensions==4.15.0 \
+    --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \
+    --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
+    # via -r mlir/python/requirements.txt
 urllib3==2.5.0 \
     --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \
     --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc

.ci/cache_lit_timing_files.py

Lines changed: 16 additions & 2 deletions
@@ -17,6 +17,7 @@
 import glob

 from google.cloud import storage
+from google.api_core import exceptions

 GCS_PARALLELISM = 100

@@ -50,7 +51,14 @@ def _maybe_download_timing_file(blob):

 def download_timing_files(storage_client, bucket_name: str):
     bucket = storage_client.bucket(bucket_name)
-    blobs = bucket.list_blobs(prefix="lit_timing")
+    try:
+        blobs = bucket.list_blobs(prefix="lit_timing")
+    except exceptions.ClientError as client_error:
+        print(
+            "::warning file=cache_lit_timing_files.py::Failed to list blobs "
+            "in bucket."
+        )
+        sys.exit(0)
     with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
         futures = []
         for timing_file_blob in blobs:
@@ -60,7 +68,13 @@ def download_timing_files(storage_client, bucket_name: str):
                 )
             )
         for future in futures:
-            future.get()
+            future.wait()
+            if not future.successful():
+                print(
+                    "::warning file=cache_lit_timing_files.py::Failed to "
+                    "download lit timing file."
+                )
+                continue
     print("Done downloading")
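
The wait()/successful() pattern introduced above comes from the standard library's multiprocessing.pool.AsyncResult API: .get() re-raises a worker's exception, while .wait() followed by .successful() lets the loop tolerate individual failures. A minimal self-contained sketch of that pattern (flaky_task and the pool size are illustrative stand-ins, not code from this commit):

    import multiprocessing.pool

    def flaky_task(n):
        # Stand-in for a download helper: fails for odd inputs.
        if n % 2:
            raise RuntimeError(f"task {n} failed")
        return n

    if __name__ == "__main__":
        with multiprocessing.pool.ThreadPool(4) as pool:
            futures = [pool.apply_async(flaky_task, (i,)) for i in range(6)]
            for future in futures:
                # .get() would re-raise the worker's exception; .wait() plus
                # .successful() lets the loop continue past individual failures.
                future.wait()
                if not future.successful():
                    print("warning: task failed, skipping")
                    continue
                print("result:", future.get())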

.ci/compute_projects.py

Lines changed: 8 additions & 1 deletion
@@ -150,6 +150,7 @@
     "mlir": "check-mlir",
     "openmp": "check-openmp",
     "polly": "check-polly",
+    "lit": "check-lit",
 }

 RUNTIMES = {"libcxx", "libcxxabi", "libunwind", "compiler-rt", "libc", "flang-rt"}
@@ -166,8 +167,12 @@
     ("llvm", "utils", "gn"): "gn",
     (".github", "workflows", "premerge.yaml"): ".ci",
     ("third-party",): ".ci",
+    ("llvm", "utils", "lit"): "lit",
 }

+# Projects that should run tests but cannot be explicitly built.
+SKIP_BUILD_PROJECTS = ["CIR", "lit"]
+
 # Projects that should not run any tests. These need to be metaprojects.
 SKIP_PROJECTS = ["docs", "gn"]

@@ -315,7 +320,9 @@ def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
     # clang build, but it requires an explicit option to enable. We set that
     # option here, and remove it from the projects_to_build list.
     enable_cir = "ON" if "CIR" in projects_to_build else "OFF"
-    projects_to_build.discard("CIR")
+    # Remove any metaprojects from the list of projects to build.
+    for project in SKIP_BUILD_PROJECTS:
+        projects_to_build.discard(project)

     # We use a semicolon to separate the projects/runtimes as they get passed
     # to the CMake invocation and thus we need to use the CMake list separator
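
A rough sketch of the metaproject handling above: names in SKIP_BUILD_PROJECTS still contribute check-* targets, but they are dropped from the set that becomes the CMake project list. The sample values below are illustrative, not the real output of compute_projects.py:

    # Illustrative stand-ins; the real values are computed by compute_projects.py.
    SKIP_BUILD_PROJECTS = ["CIR", "lit"]

    projects_to_build = {"clang", "llvm", "CIR", "lit"}

    # "CIR" is folded into a CMake option rather than a project entry.
    enable_cir = "ON" if "CIR" in projects_to_build else "OFF"

    # Metaprojects keep their check-* targets but never reach the CMake project list.
    for project in SKIP_BUILD_PROJECTS:
        projects_to_build.discard(project)  # no-op if the name is absent

    print(";".join(sorted(projects_to_build)))  # clang;llvm
    print(enable_cir)                           # ON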

.ci/compute_projects_test.py

Lines changed: 24 additions & 0 deletions
@@ -413,6 +413,30 @@ def test_third_party_benchmark(self):
             "check-cxx check-cxxabi check-unwind",
         )

+    def test_lit(self):
+        env_variables = compute_projects.get_env_variables(
+            ["llvm/utils/lit/CMakeLists.txt"], "Linux"
+        )
+        self.assertEqual(
+            env_variables["projects_to_build"],
+            "bolt;clang;clang-tools-extra;flang;lld;lldb;llvm;mlir;polly",
+        )
+        self.assertEqual(
+            env_variables["project_check_targets"],
+            "check-bolt check-clang check-clang-tools check-flang check-lit check-lld check-lldb check-llvm check-mlir check-polly",
+        )
+        self.assertEqual(
+            env_variables["runtimes_to_build"], "libcxx;libcxxabi;libunwind"
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets"],
+            "",
+        )
+        self.assertEqual(
+            env_variables["runtimes_check_targets_needs_reconfig"],
+            "check-cxx check-cxxabi check-unwind",
+        )
+

 if __name__ == "__main__":
     unittest.main()

.ci/generate_test_report_github.py

Lines changed: 10 additions & 5 deletions
@@ -8,10 +8,15 @@

 import generate_test_report_lib

-PLATFORM_TITLES = {
-    "Windows": ":window: Windows x64 Test Results",
-    "Linux": ":penguin: Linux x64 Test Results",
-}
+def compute_platform_title() -> str:
+    logo = ":window:" if platform.system() == "Windows" else ":penguin:"
+    # On Linux the machine value is x86_64 on Windows it is AMD64.
+    if platform.machine() == "x86_64" or platform.machine() == "AMD64":
+        arch = "x64"
+    else:
+        arch = platform.machine()
+    return f"{logo} {platform.system()} {arch} Test Results"
+

 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
@@ -22,7 +27,7 @@
     args = parser.parse_args()

     report = generate_test_report_lib.generate_report_from_files(
-        PLATFORM_TITLES[platform.system()], args.return_code, args.build_test_logs
+        compute_platform_title(), args.return_code, args.build_test_logs
     )

     print(report)
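
To illustrate what compute_platform_title() produces, here is a small sketch that applies the same logic to explicit (system, machine) pairs instead of the live platform module; title_for and the sample pairs are illustrative, not part of the commit:

    def title_for(system: str, machine: str) -> str:
        # Same mapping as compute_platform_title(), parameterized for inspection.
        logo = ":window:" if system == "Windows" else ":penguin:"
        arch = "x64" if machine in ("x86_64", "AMD64") else machine
        return f"{logo} {system} {arch} Test Results"

    print(title_for("Linux", "x86_64"))    # :penguin: Linux x64 Test Results
    print(title_for("Windows", "AMD64"))   # :window: Windows x64 Test Results
    print(title_for("Linux", "aarch64"))   # :penguin: Linux aarch64 Test Results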

.ci/generate_test_report_lib.py

Lines changed: 25 additions & 17 deletions
@@ -98,6 +98,23 @@ def _format_ninja_failures(ninja_failures: list[tuple[str, str]]) -> list[str]:
         )
     return output

+def get_failures(junit_objects) -> dict[str, list[tuple[str, str]]]:
+    failures = {}
+    for results in junit_objects:
+        for testsuite in results:
+            for test in testsuite:
+                if (
+                    not test.is_passed
+                    and test.result
+                    and isinstance(test.result[0], Failure)
+                ):
+                    if failures.get(testsuite.name) is None:
+                        failures[testsuite.name] = []
+                    failures[testsuite.name].append(
+                        (test.classname + "/" + test.name, test.result[0].text)
+                    )
+    return failures
+

 # Set size_limit to limit the byte size of the report. The default is 1MB as this
 # is the most that can be put into an annotation. If the generated report exceeds
@@ -113,7 +130,7 @@ def generate_report(
     size_limit=1024 * 1024,
     list_failures=True,
 ):
-    failures = {}
+    failures = get_failures(junit_objects)
     tests_run = 0
     tests_skipped = 0
     tests_failed = 0
@@ -124,18 +141,6 @@ def generate_report(
             tests_skipped += testsuite.skipped
             tests_failed += testsuite.failures

-            for test in testsuite:
-                if (
-                    not test.is_passed
-                    and test.result
-                    and isinstance(test.result[0], Failure)
-                ):
-                    if failures.get(testsuite.name) is None:
-                        failures[testsuite.name] = []
-                    failures[testsuite.name].append(
-                        (test.classname + "/" + test.name, test.result[0].text)
-                    )
-
     report = [f"# {title}", ""]

     if tests_run == 0:
@@ -258,7 +263,7 @@ def plural(num_tests):
     return report


-def generate_report_from_files(title, return_code, build_log_files):
+def load_info_from_files(build_log_files):
     junit_files = [
         junit_file for junit_file in build_log_files if junit_file.endswith(".xml")
     ]
@@ -271,6 +276,9 @@ def generate_report_from_files(title, return_code, build_log_files):
             ninja_logs.append(
                 [log_line.strip() for log_line in ninja_log_file_handle.readlines()]
             )
-    return generate_report(
-        title, return_code, [JUnitXml.fromfile(p) for p in junit_files], ninja_logs
-    )
+    return [JUnitXml.fromfile(p) for p in junit_files], ninja_logs
+
+
+def generate_report_from_files(title, return_code, build_log_files):
+    junit_objects, ninja_logs = load_info_from_files(build_log_files)
+    return generate_report(title, return_code, junit_objects, ninja_logs)
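
After this refactor the report pipeline is split into reusable pieces — load_info_from_files(), get_failures(), and generate_report() — so other scripts (such as the premerge advisor upload below) can consume the parsed results without formatting a report. A rough sketch of the flow, assuming the listed artifact paths exist on disk (they are made up, so this is not runnable as-is):

    import generate_test_report_lib

    # Hypothetical inputs; real callers pass the artifacts produced by the CI job.
    build_log_files = ["test-results.xml", "ninja.log"]

    junit_objects, ninja_logs = generate_test_report_lib.load_info_from_files(
        build_log_files
    )
    # Mapping of test-suite name -> [(test name, failure text), ...]
    failures = generate_test_report_lib.get_failures(junit_objects)

    report = generate_test_report_lib.generate_report(
        "Linux x64 Test Results", 1, junit_objects, ninja_logs
    )
    print(report)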

.ci/metrics/metrics.py

Lines changed: 1 addition & 0 deletions
@@ -40,6 +40,7 @@
 GITHUB_JOB_TO_TRACK = {
     "github_llvm_premerge_checks": {
         "Build and Test Linux": "premerge_linux",
+        "Build and Test Linux AArch64": "premerge_linux_aarch64",
         "Build and Test Windows": "premerge_windows",
     },
     "github_libcxx_premerge_checks": {

.ci/monolithic-linux.sh

Lines changed: 4 additions & 2 deletions
@@ -32,8 +32,6 @@ enable_cir="${6}"
 lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name --timeout=1200 --time-tests --succinct"

 start-group "CMake"
-export PIP_BREAK_SYSTEM_PACKAGES=1
-pip install -q -r "${MONOREPO_ROOT}"/.ci/all_requirements.txt

 # Set the system llvm-symbolizer as preferred.
 export LLVM_SYMBOLIZER_PATH=`which llvm-symbolizer`
@@ -68,11 +66,13 @@ start-group "ninja"

 # Targets are not escaped as they are passed as separate arguments.
 ninja -C "${BUILD_DIR}" -k 0 ${targets} |& tee ninja.log
+cp ${BUILD_DIR}/.ninja_log ninja.ninja_log

 if [[ "${runtime_targets}" != "" ]]; then
   start-group "ninja Runtimes"

   ninja -C "${BUILD_DIR}" ${runtime_targets} |& tee ninja_runtimes.log
+  cp ${BUILD_DIR}/.ninja_log ninja_runtimes.ninja_log
 fi

 # Compiling runtimes with just-built Clang and running their tests
@@ -89,6 +89,7 @@ if [[ "${runtime_targets_needs_reconfig}" != "" ]]; then

   ninja -C "${BUILD_DIR}" ${runtime_targets_needs_reconfig} \
     |& tee ninja_runtimes_needs_reconfig1.log
+  cp ${BUILD_DIR}/.ninja_log ninja_runtimes_needs_reconig.ninja_log

   start-group "CMake Runtimes Clang Modules"

@@ -101,4 +102,5 @@ if [[ "${runtime_targets_needs_reconfig}" != "" ]]; then

   ninja -C "${BUILD_DIR}" ${runtime_targets_needs_reconfig} \
     |& tee ninja_runtimes_needs_reconfig2.log
+  cp ${BUILD_DIR}/.ninja_log ninja_runtimes_needs_reconfig2.ninja_log
 fi

.ci/monolithic-windows.sh

Lines changed: 2 additions & 0 deletions
@@ -55,9 +55,11 @@ start-group "ninja"

 # Targets are not escaped as they are passed as separate arguments.
 ninja -C "${BUILD_DIR}" -k 0 ${targets} |& tee ninja.log
+cp ${BUILD_DIR}/.ninja_log ninja.ninja_log

 if [[ "${runtime_targets}" != "" ]]; then
   start-group "ninja runtimes"

   ninja -C "${BUILD_DIR}" -k 0 ${runtimes_targets} |& tee ninja_runtimes.log
+  cp ${BUILD_DIR}/.ninja_log ninja_runtimes.ninja_log
 fi

.ci/premerge_advisor_upload.py

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+"""Script for uploading results to the premerge advisor."""
+
+import argparse
+import os
+import platform
+import sys
+
+import requests
+
+import generate_test_report_lib
+
+PREMERGE_ADVISOR_URL = (
+    "http://premerge-advisor.premerge-advisor.svc.cluster.local:5000/upload"
+)
+
+
+def main(commit_sha, workflow_run_number, build_log_files):
+    junit_objects, ninja_logs = generate_test_report_lib.load_info_from_files(
+        build_log_files
+    )
+    test_failures = generate_test_report_lib.get_failures(junit_objects)
+    source = "pull_request" if "GITHUB_ACTIONS" in os.environ else "postcommit"
+    current_platform = f"{platform.system()}-{platform.machine()}".lower()
+    failure_info = {
+        "source_type": source,
+        "base_commit_sha": commit_sha,
+        "source_id": workflow_run_number,
+        "failures": [],
+        "platform": current_platform,
+    }
+    if test_failures:
+        for _, failures in test_failures.items():
+            for name, failure_message in failures:
+                failure_info["failures"].append(
+                    {"name": name, "message": failure_message}
+                )
+    else:
+        ninja_failures = generate_test_report_lib.find_failure_in_ninja_logs(ninja_logs)
+        for name, failure_message in ninja_failures:
+            failure_info["failures"].append({"name": name, "message": failure_message})
+    requests.post(PREMERGE_ADVISOR_URL, json=failure_info)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("commit_sha", help="The base commit SHA for the test.")
+    parser.add_argument("workflow_run_number", help="The run number from GHA.")
+    parser.add_argument(
+        "build_log_files", help="Paths to JUnit report files and ninja logs.", nargs="*"
+    )
+    args = parser.parse_args()
+
+    # Skip uploading results on AArch64 for now because the premerge advisor
+    # service is not available on AWS currently.
+    if platform.machine() == "arm64":
+        sys.exit(0)
+
+    main(args.commit_sha, args.workflow_run_number, args.build_log_files)
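
For reference, the body this script posts is a plain JSON object built in main(); a hypothetical example of what a single test failure reported from a Linux x86_64 pull-request run might look like (all values are illustrative, not taken from a real run):

    # Hypothetical failure_info payload, mirroring the structure built in main().
    example_payload = {
        "source_type": "pull_request",
        "base_commit_sha": "ea35161",
        "source_id": "12345",
        "failures": [
            {
                # Name is test.classname + "/" + test.name, per get_failures().
                "name": "lit.example-suite/some-test.py",
                "message": "assertion failed ...",
            }
        ],
        "platform": "linux-x86_64",
    }

The script itself is invoked with a base commit SHA, a workflow run number, and the JUnit XML and ninja log paths as trailing arguments, matching the argparse definition above.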

0 commit comments
