Commit a5bbb38

Merge remote-tracking branch 'origin/main' into HEAD

2 parents a88a1f7 + d943efb
774 files changed, +197152 −4281 lines

.ci/all_requirements.txt

Lines changed: 213 additions & 11 deletions
Large diffs are not rendered by default.

.ci/cache_lit_timing_files.py

Lines changed: 85 additions & 0 deletions
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""Caches .lit_test_times.txt files between premerge invocations.

.lit_test_times.txt files are used by lit to order tests to best take advantage
of parallelism. Having them around and up to date can result in a ~15%
improvement in test times. This script downloads cached test time files and
uploads new versions to the GCS buckets used for caching.
"""

import sys
import os
import logging
import multiprocessing.pool
import pathlib
import platform
import glob

from google.cloud import storage

GCS_PARALLELISM = 100


def _get_blob_prefix():
    return f"lit_timing_{platform.system().lower()}"


def _maybe_upload_timing_file(bucket, timing_file_path):
    if os.path.exists(timing_file_path):
        timing_file_blob = bucket.blob(_get_blob_prefix() + "/" + timing_file_path)
        timing_file_blob.upload_from_filename(timing_file_path)


def upload_timing_files(storage_client, bucket_name: str):
    bucket = storage_client.bucket(bucket_name)
    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
        futures = []
        for timing_file_path in glob.glob("**/.lit_test_times.txt", recursive=True):
            futures.append(
                thread_pool.apply_async(
                    _maybe_upload_timing_file, (bucket, timing_file_path)
                )
            )
        for future in futures:
            future.get()
    print("Done uploading")


def _maybe_download_timing_file(blob):
    file_name = blob.name.removeprefix(_get_blob_prefix() + "/")
    pathlib.Path(os.path.dirname(file_name)).mkdir(parents=True, exist_ok=True)
    blob.download_to_filename(file_name)


def download_timing_files(storage_client, bucket_name: str):
    bucket = storage_client.bucket(bucket_name)
    blobs = bucket.list_blobs(prefix=_get_blob_prefix())
    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
        futures = []
        for timing_file_blob in blobs:
            futures.append(
                thread_pool.apply_async(
                    _maybe_download_timing_file, (timing_file_blob,)
                )
            )
        for future in futures:
            future.get()
    print("Done downloading")


if __name__ == "__main__":
    if len(sys.argv) != 2:
        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
        sys.exit(1)
    action = sys.argv[1]
    storage_client = storage.Client()
    bucket_name = os.environ["CACHE_GCS_BUCKET"]
    if action == "download":
        download_timing_files(storage_client, bucket_name)
    elif action == "upload":
        upload_timing_files(storage_client, bucket_name)
    else:
        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
        sys.exit(1)
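
For context, a minimal sketch of how this script is meant to be driven from a premerge job, assuming CACHE_GCS_BUCKET is already exported and the repository checkout is the working directory (the step ordering here is an assumption for illustration, not taken from this commit):

# Hypothetical premerge wrapper illustrating the download -> test -> upload
# cycle around a test run; ordering and paths are assumptions.
import subprocess

# Pull cached .lit_test_times.txt files so lit can schedule slow tests first.
subprocess.run(["python", ".ci/cache_lit_timing_files.py", "download"], check=True)

# ... build and run the lit test suites here; lit rewrites the
# .lit_test_times.txt files as tests complete ...

# Push the refreshed timing files back for the next invocation.
subprocess.run(["python", ".ci/cache_lit_timing_files.py", "upload"], check=True)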

.ci/generate_test_report_lib.py

Lines changed: 7 additions & 0 deletions
@@ -27,6 +27,13 @@ def _parse_ninja_log(ninja_log: list[str]) -> list[tuple[str, str]]:
             # We hit the end of the log without finding a build failure, go to
             # the next log.
             return failures
+        # If we are doing a build with LLVM_ENABLE_RUNTIMES, we can have nested
+        # ninja invocations. The sub-ninja will print that a subcommand failed,
+        # and then the outer ninja will list the command that failed. We should
+        # ignore the outer failure.
+        if ninja_log[index - 1].startswith("ninja: build stopped:"):
+            index += 1
+            continue
         # We are trying to parse cases like the following:
         #
         # [4/5] test/4.stamp
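
To make the nested-ninja behavior concrete, here is a small self-contained sketch of the heuristic, written as a simplified re-implementation for illustration rather than the library function itself (the sample log mirrors the test added below):

# A "FAILED:" line immediately after "ninja: build stopped:" is the outer
# ninja restating a sub-ninja failure, so only the inner failure is kept.
sample_log = [
    "[2/5] test/2.stamp",
    "FAILED: touch test/2.stamp",
    "Wow! This system is really broken!",
    "ninja: build stopped: subcommand failed.",
    "FAILED: running check-runtime failed.",
    "ninja: build stopped: subcommand failed.",
]

real_failures = []
for index, line in enumerate(sample_log):
    if not line.startswith("FAILED:"):
        continue
    if index > 0 and sample_log[index - 1].startswith("ninja: build stopped:"):
        continue  # outer ninja echoing the inner failure; ignore it
    real_failures.append(line)

print(real_failures)  # ['FAILED: touch test/2.stamp']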

.ci/generate_test_report_lib_test.py

Lines changed: 33 additions & 0 deletions
@@ -126,6 +126,39 @@ def test_ninja_log_multiple_failures(self):
             ),
         )

+    # Test that we can correctly handle the runtimes build. The LLVM runtimes
+    # build will involve ninja invoking more ninja processes within the
+    # runtimes directory. This means that we see two failures for a failure in
+    # the runtimes build: one from the inner ninja containing the actual action
+    # that failed, and one for the sub-ninja invocation that failed.
+    def test_ninja_log_runtimes_failure(self):
+        failures = generate_test_report_lib.find_failure_in_ninja_logs(
+            [
+                [
+                    "[1/5] test/1.stamp",
+                    "[2/5] test/2.stamp",
+                    "FAILED: touch test/2.stamp",
+                    "Wow! This system is really broken!",
+                    "ninja: build stopped: subcommand failed.",
+                    "FAILED: running check-runtime failed.",
+                    "<some random command>",
+                    "ninja: build stopped: subcommand failed.",
+                ]
+            ]
+        )
+        self.assertEqual(len(failures), 1)
+        self.assertEqual(
+            failures[0],
+            (
+                "test/2.stamp",
+                dedent(
+                    """\
+                    FAILED: touch test/2.stamp
+                    Wow! This system is really broken!"""
+                ),
+            ),
+        )
+
     def test_title_only(self):
         self.assertEqual(
             generate_test_report_lib.generate_report("Foo", 0, [], []),

.ci/metrics/metrics.py

Lines changed: 2 additions & 0 deletions
@@ -70,6 +70,7 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120

+
 @dataclass
 class JobMetrics:
     job_name: str
@@ -243,6 +244,7 @@ def clean_up_libcxx_job_name(old_name: str) -> str:
     new_name = stage + "_" + remainder
     return new_name

+
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:

.ci/metrics/metrics_test.py

Lines changed: 1 addition & 0 deletions
@@ -409,5 +409,6 @@ def test_clean_up_libcxx_job_name(self):
         out_name4 = metrics.clean_up_libcxx_job_name(bad_name)
         self.assertEqual(out_name4, bad_name)

+
 if __name__ == "__main__":
     unittest.main()

.ci/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -1 +1,2 @@
 junitparser==3.2.0
+google-cloud-storage==3.3.0
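
This dependency backs the new GCS caching script above. A quick smoke test, assuming Application Default Credentials are available and using a placeholder bucket name, might look like:

# Verifies google-cloud-storage can authenticate; assumes Application Default
# Credentials (GOOGLE_APPLICATION_CREDENTIALS or ambient workload identity).
from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-premerge-cache")  # placeholder bucket name
print(bucket.exists())  # True if the credentials can reach the bucket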

.github/workflows/containers/github-action-ci/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ FROM docker.io/library/ubuntu:24.04 as base
 ENV LLVM_SYSROOT=/opt/llvm

 FROM base as stage1-toolchain
-ENV LLVM_VERSION=20.1.8
+ENV LLVM_VERSION=21.1.0

 RUN apt-get update && \
     apt-get install -y \

.github/workflows/libcxx-build-containers.yml

Lines changed: 20 additions & 12 deletions
@@ -32,6 +32,14 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

+      # The default Docker storage location for GitHub Actions doesn't have
+      # enough disk space, so change it to /mnt, which has more disk space.
+      - name: Change Docker storage location
+        run: |
+          sudo mkdir /mnt/docker
+          echo '{ "data-root": "/mnt/docker" }' | sudo tee /etc/docker/daemon.json
+          sudo systemctl restart docker
+
       - name: Build the Linux builder image
         working-directory: libcxx/utils/ci
         run: |
@@ -40,11 +48,11 @@ jobs:
         env:
           TAG: ${{ github.sha }}

-      # - name: Build the Android builder image
-      #   working-directory: libcxx/utils/ci
-      #   run: docker compose build android-buildkite-builder
-      #   env:
-      #     TAG: ${{ github.sha }}
+      - name: Build the Android builder image
+        working-directory: libcxx/utils/ci
+        run: docker compose build android-buildkite-builder
+        env:
+          TAG: ${{ github.sha }}

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
@@ -62,10 +70,10 @@ jobs:
         env:
           TAG: ${{ github.sha }}

-      # - name: Push the Android builder image
-      #   if: github.event_name == 'push'
-      #   working-directory: libcxx/utils/ci
-      #   run: |
-      #     docker compose push android-buildkite-builder
-      #   env:
-      #     TAG: ${{ github.sha }}
+      - name: Push the Android builder image
+        if: github.event_name == 'push'
+        working-directory: libcxx/utils/ci
+        run: |
+          docker compose push android-buildkite-builder
+        env:
+          TAG: ${{ github.sha }}

.github/workflows/pr-code-format.yml

Lines changed: 4 additions & 1 deletion
@@ -52,10 +52,13 @@ jobs:
           echo "Formatting files:"
           echo "$CHANGED_FILES"

+      # The clang-format version should always be upgraded to the first version
+      # of a release cycle (x.1.0) or the last version of a release cycle, or
+      # if there have been relevant clang-format backports.
       - name: Install clang-format
         uses: aminya/setup-cpp@17c11551771948abc5752bbf3183482567c7caf0 # v1.1.1
         with:
-          clangformat: 20.1.8
+          clangformat: 21.1.0

       - name: Setup Python env
         uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
