Commit 8ad358a

Merge remote-tracking branch 'upstream/main' into lsp-support-to-llvm

2 parents: c8d05f5 + da65685

File tree

1,449 files changed: 93,823 additions, 29,361 deletions


.ci/all_requirements.txt

Lines changed: 213 additions & 11 deletions
Large diffs are not rendered by default.

.ci/cache_lit_timing_files.py

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+"""Caches .lit_test_times.txt files between premerge invocations.
+
+.lit_test_times.txt files are used by lit to order tests to best take advantage
+of parallelism. Having them around and up to date can result in a ~15%
+improvement in test times. This script downloads cached test time files and
+uploads new versions to the GCS buckets used for caching.
+"""
+
+import sys
+import os
+import logging
+import multiprocessing.pool
+import pathlib
+import glob
+
+from google.cloud import storage
+
+GCS_PARALLELISM = 100
+
+
+def _maybe_upload_timing_file(bucket, timing_file_path):
+    if os.path.exists(timing_file_path):
+        timing_file_blob = bucket.blob("lit_timing/" + timing_file_path)
+        timing_file_blob.upload_from_filename(timing_file_path)
+
+
+def upload_timing_files(storage_client, bucket_name: str):
+    bucket = storage_client.bucket(bucket_name)
+    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
+        futures = []
+        for timing_file_path in glob.glob("**/.lit_test_times.txt", recursive=True):
+            futures.append(
+                thread_pool.apply_async(
+                    _maybe_upload_timing_file, (bucket, timing_file_path)
+                )
+            )
+        for future in futures:
+            future.get()
+    print("Done uploading")
+
+
+def _maybe_download_timing_file(blob):
+    file_name = blob.name.removeprefix("lit_timing/")
+    pathlib.Path(os.path.dirname(file_name)).mkdir(parents=True, exist_ok=True)
+    blob.download_to_filename(file_name)
+
+
+def download_timing_files(storage_client, bucket_name: str):
+    bucket = storage_client.bucket(bucket_name)
+    blobs = bucket.list_blobs(prefix="lit_timing")
+    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
+        futures = []
+        for timing_file_blob in blobs:
+            futures.append(
+                thread_pool.apply_async(
+                    _maybe_download_timing_file, (timing_file_blob,)
+                )
+            )
+        for future in futures:
+            future.get()
+    print("Done downloading")
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 2:
+        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
+        sys.exit(1)
+    action = sys.argv[1]
+    storage_client = storage.Client()
+    bucket_name = os.environ["CACHE_GCS_BUCKET"]
+    if action == "download":
+        download_timing_files(storage_client, bucket_name)
+    elif action == "upload":
+        upload_timing_files(storage_client, bucket_name)
+    else:
+        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
+        sys.exit(1)
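For orientation, here is a minimal sketch of how a premerge job could wrap a test run with this script. The bucket name and the surrounding driver code are illustrative assumptions, not part of this commit:

# Hypothetical driver; "example-premerge-cache" is a placeholder bucket name.
import os
import subprocess

env = dict(os.environ, CACHE_GCS_BUCKET="example-premerge-cache")
# Pull cached .lit_test_times.txt files before the test suites run.
subprocess.run(
    ["python3", ".ci/cache_lit_timing_files.py", "download"], env=env, check=True
)
# ... run the lit test suites; lit refreshes .lit_test_times.txt as it goes ...
# Push the updated timing files so the next invocation starts warm.
subprocess.run(
    ["python3", ".ci/cache_lit_timing_files.py", "upload"], env=env, check=True
)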

.ci/compute_projects.py

Lines changed: 2 additions & 1 deletion
@@ -333,6 +333,7 @@ def get_env_variables(modified_files: list[str], platform: str) -> Set[str]:
     current_platform = platform.system()
     if len(sys.argv) == 2:
         current_platform = sys.argv[1]
-    env_variables = get_env_variables(sys.stdin.readlines(), current_platform)
+    changed_files = [line.strip() for line in sys.stdin.readlines()]
+    env_variables = get_env_variables(changed_files, current_platform)
     for env_variable in env_variables:
         print(f"{env_variable}='{env_variables[env_variable]}'")

.ci/metrics/metrics.py

Lines changed: 2 additions & 0 deletions
@@ -70,6 +70,7 @@
 # by trial and error).
 GRAFANA_METRIC_MAX_AGE_MN = 120
 
+
 @dataclass
 class JobMetrics:
     job_name: str
@@ -243,6 +244,7 @@ def clean_up_libcxx_job_name(old_name: str) -> str:
     new_name = stage + "_" + remainder
     return new_name
 
+
 def github_get_metrics(
     github_repo: github.Repository, last_workflows_seen_as_completed: set[int]
 ) -> tuple[list[JobMetrics], int]:

.ci/metrics/metrics_test.py

Lines changed: 1 addition & 0 deletions
@@ -409,5 +409,6 @@ def test_clean_up_libcxx_job_name(self):
         out_name4 = metrics.clean_up_libcxx_job_name(bad_name)
         self.assertEqual(out_name4, bad_name)
 
+
 if __name__ == "__main__":
     unittest.main()

.ci/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -1 +1,2 @@
 junitparser==3.2.0
+google-cloud-storage==3.3.0

.github/workflows/containers/github-action-ci/Dockerfile

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@ FROM docker.io/library/ubuntu:24.04 as base
 ENV LLVM_SYSROOT=/opt/llvm
 
 FROM base as stage1-toolchain
-ENV LLVM_VERSION=20.1.8
+ENV LLVM_VERSION=21.1.0
 
 RUN apt-get update && \
     apt-get install -y \
@@ -73,8 +73,8 @@ RUN apt-get update && \
 # caching), so we manually install it here.
 # TODO(boomanaiden154): We should return to installing this from the apt
 # repository once a version containing the necessary bug fixes is available.
-RUN curl -L 'https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz' > /tmp/sccache.tar.gz && \
-    echo "1fbb35e135660d04a2d5e42b59c7874d39b3deb17de56330b25b713ec59f849b /tmp/sccache.tar.gz" | sha256sum -c && \
+RUN curl -L "https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-$(arch)-unknown-linux-musl.tar.gz" > /tmp/sccache.tar.gz && \
+    echo $( [ $(arch) = 'x86_64' ] && echo "1fbb35e135660d04a2d5e42b59c7874d39b3deb17de56330b25b713ec59f849b" || echo "d6a1ce4acd02b937cd61bc675a8be029a60f7bc167594c33d75732bbc0a07400") /tmp/sccache.tar.gz | sha256sum -c && \
     tar xzf /tmp/sccache.tar.gz -O --wildcards '*/sccache' > '/usr/local/bin/sccache' && \
     rm /tmp/sccache.tar.gz && \
     chmod +x /usr/local/bin/sccache
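The rewritten RUN line downloads the tarball matching the build machine's architecture and picks the expected digest accordingly. A rough Python rendering of that selection logic, assuming (as the fallback branch suggests) that the second digest belongs to the aarch64 tarball:

import hashlib
import platform

# Digests copied from the Dockerfile; the aarch64 attribution is an assumption.
SCCACHE_SHA256 = {
    "x86_64": "1fbb35e135660d04a2d5e42b59c7874d39b3deb17de56330b25b713ec59f849b",
    "aarch64": "d6a1ce4acd02b937cd61bc675a8be029a60f7bc167594c33d75732bbc0a07400",
}

def tarball_matches(path: str) -> bool:
    # Hash the downloaded archive and compare it against the digest for this arch.
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).hexdigest()
    return digest == SCCACHE_SHA256.get(platform.machine())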

.github/workflows/premerge.yaml

Lines changed: 3 additions & 2 deletions
@@ -65,7 +65,8 @@ jobs:
           # several test suites in a row and discard statistics that we want
           # to save in the end.
           export SCCACHE_IDLE_TIMEOUT=0
-          sccache --start-server
+          mkdir artifacts
+          SCCACHE_LOG=info SCCACHE_ERROR_LOG=$(pwd)/artifacts/sccache.log sccache --start-server
 
           ./.ci/monolithic-linux.sh "${projects_to_build}" "${project_check_targets}" "${runtimes_to_build}" "${runtimes_check_targets}" "${runtimes_check_targets_needs_reconfig}" "${enable_cir}"
       - name: Upload Artifacts
@@ -117,7 +118,7 @@ jobs:
           call C:\\BuildTools\\Common7\\Tools\\VsDevCmd.bat -arch=amd64 -host_arch=amd64
           # See the comments above in the Linux job for why we define each of
           # these environment variables.
-          bash -c "export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET; export SCCACHE_GCS_RW_MODE=READ_WRITE; export SCCACHE_IDLE_TIMEOUT=0; sccache --start-server; .ci/monolithic-windows.sh \"${{ steps.vars.outputs.windows-projects }}\" \"${{ steps.vars.outputs.windows-check-targets }}\""
+          bash -c "export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET; export SCCACHE_GCS_RW_MODE=READ_WRITE; export SCCACHE_IDLE_TIMEOUT=0; mkdir artifacts; SCCACHE_LOG=info SCCACHE_ERROR_LOG=$(pwd)/artifacts/sccache.log sccache --start-server; .ci/monolithic-windows.sh \"${{ steps.vars.outputs.windows-projects }}\" \"${{ steps.vars.outputs.windows-check-targets }}\""
       - name: Upload Artifacts
         # In some cases, Github will fail to upload the artifact. We want to
         # continue anyways as a failed artifact upload is an infra failure, not

bolt/lib/Core/BinaryFunction.cpp

Lines changed: 2 additions & 0 deletions
@@ -3773,6 +3773,8 @@ MCSymbol *BinaryFunction::addEntryPointAtOffset(uint64_t Offset) {
   assert(Offset && "cannot add primary entry point");
 
   const uint64_t EntryPointAddress = getAddress() + Offset;
+  assert(!isInConstantIsland(EntryPointAddress) &&
+         "cannot add entry point that points to constant data");
   MCSymbol *LocalSymbol = getOrCreateLocalLabel(EntryPointAddress);
 
   MCSymbol *EntrySymbol = getSecondaryEntryPointSymbol(LocalSymbol);

bolt/lib/Rewrite/RewriteInstance.cpp

Lines changed: 2 additions & 1 deletion
@@ -2935,7 +2935,8 @@ void RewriteInstance::handleRelocation(const SectionRef &RelocatedSection,
     ReferencedSymbol = nullptr;
     ExtractedValue = Address;
   } else if (RefFunctionOffset) {
-    if (ContainingBF && ContainingBF != ReferencedBF) {
+    if (ContainingBF && ContainingBF != ReferencedBF &&
+        !ReferencedBF->isInConstantIsland(Address)) {
       ReferencedSymbol =
           ReferencedBF->addEntryPointAtOffset(RefFunctionOffset);
     } else {
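Both BOLT changes enforce the same invariant: an address inside a constant island (literal data embedded in a function body, common on AArch64) must never be promoted to a code entry point. A conceptual sketch of that guard in Python, with invented ranges rather than BOLT's actual data structures:

# Illustration only; the ranges and function names are made up.
CONSTANT_ISLANDS = [(0x1040, 0x1060)]  # [start, end) data ranges within a function

def is_in_constant_island(address: int) -> bool:
    return any(start <= address < end for start, end in CONSTANT_ISLANDS)

def add_entry_point_at(address: int) -> str:
    # Mirrors the new assertion in BinaryFunction::addEntryPointAtOffset.
    assert not is_in_constant_island(address), (
        "cannot add entry point that points to constant data"
    )
    return f"entry_{address:#x}"  # stand-in for creating a local label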
