
Commit a91ba9d

Merge branch 'sycl' of https://github.com/lbushi25/llvm into sycl

2 parents: 8f10be9 + 3cea0a8

5,009 files changed: +218,729 / -114,334 lines

.ci/generate-buildkite-pipeline-premerge

Lines changed: 2 additions & 3 deletions
@@ -128,9 +128,8 @@ if [[ "${windows_projects}" != "" ]]; then
           limit: 2
     timeout_in_minutes: 150
     env:
-      CC: 'cl'
-      CXX: 'cl'
-      LD: 'link'
+      MAX_PARALLEL_COMPILE_JOBS: '16'
+      MAX_PARALLEL_LINK_JOBS: '4'
     commands:
     - 'C:\\BuildTools\\Common7\\Tools\\VsDevCmd.bat -arch=amd64 -host_arch=amd64'
     - 'bash .ci/monolithic-windows.sh "$(echo ${windows_projects} | tr ' ' ';')" "$(echo ${windows_check_targets})"'

.ci/metrics/metrics.py

Lines changed: 38 additions & 41 deletions
@@ -130,34 +130,6 @@ def get_per_workflow_metrics(
         workflow_jobs = workflow_run.jobs()
         if workflow_jobs.totalCount == 0:
             continue
-        if workflow_jobs.totalCount > 1:
-            raise ValueError(
-                f"Encountered an unexpected number of jobs: {workflow_jobs.totalCount}"
-            )
-
-        created_at = workflow_jobs[0].created_at
-        started_at = workflow_jobs[0].started_at
-        completed_at = workflow_jobs[0].completed_at
-
-        job_result = int(workflow_jobs[0].conclusion == "success")
-        if job_result:
-            # We still might want to mark the job as a failure if one of the steps
-            # failed. This is required due to use setting continue-on-error in
-            # the premerge pipeline to prevent sending emails while we are
-            # testing the infrastructure.
-            # TODO(boomanaiden154): Remove this once the premerge pipeline is no
-            # longer in a testing state and we can directly assert the workflow
-            # result.
-            for step in workflow_jobs[0].steps:
-                if step.conclusion != "success":
-                    job_result = 0
-                    break
-
-        queue_time = started_at - created_at
-        run_time = completed_at - started_at
-
-        if run_time.seconds == 0:
-            continue
 
         if (
             workflows_to_track[workflow_run.name] is None

@@ -170,20 +142,45 @@ def get_per_workflow_metrics(
         ):
             break
 
-        # The timestamp associated with the event is expected by Grafana to be
-        # in nanoseconds.
-        created_at_ns = int(created_at.timestamp()) * 10**9
-
-        workflow_metrics.append(
-            JobMetrics(
-                workflow_run.name,
-                queue_time.seconds,
-                run_time.seconds,
-                job_result,
-                created_at_ns,
-                workflow_run.id,
+        for workflow_job in workflow_jobs:
+            created_at = workflow_job.created_at
+            started_at = workflow_job.started_at
+            completed_at = workflow_job.completed_at
+
+            job_result = int(workflow_job.conclusion == "success")
+            if job_result:
+                # We still might want to mark the job as a failure if one of the steps
+                # failed. This is required due to use setting continue-on-error in
+                # the premerge pipeline to prevent sending emails while we are
+                # testing the infrastructure.
+                # TODO(boomanaiden154): Remove this once the premerge pipeline is no
+                # longer in a testing state and we can directly assert the workflow
+                # result.
+                for step in workflow_job.steps:
+                    if step.conclusion != "success":
+                        job_result = 0
+                        break
+
+            queue_time = started_at - created_at
+            run_time = completed_at - started_at
+
+            if run_time.seconds == 0:
+                continue
+
+            # The timestamp associated with the event is expected by Grafana to be
+            # in nanoseconds.
+            created_at_ns = int(created_at.timestamp()) * 10**9
+
+            workflow_metrics.append(
+                JobMetrics(
+                    workflow_run.name + "-" + workflow_job.name,
+                    queue_time.seconds,
+                    run_time.seconds,
+                    job_result,
+                    created_at_ns,
+                    workflow_run.id,
+                )
             )
-        )
 
     return workflow_metrics
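
The metrics.py change above drops the old single-job assumption and instead loops over every job in a workflow run, emitting one metric per job under a "workflow-job" name. As a rough, self-contained illustration of that aggregation pattern (the Job dataclass, job names, and timestamps below are hypothetical stand-ins, not the PyGithub objects or JobMetrics type the script actually uses), a minimal sketch:

from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass
class Job:
    # Hypothetical stand-in for a workflow job returned by the CI API.
    name: str
    created_at: datetime
    started_at: datetime
    completed_at: datetime
    conclusion: str


def per_job_metrics(workflow_name: str, jobs: list[Job]) -> list[dict]:
    """Emit one metric entry per job, mirroring the per-job loop above."""
    metrics = []
    for job in jobs:
        queue_time = job.started_at - job.created_at
        run_time = job.completed_at - job.started_at
        if run_time.seconds == 0:
            # Skip jobs with no measurable runtime, as the script does.
            continue
        metrics.append(
            {
                # One series per workflow/job pair, e.g. "Premerge-windows".
                "name": workflow_name + "-" + job.name,
                "queue_time_s": queue_time.seconds,
                "run_time_s": run_time.seconds,
                "success": int(job.conclusion == "success"),
                # Grafana expects the event timestamp in nanoseconds.
                "created_at_ns": int(job.created_at.timestamp()) * 10**9,
            }
        )
    return metrics


# Example: two jobs from one run now produce two separate metric entries.
t0 = datetime(2025, 2, 1, 12, 0, 0)
jobs = [
    Job("linux", t0, t0 + timedelta(minutes=5), t0 + timedelta(minutes=35), "success"),
    Job("windows", t0, t0 + timedelta(minutes=2), t0 + timedelta(minutes=50), "failure"),
]
print(per_job_metrics("Premerge", jobs))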

.ci/monolithic-windows.sh

Lines changed: 6 additions & 2 deletions
@@ -50,6 +50,10 @@ echo "--- cmake"
 pip install -q -r "${MONOREPO_ROOT}"/mlir/python/requirements.txt
 pip install -q -r "${MONOREPO_ROOT}"/.ci/requirements.txt
 
+export CC=cl
+export CXX=cl
+export LD=link
+
 # The CMAKE_*_LINKER_FLAGS to disable the manifest come from research
 # on fixing a build reliability issue on the build server, please
 # see https://github.com/llvm/llvm-project/pull/82393 and

@@ -72,8 +76,8 @@ cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \
       -D CMAKE_EXE_LINKER_FLAGS="/MANIFEST:NO" \
       -D CMAKE_MODULE_LINKER_FLAGS="/MANIFEST:NO" \
       -D CMAKE_SHARED_LINKER_FLAGS="/MANIFEST:NO" \
-      -D LLVM_PARALLEL_COMPILE_JOBS=16 \
-      -D LLVM_PARALLEL_LINK_JOBS=4
+      -D LLVM_PARALLEL_COMPILE_JOBS=${MAX_PARALLEL_COMPILE_JOBS} \
+      -D LLVM_PARALLEL_LINK_JOBS=${MAX_PARALLEL_LINK_JOBS}
 
 echo "--- ninja"
 # Targets are not escaped as they are passed as separate arguments.

.github/CODEOWNERS

Lines changed: 3 additions & 0 deletions
@@ -193,9 +193,12 @@ llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h @intel/dpcpp-san
 llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerCommon.h @intel/dpcpp-sanitizers-review
 llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerOptions.h @intel/dpcpp-sanitizers-review
 llvm/include/llvm/Transforms/Instrumentation/MemorySanitizer.h @intel/dpcpp-sanitizers-review
+llvm/include/llvm/Transforms/Instrumentation/ThreadSanitizer.h @intel/dpcpp-sanitizers-review
 llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp @intel/dpcpp-sanitizers-review
 llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp @intel/dpcpp-sanitizers-review
+llvm/lib/Transforms/Instrumentation/ThreadSanitizer.cpp @intel/dpcpp-sanitizers-review
 llvm/test/Instrumentation/AddressSanitizer/ @intel/dpcpp-sanitizers-review
 llvm/test/Instrumentation/MemorySanitizer/ @intel/dpcpp-sanitizers-review
+llvm/test/Instrumentation/ThreadSanitizer/ @intel/dpcpp-sanitizers-review
 sycl/test-e2e/AddressSanitizer/ @intel/dpcpp-sanitizers-review
 sycl/test-e2e/MemorySanitizer/ @intel/dpcpp-sanitizers-review

.github/workflows/containers/github-action-ci-windows/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -108,7 +108,7 @@ RUN choco install -y handle
 
 RUN pip3 install pywin32 buildbot-worker==2.8.4
 
-ARG RUNNER_VERSION=2.321.0
+ARG RUNNER_VERSION=2.322.0
 ENV RUNNER_VERSION=$RUNNER_VERSION
 
 RUN powershell -Command \

.github/workflows/containers/github-action-ci/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ WORKDIR /home/gha
 
 FROM ci-container as ci-container-agent
 
-ENV GITHUB_RUNNER_VERSION=2.321.0
+ENV GITHUB_RUNNER_VERSION=2.322.0
 
 RUN mkdir actions-runner && \
     cd actions-runner && \

.github/workflows/coverity.yml

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ jobs:
           --cmake-opt="-DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=SPIRV"
 
       - name: Build with coverity
-        run: $GITHUB_WORKSPACE/cov-analysis-linux64-*/bin/cov-build --dir cov-int cmake --build $GITHUB_WORKSPACE/build
+        run: $GITHUB_WORKSPACE/cov-analysis-linux64-*/bin/cov-build --dir cov-int cmake --build $GITHUB_WORKSPACE/build --target sycl-toolchain
 
       - name: Compress results
        run: tar -I pigz -cf intel_llvm.tgz cov-int

.github/workflows/docs.yml

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ jobs:
         with:
           fetch-depth: 1
       - name: Setup Python env
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: '3.11'
           cache: 'pip'

.github/workflows/email-check.yaml

Lines changed: 3 additions & 2 deletions
@@ -2,8 +2,9 @@ name: "Check for private emails used in PRs"
 
 on:
   pull_request:
-    types:
-      - opened
+    branches:
+      - sycl
+      - sycl-rel-**
 
 permissions:
   contents: read

.github/workflows/libc-fullbuild-tests.yml

Lines changed: 12 additions & 5 deletions
@@ -11,12 +11,19 @@ on:
 
 jobs:
   build:
-    runs-on: ubuntu-24.04
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
         include:
-          - c_compiler: clang
+          - os: ubuntu-24.04
+            ccache-variant: sccache
+            c_compiler: clang
+            cpp_compiler: clang++
+          # TODO: remove ccache logic when https://github.com/hendrikmuhs/ccache-action/issues/279 is resolved.
+          - os: ubuntu-24.04-arm
+            ccache-variant: ccache
+            c_compiler: clang
             cpp_compiler: clang++
           # TODO: add back gcc build when it is fixed
           # - c_compiler: gcc

@@ -35,7 +42,7 @@ jobs:
         with:
           max-size: 1G
           key: libc_fullbuild_${{ matrix.c_compiler }}
-          variant: sccache
+          variant: ${{ matrix.ccache-variant }}
 
       # Notice:
       # - MPFR is required by some of the mathlib tests.

@@ -62,8 +69,8 @@ jobs:
           -DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }}
           -DCMAKE_C_COMPILER=${{ matrix.c_compiler }}
           -DCMAKE_BUILD_TYPE=MinSizeRel
-          -DCMAKE_C_COMPILER_LAUNCHER=sccache
-          -DCMAKE_CXX_COMPILER_LAUNCHER=sccache
+          -DCMAKE_C_COMPILER_LAUNCHER=${{ matrix.ccache-variant }}
+          -DCMAKE_CXX_COMPILER_LAUNCHER=${{ matrix.ccache-variant }}
           -DCMAKE_INSTALL_PREFIX=${{ steps.strings.outputs.build-install-dir }}
           -DLLVM_ENABLE_RUNTIMES="libc;compiler-rt"
           -DLLVM_LIBC_FULL_BUILD=ON
