Commit ddcd61c

Merge branch 'main' into llvm/atomic_01
2 parents 5eae752 + 27e6561 commit ddcd61c

9,409 files changed: 766,045 additions & 554,869 deletions

.ci/metrics/metrics.py

Lines changed: 90 additions & 74 deletions
@@ -24,6 +24,7 @@ class JobMetrics:
     status: int
     created_at_ns: int
     workflow_id: int
+    workflow_name: str


 @dataclass
@@ -43,40 +44,60 @@ def get_sampled_workflow_metrics(github_repo: github.Repository):
     Returns a list of GaugeMetric objects, containing the relevant metrics about
     the workflow
     """
+    queued_job_counts = {}
+    running_job_counts = {}

     # Other states are available (pending, waiting, etc), but the meaning
     # is not documented (See #70540).
     # "queued" seems to be the info we want.
-    queued_workflow_count = len(
-        [
-            x
-            for x in github_repo.get_workflow_runs(status="queued")
-            if x.name in WORKFLOWS_TO_TRACK
-        ]
-    )
-    running_workflow_count = len(
-        [
-            x
-            for x in github_repo.get_workflow_runs(status="in_progress")
-            if x.name in WORKFLOWS_TO_TRACK
-        ]
-    )
+    for queued_workflow in github_repo.get_workflow_runs(status="queued"):
+        if queued_workflow.name not in WORKFLOWS_TO_TRACK:
+            continue
+        for queued_workflow_job in queued_workflow.jobs():
+            job_name = queued_workflow_job.name
+            # Workflows marked as queued can potentially only have some jobs
+            # queued, so make sure to also count jobs currently in progress.
+            if queued_workflow_job.status == "queued":
+                if job_name not in queued_job_counts:
+                    queued_job_counts[job_name] = 1
+                else:
+                    queued_job_counts[job_name] += 1
+            elif queued_workflow_job.status == "in_progress":
+                if job_name not in running_job_counts:
+                    running_job_counts[job_name] = 1
+                else:
+                    running_job_counts[job_name] += 1
+
+    for running_workflow in github_repo.get_workflow_runs(status="in_progress"):
+        if running_workflow.name not in WORKFLOWS_TO_TRACK:
+            continue
+        for running_workflow_job in running_workflow.jobs():
+            job_name = running_workflow_job.name
+            if running_workflow_job.status != "in_progress":
+                continue
+
+            if job_name not in running_job_counts:
+                running_job_counts[job_name] = 1
+            else:
+                running_job_counts[job_name] += 1

     workflow_metrics = []
-    workflow_metrics.append(
-        GaugeMetric(
-            "workflow_queue_size",
-            queued_workflow_count,
-            time.time_ns(),
+    for queued_job in queued_job_counts:
+        workflow_metrics.append(
+            GaugeMetric(
+                f"workflow_queue_size_{queued_job}",
+                queued_job_counts[queued_job],
+                time.time_ns(),
+            )
         )
-    )
-    workflow_metrics.append(
-        GaugeMetric(
-            "running_workflow_count",
-            running_workflow_count,
-            time.time_ns(),
+    for running_job in running_job_counts:
+        workflow_metrics.append(
+            GaugeMetric(
+                f"running_workflow_count_{running_job}",
+                running_job_counts[running_job],
+                time.time_ns(),
+            )
         )
-    )
     # Always send a hearbeat metric so we can monitor is this container is still able to log to Grafana.
     workflow_metrics.append(
         GaugeMetric("metrics_container_heartbeat", 1, time.time_ns())
@@ -130,34 +151,6 @@ def get_per_workflow_metrics(
         workflow_jobs = workflow_run.jobs()
         if workflow_jobs.totalCount == 0:
             continue
-        if workflow_jobs.totalCount > 1:
-            raise ValueError(
-                f"Encountered an unexpected number of jobs: {workflow_jobs.totalCount}"
-            )
-
-        created_at = workflow_jobs[0].created_at
-        started_at = workflow_jobs[0].started_at
-        completed_at = workflow_jobs[0].completed_at
-
-        job_result = int(workflow_jobs[0].conclusion == "success")
-        if job_result:
-            # We still might want to mark the job as a failure if one of the steps
-            # failed. This is required due to use setting continue-on-error in
-            # the premerge pipeline to prevent sending emails while we are
-            # testing the infrastructure.
-            # TODO(boomanaiden154): Remove this once the premerge pipeline is no
-            # longer in a testing state and we can directly assert the workflow
-            # result.
-            for step in workflow_jobs[0].steps:
-                if step.conclusion != "success":
-                    job_result = 0
-                    break
-
-        queue_time = started_at - created_at
-        run_time = completed_at - started_at
-
-        if run_time.seconds == 0:
-            continue

         if (
             workflows_to_track[workflow_run.name] is None
@@ -170,20 +163,46 @@ def get_per_workflow_metrics(
         ):
             break

-        # The timestamp associated with the event is expected by Grafana to be
-        # in nanoseconds.
-        created_at_ns = int(created_at.timestamp()) * 10**9
-
-        workflow_metrics.append(
-            JobMetrics(
-                workflow_run.name,
-                queue_time.seconds,
-                run_time.seconds,
-                job_result,
-                created_at_ns,
-                workflow_run.id,
+        for workflow_job in workflow_jobs:
+            created_at = workflow_job.created_at
+            started_at = workflow_job.started_at
+            completed_at = workflow_job.completed_at
+
+            job_result = int(workflow_job.conclusion == "success")
+            if job_result:
+                # We still might want to mark the job as a failure if one of the steps
+                # failed. This is required due to use setting continue-on-error in
+                # the premerge pipeline to prevent sending emails while we are
+                # testing the infrastructure.
+                # TODO(boomanaiden154): Remove this once the premerge pipeline is no
+                # longer in a testing state and we can directly assert the workflow
+                # result.
+                for step in workflow_job.steps:
+                    if step.conclusion != "success" and step.conclusion != "skipped":
+                        job_result = 0
+                        break
+
+            queue_time = started_at - created_at
+            run_time = completed_at - started_at
+
+            if run_time.seconds == 0:
+                continue
+
+            # The timestamp associated with the event is expected by Grafana to be
+            # in nanoseconds.
+            created_at_ns = int(created_at.timestamp()) * 10**9
+
+            workflow_metrics.append(
+                JobMetrics(
+                    workflow_run.name + "-" + workflow_job.name,
+                    queue_time.seconds,
+                    run_time.seconds,
+                    job_result,
+                    created_at_ns,
+                    workflow_run.id,
+                    workflow_run.name,
+                )
             )
-        )

     return workflow_metrics
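
The per-job loop keeps the existing Grafana convention: sample timestamps are sent in nanoseconds, produced by truncating the job's creation time to whole seconds and then scaling by 10**9. A small self-contained check of that conversion, using an arbitrary date:

from datetime import datetime, timezone

# The patch applies this conversion to workflow_job.created_at before
# storing it in a JobMetrics record; the date here is arbitrary.
created_at = datetime(2025, 1, 30, 12, 0, 0, tzinfo=timezone.utc)
created_at_ns = int(created_at.timestamp()) * 10**9
print(created_at_ns)  # 1738238400000000000

Truncating to whole seconds drops sub-second precision, which is harmless at this scrape granularity.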

@@ -238,8 +257,6 @@ def upload_metrics(workflow_metrics, metrics_userid, api_key):
 def main():
     # Authenticate with Github
     auth = Auth.Token(os.environ["GITHUB_TOKEN"])
-    github_object = Github(auth=auth)
-    github_repo = github_object.get_repo("llvm/llvm-project")

     grafana_api_key = os.environ["GRAFANA_API_KEY"]
     grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"]
@@ -251,20 +268,19 @@ def main():
     # Enter the main loop. Every five minutes we wake up and dump metrics for
     # the relevant jobs.
     while True:
+        github_object = Github(auth=auth)
+        github_repo = github_object.get_repo("llvm/llvm-project")
+
         current_metrics = get_per_workflow_metrics(github_repo, workflows_to_track)
         current_metrics += get_sampled_workflow_metrics(github_repo)
-        # Always send a hearbeat metric so we can monitor is this container is still able to log to Grafana.
-        current_metrics.append(
-            GaugeMetric("metrics_container_heartbeat", 1, time.time_ns())
-        )

         upload_metrics(current_metrics, grafana_metrics_userid, grafana_api_key)
         print(f"Uploaded {len(current_metrics)} metrics", file=sys.stderr)

         for workflow_metric in reversed(current_metrics):
             if isinstance(workflow_metric, JobMetrics):
                 workflows_to_track[
-                    workflow_metric.job_name
+                    workflow_metric.workflow_name
                 ] = workflow_metric.workflow_id

         time.sleep(SCRAPE_INTERVAL_SECONDS)

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion
@@ -131,7 +131,7 @@
 /bolt/ @aaupov @maksfb @rafaelauler @ayermolo @dcci @yota9

 # Bazel build system.
-/utils/bazel/ @rupprecht @keith
+/utils/bazel/ @rupprecht @keith @aaronmondal

 # InstallAPI and TextAPI
 /llvm/**/TextAPI/ @cyndyishida

.github/new-prs-labeler.yml

Lines changed: 3 additions & 0 deletions
@@ -730,6 +730,9 @@ llvm:regalloc:
 lldb:
 - lldb/**

+lldb-dap:
+- lldb/tools/lldb-dap/**
+
 backend:AMDGPU:
 - '**/*amdgpu*'
 - '**/*AMDGPU*'
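
Since the existing lldb entry already matches lldb/**, a PR touching lldb/tools/lldb-dap/ should now receive both the lldb and lldb-dap labels. A rough local sanity check follows; note that Python's fnmatch lets * cross / boundaries, so it only approximates the labeler's ** globs, and the sample paths are illustrative:

from fnmatch import fnmatch

patterns = {"lldb": "lldb/*", "lldb-dap": "lldb/tools/lldb-dap/*"}
for path in ["lldb/tools/lldb-dap/DAP.cpp", "lldb/source/Core/Module.cpp"]:
    # Collect every label whose pattern matches this path.
    labels = [label for label, pat in patterns.items() if fnmatch(path, pat)]
    print(path, "->", labels)
# lldb/tools/lldb-dap/DAP.cpp -> ['lldb', 'lldb-dap']
# lldb/source/Core/Module.cpp -> ['lldb']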

.github/workflows/build-ci-container-windows.yml

Lines changed: 3 additions & 3 deletions
@@ -27,7 +27,7 @@ jobs:
       container-filename: ${{ steps.vars.outputs.container-filename }}
     steps:
       - name: Checkout LLVM
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           sparse-checkout: .github/workflows/containers/github-action-ci-windows
       - name: Write Variables
@@ -46,7 +46,7 @@ jobs:
         run: |
           docker save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }}
       - name: Upload container image
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
         with:
           name: container
           path: ${{ steps.vars.outputs.container-filename }}
@@ -63,7 +63,7 @@ jobs:
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
       - name: Download container
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
         with:
          name: container
       - name: Push Container
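
All three hunks replace floating tags such as actions/checkout@v4 with full commit SHAs, keeping the human-readable version as a trailing comment. A sketch of how such a pin can be generated with git ls-remote; pinned_uses is a hypothetical helper, not part of this commit:

import subprocess

def pinned_uses(repo: str, tag: str) -> str:
    # Ask the remote which commit the tag points at. For annotated tags,
    # ls-remote also prints a peeled "<tag>^{}" line naming the commit
    # itself; it sorts after the tag ref, so we take the last line.
    out = subprocess.run(
        ["git", "ls-remote", "--tags", f"https://github.com/{repo}", tag],
        capture_output=True, text=True, check=True,
    ).stdout
    sha = out.strip().splitlines()[-1].split()[0]
    return f"uses: {repo}@{sha} # {tag}"

print(pinned_uses("actions/checkout", "v4.2.2"))
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

Pinning to a SHA keeps the step immutable even if the upstream tag is later moved.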

.github/workflows/build-ci-container.yml

Lines changed: 42 additions & 24 deletions
@@ -20,24 +20,31 @@ on:
 jobs:
   build-ci-container:
     if: github.repository_owner == 'llvm'
-    runs-on: depot-ubuntu-22.04-16
-    outputs:
-      container-name: ${{ steps.vars.outputs.container-name }}
-      container-name-agent: ${{ steps.vars.outputs.container-name-agent }}
-      container-name-tag: ${{ steps.vars.outputs.container-name-tag }}
-      container-name-agent-tag: ${{ steps.vars.outputs.container-name-agent-tag }}
-      container-filename: ${{ steps.vars.outputs.container-filename }}
-      container-agent-filename: ${{ steps.vars.outputs.container-agent-filename }}
+    runs-on: ${{ matrix.runs-on }}
+    strategy:
+      matrix:
+        include:
+          # The arch names should match the names used on dockerhub.
+          # See https://github.com/docker-library/official-images#architectures-other-than-amd64
+          - arch: amd64
+            runs-on: depot-ubuntu-22.04-16
+          - arch: arm64v8
+            runs-on: depot-ubuntu-22.04-arm-16
     steps:
       - name: Checkout LLVM
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           sparse-checkout: .github/workflows/containers/github-action-ci/
+      # podman is not installed by default on the ARM64 images.
+      - name: Install Podman
+        if: runner.arch == 'ARM64'
+        run: |
+          sudo apt-get install podman
       - name: Write Variables
         id: vars
         run: |
-          tag=`date +%s`
-          container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/ci-ubuntu-22.04"
+          tag=$(git rev-parse --short=12 HEAD)
+          container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/${{ matrix.arch }}/ci-ubuntu-22.04"
           echo "container-name=$container_name" >> $GITHUB_OUTPUT
           echo "container-name-agent=$container_name-agent" >> $GITHUB_OUTPUT
           echo "container-name-tag=$container_name:$tag" >> $GITHUB_OUTPUT
@@ -59,9 +66,9 @@ jobs:
           podman save ${{ steps.vars.outputs.container-name-agent-tag }} > ${{ steps.vars.outputs.container-agent-filename }}

       - name: Upload container image
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
         with:
-          name: container
+          name: container-${{ matrix.arch }}
           path: "*.tar"
           retention-days: 14

@@ -83,19 +90,30 @@ jobs:
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
       - name: Download container
-        uses: actions/download-artifact@v4
-        with:
-          name: container
+        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8

       - name: Push Container
         run: |
-          podman load -i ${{ needs.build-ci-container.outputs.container-filename }}
-          podman tag ${{ needs.build-ci-container.outputs.container-name-tag }} ${{ needs.build-ci-container.outputs.container-name }}:latest
+          function push_container {
+            image_name=$1
+            latest_name=$(echo $image_name | sed 's/:[a-f0-9]\+$/:latest/g')
+            podman tag $image_name $latest_name
+            echo "Pushing $image_name ..."
+            podman push $image_name
+            echo "Pushing $latest_name ..."
+            podman push $latest_name
+          }
+
           podman login -u ${{ github.actor }} -p $GITHUB_TOKEN ghcr.io
-          podman push ${{ needs.build-ci-container.outputs.container-name-tag }}
-          podman push ${{ needs.build-ci-container.outputs.container-name }}:latest
+          for f in $(find . -iname *.tar); do
+            image_name=$(podman load -q -i $f | sed 's/Loaded image: //g')
+            push_container $image_name

-          podman load -i ${{ needs.build-ci-container.outputs.container-agent-filename }}
-          podman tag ${{ needs.build-ci-container.outputs.container-name-agent-tag }} ${{ needs.build-ci-container.outputs.container-name-agent }}:latest
-          podman push ${{ needs.build-ci-container.outputs.container-name-agent-tag }}
-          podman push ${{ needs.build-ci-container.outputs.container-name-agent }}:latest
+            if echo $image_name | grep '/amd64/'; then
+              # For amd64, create an alias with the arch component removed.
+              # This matches the convention used on dockerhub.
+              default_image_name=$(echo $(dirname $(dirname $image_name))/$(basename $image_name))
+              podman tag $image_name $default_image_name
+              push_container $default_image_name
+            fi
+          done
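
The push step performs two name rewrites in shell: push_container's sed swaps a trailing hex tag for :latest, and the amd64 branch drops the arch path component with nested dirname/basename calls. The same string manipulation in Python, as a sketch to make the transformations explicit (the image name below is illustrative):

import re
from posixpath import basename, dirname

def latest_alias(image_name: str) -> str:
    # Equivalent of: sed 's/:[a-f0-9]\+$/:latest/g'
    return re.sub(r":[a-f0-9]+$", ":latest", image_name)

def default_alias(image_name: str) -> str:
    # Equivalent of: $(dirname $(dirname $image_name))/$(basename $image_name),
    # i.e. drop the arch component to get the dockerhub-style default name.
    return f"{dirname(dirname(image_name))}/{basename(image_name)}"

name = "ghcr.io/llvm/amd64/ci-ubuntu-22.04:0123456789ab"
print(latest_alias(name))   # ghcr.io/llvm/amd64/ci-ubuntu-22.04:latest
print(default_alias(name))  # ghcr.io/llvm/ci-ubuntu-22.04:0123456789ab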
