Commit 69ad292

Merge branch 'main' into force-ci-west
2 parents: a45d011 + a9b8dfe

30 files changed: +766 -447 lines changed

clang/lib/CIR/CodeGen/CIRGenExprAggregate.cpp

Lines changed: 1 addition & 2 deletions
@@ -133,8 +133,7 @@ class AggExprEmitter : public StmtVisitor<AggExprEmitter> {
   }
   void VisitParenExpr(ParenExpr *pe) { Visit(pe->getSubExpr()); }
   void VisitGenericSelectionExpr(GenericSelectionExpr *ge) {
-    cgf.cgm.errorNYI(ge->getSourceRange(),
-                     "AggExprEmitter: VisitGenericSelectionExpr");
+    Visit(ge->getResultExpr());
   }
   void VisitCoawaitExpr(CoawaitExpr *e) {
     cgf.cgm.errorNYI(e->getSourceRange(), "AggExprEmitter: VisitCoawaitExpr");

clang/lib/CIR/CodeGen/CIRGenFunction.cpp

Lines changed: 2 additions & 0 deletions
@@ -836,6 +836,8 @@ LValue CIRGenFunction::emitLValue(const Expr *e) {
     return emitCallExprLValue(cast<CallExpr>(e));
   case Expr::ParenExprClass:
     return emitLValue(cast<ParenExpr>(e)->getSubExpr());
+  case Expr::GenericSelectionExprClass:
+    return emitLValue(cast<GenericSelectionExpr>(e)->getResultExpr());
   case Expr::DeclRefExprClass:
     return emitDeclRefLValue(cast<DeclRefExpr>(e));
   case Expr::CStyleCastExprClass:

clang/test/CIR/CodeGen/struct.cpp

Lines changed: 29 additions & 0 deletions
@@ -154,3 +154,32 @@ void choose_expr() {
 // OGCG: %[[B_ADDR:.*]] = alloca %struct.CompleteS, align 4
 // OGCG: %[[C_ADDR:.*]] = alloca %struct.CompleteS, align 4
 // OGCG: call void @llvm.memcpy.p0.p0.i64(ptr align 4 %[[C_ADDR]], ptr align 4 %[[A_ADDR]], i64 8, i1 false)
+
+void generic_selection() {
+  CompleteS a;
+  CompleteS b;
+  int c;
+  CompleteS d = _Generic(c, int : a, default: b);
+}
+
+// CIR: cir.func{{.*}} @_Z17generic_selectionv()
+// CIR: %[[A_ADDR:.*]] = cir.alloca !rec_CompleteS, !cir.ptr<!rec_CompleteS>, ["a"]
+// CIR: %[[B_ADDR:.*]] = cir.alloca !rec_CompleteS, !cir.ptr<!rec_CompleteS>, ["b"]
+// CIR: %[[C_ADDR:.*]] = cir.alloca !s32i, !cir.ptr<!s32i>, ["c"]
+// CIR: %[[D_ADDR:.*]] = cir.alloca !rec_CompleteS, !cir.ptr<!rec_CompleteS>, ["d", init]
+// TODO(cir): Call to default copy constructor should be replaced by `cir.copy` op
+// CIR: cir.call @_ZN9CompleteSC1ERKS_(%[[D_ADDR]], %[[A_ADDR]]) nothrow : (!cir.ptr<!rec_CompleteS>, !cir.ptr<!rec_CompleteS>) -> ()
+
+// LLVM: define{{.*}} void @_Z17generic_selectionv()
+// LLVM: %1 = alloca %struct.CompleteS, i64 1, align 4
+// LLVM: %2 = alloca %struct.CompleteS, i64 1, align 4
+// LLVM: %3 = alloca i32, i64 1, align 4
+// LLVM: %4 = alloca %struct.CompleteS, i64 1, align 4
+// LLVM: call void @_ZN9CompleteSC1ERKS_(ptr %4, ptr %1)
+
+// OGCG: define{{.*}} void @_Z17generic_selectionv()
+// OGCG: %[[A_ADDR:.*]] = alloca %struct.CompleteS, align 4
+// OGCG: %[[B_ADDR:.*]] = alloca %struct.CompleteS, align 4
+// OGCG: %[[C_ADDR:.*]] = alloca i32, align 4
+// OGCG: %[[D_ADDR:.*]] = alloca %struct.CompleteS, align 4
+// OGCG: call void @llvm.memcpy.p0.p0.i64(ptr align 4 %[[D_ADDR]], ptr align 4 %[[A_ADDR]], i64 8, i1 false)

Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+
+// REQUIRES: amdgpu-registered-target
+
+// XFAIL: *
+
+// RUN: %clang_cc1 -E -fopenmp -x c -triple amdgcn-amd-amdhsa -fopenmp-targets=amdgcn-amd-amdhsa -save-temps=cwd %s -o %t-openmp-amdgcn-amd-amdhsa-gfx90a.i
+// RUN: %clang_cc1 -fopenmp -x c -triple x86_64-unknown-unknown -fopenmp-targets=amdgcn-amd-amdhsa -save-temps=cwd -emit-llvm-bc %s -o %t-x86_64-unknown-unknown.bc
+// RUN: %clang_cc1 -fopenmp -x c -triple amdgcn-amd-amdhsa -fopenmp-targets=amdgcn-amd-amdhsa -save-temps=cwd -emit-llvm -fopenmp-is-target-device -x cpp-output %t-openmp-amdgcn-amd-amdhsa-gfx90a.i -fopenmp-host-ir-file-path %t-x86_64-unknown-unknown.bc -o - | FileCheck %s
+// expected-no-diagnostics
+#ifndef HEADER
+#define HEADER
+
+#define N 1000
+
+int test_amdgcn_save_temps() {
+  int arr[N];
+#pragma omp target
+  for (int i = 0; i < N; i++) {
+    arr[i] = 1;
+  }
+  return arr[0];
+}
+#endif
+
+// CHECK: define {{[^@]+}}@{{__omp_offloading_[0-9a-z]+_[0-9a-z]+}}_test_amdgcn_save_temps

compiler-rt/lib/builtins/CMakeLists.txt

Lines changed: 6 additions & 5 deletions
@@ -816,14 +816,15 @@ set(s390x_SOURCES
   ${GENERIC_TF_SOURCES}
 )
 
-set(wasm32_SOURCES
-  ${GENERIC_TF_SOURCES}
-  ${GENERIC_SOURCES}
-)
-set(wasm64_SOURCES
+
+set(wasm_SOURCES
+  wasm/__c_longjmp.S
+  wasm/__cpp_exceptions.S
   ${GENERIC_TF_SOURCES}
   ${GENERIC_SOURCES}
 )
+set(wasm32_SOURCES ${wasm_SOURCES})
+set(wasm64_SOURCES ${wasm_SOURCES})
 
 set(ve_SOURCES
   ve/grow_stack.S

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+//===-- __c_longjmp.S - Implement __c_longjmp -----------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements __c_longjmp which LLVM uses to implement setjmp/longjmp
+// when Wasm EH is enabled.
+//
+//===----------------------------------------------------------------------===//
+
+#ifdef __wasm_exception_handling__
+
+#ifdef __wasm64__
+#define PTR i64
+#else
+#define PTR i32
+#endif
+
+.globl __c_longjmp
+.tagtype __c_longjmp PTR
+__c_longjmp:
+
+#endif // !__wasm_exception_handling__

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+//===-- __cpp_exception.S - Implement __cpp_exception ---------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements __cpp_exception which LLVM uses to implement exception
+// handling when Wasm EH is enabled.
+//
+//===----------------------------------------------------------------------===//
+
+#ifdef __wasm_exception_handling__
+
+#ifdef __wasm64__
+#define PTR i64
+#else
+#define PTR i32
+#endif
+
+.globl __cpp_exception
+.tagtype __cpp_exception PTR
+__cpp_exception:
+
+#endif // !__wasm_exception_handling__

libcxx/utils/find-rerun-candidates

Lines changed: 242 additions & 0 deletions
@@ -0,0 +1,242 @@
+#!/usr/bin/env python3
+
+import argparse
+import datetime
+import functools
+import os
+import pathlib
+import re
+import statistics
+import subprocess
+import sys
+
+import git
+import pandas
+import tqdm
+
+@functools.total_ordering
+class Commit:
+    """
+    This class represents a commit inside a given Git repository.
+    """
+
+    def __init__(self, git_repo, sha):
+        self._git_repo = git_repo
+        self._sha = sha
+
+    def __eq__(self, other):
+        """
+        Return whether two commits refer to the same commit.
+
+        This doesn't take into account the content of the Git tree at those commits, only the
+        'identity' of the commits themselves.
+        """
+        return self.fullrev == other.fullrev
+
+    def __lt__(self, other):
+        """
+        Return whether a commit is an ancestor of another commit in the Git repository.
+        """
+        # Is self._sha an ancestor of other._sha?
+        res = subprocess.run(['git', '-C', self._git_repo, 'merge-base', '--is-ancestor', self._sha, other._sha])
+        if res.returncode not in (0, 1):
+            raise RuntimeError(f'Error when trying to obtain the commit order for {self._sha} and {other._sha}')
+        return res.returncode == 0
+
+    def __hash__(self):
+        """
+        Return the full revision for this commit.
+        """
+        return hash(self.fullrev)
+
+    @functools.cache
+    def show(self, include_diff=False):
+        """
+        Return the commit information equivalent to `git show` associated to this commit.
+        """
+        cmd = ['git', '-C', self._git_repo, 'show', self._sha]
+        if not include_diff:
+            cmd.append('--no-patch')
+        return subprocess.check_output(cmd, text=True)
+
+    @functools.cached_property
+    def shortrev(self):
+        """
+        Return the shortened version of the given SHA.
+        """
+        return subprocess.check_output(['git', '-C', self._git_repo, 'rev-parse', '--short', self._sha], text=True).strip()
+
+    @functools.cached_property
+    def fullrev(self):
+        """
+        Return the full SHA associated to this commit.
+        """
+        return subprocess.check_output(['git', '-C', self._git_repo, 'rev-parse', self._sha], text=True).strip()
+
+    @functools.cached_property
+    def commit_date(self):
+        """
+        Return the date of the commit as a `datetime.datetime` object.
+        """
+        repo = git.Repo(self._git_repo)
+        return datetime.datetime.fromtimestamp(repo.commit(self._sha).committed_date)
+
+    def prefetch(self):
+        """
+        Prefetch cached properties associated to this commit object.
+
+        This makes it possible to control when time is spent recovering that information from Git for
+        e.g. better reporting to the user.
+        """
+        self.commit_date
+        self.fullrev
+        self.shortrev
+        self.show()
+
+    def __str__(self):
+        return self._sha
+
+def directory_path(string):
+    if os.path.isdir(string):
+        return pathlib.Path(string)
+    else:
+        raise NotADirectoryError(string)
+
+def parse_lnt(lines, aggregate=statistics.median):
+    """
+    Parse lines in LNT format and return a list of dictionaries of the form:
+
+    [
+        {
+            'benchmark': <benchmark1>,
+            <metric1>: [float],
+            <metric2>: [float],
+            'data_points': int,
+            ...
+        },
+        {
+            'benchmark': <benchmark2>,
+            <metric1>: [float],
+            <metric2>: [float],
+            'data_points': int,
+            ...
+        },
+        ...
+    ]
+
+    If a metric has multiple values associated to it, they are aggregated into a single
+    value using the provided aggregation function.
+    """
+    results = {}
+    for line in lines:
+        line = line.strip()
+        if not line:
+            continue
+
+        (identifier, value) = line.split(' ')
+        (benchmark, metric) = identifier.split('.')
+        if benchmark not in results:
+            results[benchmark] = {'benchmark': benchmark}
+
+        entry = results[benchmark]
+        if metric not in entry:
+            entry[metric] = []
+        entry[metric].append(float(value))
+
+    for (bm, entry) in results.items():
+        metrics = [key for key in entry if isinstance(entry[key], list)]
+        min_data_points = min(len(entry[metric]) for metric in metrics)
+        for metric in metrics:
+            entry[metric] = aggregate(entry[metric])
+        entry['data_points'] = min_data_points
+
+    return list(results.values())
+
+def sorted_revlist(git_repo, commits):
+    """
+    Return the list of commits sorted by their chronological order (from oldest to newest) in the
+    provided Git repository. Items earlier in the list are older than items later in the list.
+    """
+    revlist_cmd = ['git', '-C', git_repo, 'rev-list', '--no-walk'] + list(commits)
+    revlist = subprocess.check_output(revlist_cmd, text=True).strip().splitlines()
+    return list(reversed(revlist))
+
+def main(argv):
+    parser = argparse.ArgumentParser(
+        prog='find-rerun-candidates',
+        description='Find benchmarking data points that are good candidates for additional runs, to reduce noise.')
+    parser.add_argument('directory', type=directory_path,
+        help='Path to a valid directory containing benchmark data in LNT format, each file being named <commit>.lnt. '
+             'This is also the format generated by the `benchmark-historical` utility.')
+    parser.add_argument('--metric', type=str, default='execution_time',
+        help='The metric to analyze. LNT data may contain multiple metrics (e.g. code size, execution time, etc) -- '
+             'this option allows selecting which metric is analyzed for rerun candidates. The default is "execution_time".')
+    parser.add_argument('--filter', type=str, required=False,
+        help='An optional regular expression used to filter the benchmarks included in the analysis. '
+             'Only benchmarks whose names match the regular expression will be analyzed.')
+    parser.add_argument('--outlier-threshold', metavar='FLOAT', type=float, default=0.1,
+        help='Relative difference from the previous points for considering a data point as an outlier. This threshold is '
+             'expressed as a floating point number, e.g. 0.25 will detect points that differ by more than 25%% from their '
+             'previous result.')
+    parser.add_argument('--data-points-threshold', type=int, required=False,
+        help='Number of data points above which an outlier is not considered an outlier. If an outlier has more than '
+             'that number of data points yet its relative difference is above the threshold, it is not considered an '
+             'outlier. This can be used to re-run noisy data points until we have at least N samples, at which point '
+             'we consider the data to be accurate, even if the result is beyond the threshold. By default, there is '
+             'no limit on the number of data points.')
+    parser.add_argument('--git-repo', type=directory_path, default=pathlib.Path(os.getcwd()),
+        help='Path to the git repository to use for ordering commits in time. '
+             'By default, the current working directory is used.')
+    args = parser.parse_args(argv)
+
+    # Extract benchmark data from the directory.
+    data = {}
+    files = [f for f in args.directory.glob('*.lnt')]
+    for file in tqdm.tqdm(files, desc='Parsing LNT files'):
+        rows = parse_lnt(file.read_text().splitlines())
+        (commit, _) = os.path.splitext(os.path.basename(file))
+        commit = Commit(args.git_repo, commit)
+        data[commit] = rows
+
+    # Obtain commit information which is then cached throughout the program. Do this
+    # eagerly so we can provide a progress bar.
+    for commit in tqdm.tqdm(data.keys(), desc='Prefetching Git information'):
+        commit.prefetch()
+
+    # Create a dataframe from the raw data and add some columns to it:
+    # - 'commit' represents the Commit object associated to the results in that row
+    # - `revlist_order` represents the order of the commit within the Git repository.
+    revlist = sorted_revlist(args.git_repo, [c.fullrev for c in data.keys()])
+    data = pandas.DataFrame([row | {'commit': c} for (c, rows) in data.items() for row in rows])
+    data = data.join(pandas.DataFrame([{'revlist_order': revlist.index(c.fullrev)} for c in data['commit']]))
+
+    # Filter the benchmarks if needed.
+    if args.filter is not None:
+        keeplist = [b for b in data['benchmark'] if re.search(args.filter, b) is not None]
+        data = data[data['benchmark'].isin(keeplist)]
+
+    # Detect outliers by selecting all benchmarks whose change percentage is beyond the threshold.
+    # If we have a max number of points, also take that into account.
+    if args.data_points_threshold is not None:
+        print(f'Generating outliers with more than {args.outlier_threshold * 100}% relative difference and less than {args.data_points_threshold} data points')
+    else:
+        print(f'Generating outliers with more than {args.outlier_threshold * 100}% relative difference')
+
+    overall = set()
+    for (benchmark, series) in data.sort_values(by='revlist_order').groupby('benchmark'):
+        pct_change = series[args.metric].pct_change()
+        outliers = series[pct_change.abs() > args.outlier_threshold]
+        if args.data_points_threshold is not None:
+            outliers = outliers[outliers['data_points'] < args.data_points_threshold]
+        outliers = set(outliers['commit'])
+        overall |= outliers
+        if len(outliers) > 0:
+            print(f'{benchmark}: {" ".join(c.shortrev for c in outliers)}')
+
+    if len(overall) > 0:
+        print(f'Summary: {" ".join(c.shortrev for c in overall)}')
+    else:
+        print(f'No outliers')
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
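
For readers unfamiliar with the LNT text format that find-rerun-candidates consumes, the snippet below is a minimal, self-contained sketch of what the script's parse_lnt() does (benchmark names and values here are invented, not taken from the commit): each input line is a "<benchmark>.<metric> <value>" pair, repeated samples of a metric are aggregated with the median, and 'data_points' records the smallest sample count across metrics.

# Illustrative sketch only -- mirrors the aggregation performed by parse_lnt()
# in libcxx/utils/find-rerun-candidates; the data is made up.
import statistics

sample_lnt = [
    'algorithms/sort.execution_time 12.1',
    'algorithms/sort.execution_time 12.9',
    'algorithms/sort.size 4096',
]

results = {}
for line in sample_lnt:
    identifier, value = line.split(' ')
    benchmark, metric = identifier.split('.')
    entry = results.setdefault(benchmark, {'benchmark': benchmark})
    entry.setdefault(metric, []).append(float(value))

for entry in results.values():
    metrics = [key for key in entry if isinstance(entry[key], list)]
    entry['data_points'] = min(len(entry[m]) for m in metrics)
    for m in metrics:
        entry[m] = statistics.median(entry[m])  # aggregate repeated samples

print(list(results.values()))
# [{'benchmark': 'algorithms/sort', 'execution_time': 12.5, 'size': 4096.0, 'data_points': 1}]

The script then loads one such <commit>.lnt file per commit, orders the commits with git rev-list, and flags a commit as a rerun candidate when the selected metric changes by more than --outlier-threshold relative to the previous commit, unless --data-points-threshold is set and the point already has at least that many samples.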
