@@ -235,16 +235,111 @@ void test_and_set_dynamic(char *ptr, int order) {
 // CHECK-SAME: ) #[[ATTR0]] {
 // CHECK-NEXT: [[ENTRY:.*:]]
 // CHECK-NEXT: [[X:%.*]] = alloca [10 x i32], align 4
-// CHECK-NEXT: [[ATOMIC_TEMP:%.*]] = alloca i32, align 4
+// CHECK-NEXT: [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
 // CHECK-NEXT: [[ARRAYDECAY:%.*]] = getelementptr inbounds [10 x i32], ptr [[X]], i64 0, i64 0
 // CHECK-NEXT: [[TMP0:%.*]] = atomicrmw volatile xchg ptr [[ARRAYDECAY]], i8 1 seq_cst, align 4
 // CHECK-NEXT: [[TOBOOL:%.*]] = icmp ne i8 [[TMP0]], 0
-// CHECK-NEXT: store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 4
-// CHECK-NEXT: [[TMP1:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 4
+// CHECK-NEXT: store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[TMP1:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
 // CHECK-NEXT: [[LOADEDV:%.*]] = trunc i8 [[TMP1]] to i1
 // CHECK-NEXT: ret void
 //
 void test_and_set_array() {
   volatile int x[10];
   __atomic_test_and_set(x, memory_order_seq_cst);
 }
+
+// These intrinsics accept any pointer type, including void and incomplete
+// structs, and always access the first byte regardless of the actual type
+// size.
+
+struct incomplete;
+
+// CHECK-LABEL: define dso_local void @clear_int(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: store atomic i8 0, ptr [[TMP0]] monotonic, align 4
+// CHECK-NEXT: ret void
+//
+void clear_int(int *ptr) {
+  __atomic_clear(ptr, memory_order_relaxed);
+}
+// CHECK-LABEL: define dso_local void @clear_void(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: store atomic i8 0, ptr [[TMP0]] monotonic, align 1
+// CHECK-NEXT: ret void
+//
+void clear_void(void *ptr) {
+  __atomic_clear(ptr, memory_order_relaxed);
+}
+// CHECK-LABEL: define dso_local void @clear_incomplete(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: store atomic i8 0, ptr [[TMP0]] monotonic, align 1
+// CHECK-NEXT: ret void
+//
+void clear_incomplete(struct incomplete *ptr) {
+  __atomic_clear(ptr, memory_order_relaxed);
+}
+
+// CHECK-LABEL: define dso_local void @test_and_set_int(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 4
+// CHECK-NEXT: [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
+// CHECK-NEXT: store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
+// CHECK-NEXT: ret void
+//
+void test_and_set_int(int *ptr) {
+  __atomic_test_and_set(ptr, memory_order_relaxed);
+}
+// CHECK-LABEL: define dso_local void @test_and_set_void(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
+// CHECK-NEXT: [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
+// CHECK-NEXT: store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
+// CHECK-NEXT: ret void
+//
+void test_and_set_void(void *ptr) {
+  __atomic_test_and_set(ptr, memory_order_relaxed);
+}
+// CHECK-LABEL: define dso_local void @test_and_set_incomplete(
+// CHECK-SAME: ptr noundef [[PTR:%.*]]) #[[ATTR0]] {
+// CHECK-NEXT: [[ENTRY:.*:]]
+// CHECK-NEXT: [[PTR_ADDR:%.*]] = alloca ptr, align 8
+// CHECK-NEXT: [[ATOMIC_TEMP:%.*]] = alloca i8, align 1
+// CHECK-NEXT: store ptr [[PTR]], ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP0:%.*]] = load ptr, ptr [[PTR_ADDR]], align 8
+// CHECK-NEXT: [[TMP1:%.*]] = atomicrmw xchg ptr [[TMP0]], i8 1 monotonic, align 1
+// CHECK-NEXT: [[TOBOOL:%.*]] = icmp ne i8 [[TMP1]], 0
+// CHECK-NEXT: store i1 [[TOBOOL]], ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[TMP2:%.*]] = load i8, ptr [[ATOMIC_TEMP]], align 1
+// CHECK-NEXT: [[LOADEDV:%.*]] = trunc i8 [[TMP2]] to i1
+// CHECK-NEXT: ret void
+//
+void test_and_set_incomplete(struct incomplete *ptr) {
+  __atomic_test_and_set(ptr, memory_order_relaxed);
+}
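
For context on how the two builtins fit together, here is a minimal usage sketch (not part of the test file above; it assumes a GCC/Clang-style toolchain where the __ATOMIC_* macros are predefined). It is the classic test-and-set spinlock: the flag lives in a plain char, and as the tests above show, any pointer type would be accepted since only the first byte is accessed.

static char lock_byte; /* flag word; only its first byte is ever touched */

static void lock(void) {
  /* Spin until the previous value was 0, i.e. this thread set the flag. */
  while (__atomic_test_and_set(&lock_byte, __ATOMIC_ACQUIRE))
    ;
}

static void unlock(void) {
  /* Reset the flag, releasing the stores made while the lock was held. */
  __atomic_clear(&lock_byte, __ATOMIC_RELEASE);
}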