@@ -12,18 +12,15 @@ define <4 x i64> @test_mm256_abs_epi8(<4 x i64> noundef %a) local_unnamed_addr #
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load <4 x i64>, <4 x i64>* bitcast ([100 x i64]* @__msan_param_tls to <4 x i64>*), align 8
; ORIGIN-NEXT: [[TMP1:%.*]] = load i32, i32* getelementptr inbounds ([200 x i32], [200 x i32]* @__msan_param_origin_tls, i32 0, i32 0), align 4
- ; CHECK: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <32 x i8>
+ ; CHECK: call void @llvm.donothing()
+ ; CHECK-NEXT: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <32 x i8>
; CHECK-NEXT: [[TMP3:%.*]] = bitcast <4 x i64> [[A:%.*]] to <32 x i8>
- ; CHECK-NEXT: [[TMP4:%.*]] = bitcast <32 x i8> [[TMP2]] to i256
- ; CHECK-NEXT: [[_MSCMP:%.*]] = icmp ne i256 [[TMP4]], 0
- ; CHECK-NEXT: br i1 [[_MSCMP]], label [[TMP5:%.*]], label [[TMP6:%.*]], !prof !2
- ; CHECK: call void @__msan_warning_with_origin_noreturn
- ; CHECK: unreachable
- ; CHECK: [[TMP7:%.*]] = tail call <32 x i8> @llvm.abs.v32i8(<32 x i8> [[TMP3]], i1 false)
- ; CHECK-NEXT: [[TMP8:%.*]] = bitcast <32 x i8> [[TMP7]] to <4 x i64>
- ; CHECK-NEXT: store <4 x i64> zeroinitializer, <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
- ; ORIGIN-NEXT: store i32 0, i32* @__msan_retval_origin_tls, align 4
- ; CHECK: ret <4 x i64> [[TMP8]]
+ ; CHECK-NEXT: [[TMP4:%.*]] = tail call <32 x i8> @llvm.abs.v32i8(<32 x i8> [[TMP3]], i1 false)
+ ; CHECK-NEXT: [[TMP5:%.*]] = bitcast <32 x i8> [[TMP2]] to <4 x i64>
+ ; CHECK-NEXT: [[TMP6:%.*]] = bitcast <32 x i8> [[TMP4]] to <4 x i64>
+ ; CHECK-NEXT: store <4 x i64> [[TMP5]], <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
+ ; ORIGIN-NEXT: store i32 [[TMP1]], i32* @__msan_retval_origin_tls, align 4
+ ; CHECK: ret <4 x i64> [[TMP6]]
;
entry:
%0 = bitcast <4 x i64 > %a to <32 x i8 >
@@ -37,18 +34,15 @@ define <4 x i64> @test_mm256_abs_epi16(<4 x i64> %a) local_unnamed_addr #0 {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load <4 x i64>, <4 x i64>* bitcast ([100 x i64]* @__msan_param_tls to <4 x i64>*), align 8
; ORIGIN-NEXT: [[TMP1:%.*]] = load i32, i32* getelementptr inbounds ([200 x i32], [200 x i32]* @__msan_param_origin_tls, i32 0, i32 0), align 4
- ; CHECK: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <16 x i16>
+ ; CHECK: call void @llvm.donothing()
+ ; CHECK-NEXT: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <16 x i16>
; CHECK-NEXT: [[TMP3:%.*]] = bitcast <4 x i64> [[A:%.*]] to <16 x i16>
- ; CHECK-NEXT: [[TMP4:%.*]] = bitcast <16 x i16> [[TMP2]] to i256
- ; CHECK-NEXT: [[_MSCMP:%.*]] = icmp ne i256 [[TMP4]], 0
- ; CHECK-NEXT: br i1 [[_MSCMP]], label [[TMP5:%.*]], label [[TMP6:%.*]], !prof !2
- ; CHECK: call void @__msan_warning_with_origin_noreturn
- ; CHECK: unreachable
- ; CHECK: [[TMP7:%.*]] = tail call <16 x i16> @llvm.abs.v16i16(<16 x i16> [[TMP3]], i1 false)
- ; CHECK-NEXT: [[TMP8:%.*]] = bitcast <16 x i16> [[TMP7]] to <4 x i64>
- ; CHECK-NEXT: store <4 x i64> zeroinitializer, <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
- ; ORIGIN-NEXT: store i32 0, i32* @__msan_retval_origin_tls, align 4
- ; CHECK: ret <4 x i64> [[TMP8]]
+ ; CHECK-NEXT: [[TMP4:%.*]] = tail call <16 x i16> @llvm.abs.v16i16(<16 x i16> [[TMP3]], i1 false)
+ ; CHECK-NEXT: [[TMP5:%.*]] = bitcast <16 x i16> [[TMP2]] to <4 x i64>
+ ; CHECK-NEXT: [[TMP6:%.*]] = bitcast <16 x i16> [[TMP4]] to <4 x i64>
+ ; CHECK-NEXT: store <4 x i64> [[TMP5]], <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
+ ; ORIGIN-NEXT: store i32 [[TMP1]], i32* @__msan_retval_origin_tls, align 4
+ ; CHECK: ret <4 x i64> [[TMP6]]
;
entry:
%0 = bitcast <4 x i64 > %a to <16 x i16 >
@@ -62,18 +56,15 @@ define <4 x i64> @test_mm256_abs_epi32(<4 x i64> %a) local_unnamed_addr #0 {
; CHECK-NEXT: entry:
; CHECK-NEXT: [[TMP0:%.*]] = load <4 x i64>, <4 x i64>* bitcast ([100 x i64]* @__msan_param_tls to <4 x i64>*), align 8
; ORIGIN-NEXT: [[TMP1:%.*]] = load i32, i32* getelementptr inbounds ([200 x i32], [200 x i32]* @__msan_param_origin_tls, i32 0, i32 0), align 4
- ; CHECK: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <8 x i32>
+ ; CHECK: call void @llvm.donothing()
+ ; CHECK-NEXT: [[TMP2:%.*]] = bitcast <4 x i64> [[TMP0]] to <8 x i32>
; CHECK-NEXT: [[TMP3:%.*]] = bitcast <4 x i64> [[A:%.*]] to <8 x i32>
- ; CHECK-NEXT: [[TMP4:%.*]] = bitcast <8 x i32> [[TMP2]] to i256
- ; CHECK-NEXT: [[_MSCMP:%.*]] = icmp ne i256 [[TMP4]], 0
- ; CHECK-NEXT: br i1 [[_MSCMP]], label [[TMP5:%.*]], label [[TMP6:%.*]], !prof !2
- ; CHECK: call void @__msan_warning_with_origin_noreturn
- ; CHECK: unreachable
- ; CHECK: [[TMP7:%.*]] = tail call <8 x i32> @llvm.abs.v8i32(<8 x i32> [[TMP3]], i1 false)
- ; CHECK-NEXT: [[TMP8:%.*]] = bitcast <8 x i32> [[TMP7]] to <4 x i64>
- ; CHECK-NEXT: store <4 x i64> zeroinitializer, <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
- ; ORIGIN-NEXT: store i32 0, i32* @__msan_retval_origin_tls, align 4
- ; CHECK: ret <4 x i64> [[TMP8]]
+ ; CHECK-NEXT: [[TMP4:%.*]] = tail call <8 x i32> @llvm.abs.v8i32(<8 x i32> [[TMP3]], i1 false)
+ ; CHECK-NEXT: [[TMP5:%.*]] = bitcast <8 x i32> [[TMP2]] to <4 x i64>
+ ; CHECK-NEXT: [[TMP6:%.*]] = bitcast <8 x i32> [[TMP4]] to <4 x i64>
+ ; CHECK-NEXT: store <4 x i64> [[TMP5]], <4 x i64>* bitcast ([100 x i64]* @__msan_retval_tls to <4 x i64>*), align 8
+ ; ORIGIN-NEXT: store i32 [[TMP1]], i32* @__msan_retval_origin_tls, align 4
+ ; CHECK: ret <4 x i64> [[TMP6]]
;
entry:
%0 = bitcast <4 x i64 > %a to <8 x i32 >
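
In all three hunks the pattern is the same: the old expectations had the instrumentation check the argument shadow (bitcast to i256, icmp against zero, branch to __msan_warning_with_origin_noreturn) and store a zero return shadow and origin, while the new expectations propagate the argument shadow ([[TMP5]]) and origin ([[TMP1]]) through llvm.abs into __msan_retval_tls and __msan_retval_origin_tls. For reference, a minimal C sketch of the AVX2 intrinsics the three test functions exercise; this source and the build flags are an assumption about how such tests are typically produced, not part of this commit. Something like clang -fsanitize=memory -mavx2 -O2 -emit-llvm on it yields IR like the entry: blocks above.

    #include <immintrin.h>

    // AVX2 absolute-value intrinsics matching the three tests above.
    __m256i test_mm256_abs_epi8(__m256i a)  { return _mm256_abs_epi8(a);  }  // lowers to llvm.abs.v32i8
    __m256i test_mm256_abs_epi16(__m256i a) { return _mm256_abs_epi16(a); }  // lowers to llvm.abs.v16i16
    __m256i test_mm256_abs_epi32(__m256i a) { return _mm256_abs_epi32(a); }  // lowers to llvm.abs.v8i32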