@@ -13,6 +13,17 @@ define <16 x i8> @combineXorAeseZeroARM64(<16 x i8> %data, <16 x i8> %key) {
1313 ret <16 x i8 > %data.aes
1414}
1515
; Fold when the zero sits in the *first* (state) operand of aese:
; aese(0, data ^ key) -> aese(data, key), per the CHECK lines below.
; NOTE(review): the fold relies on AESE xor-ing its two inputs before the
; SubBytes/ShiftRows steps (Armv8-A Crypto Extension) -- confirm against
; the Arm ARM; the xor commutes into either operand slot.
16+ define <16 x i8 > @combineXorAeseZeroLhsARM64 (<16 x i8 > %data , <16 x i8 > %key ) {
17+ ; CHECK-LABEL: define <16 x i8> @combineXorAeseZeroLhsARM64(
18+ ; CHECK-SAME: <16 x i8> [[DATA:%.*]], <16 x i8> [[KEY:%.*]]) {
19+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <16 x i8> @llvm.aarch64.crypto.aese(<16 x i8> [[DATA]], <16 x i8> [[KEY]])
20+ ; CHECK-NEXT: ret <16 x i8> [[DATA_AES]]
21+ ;
22+ %data.xor = xor <16 x i8 > %data , %key
23+ %data.aes = tail call <16 x i8 > @llvm.aarch64.crypto.aese (<16 x i8 > zeroinitializer , <16 x i8 > %data.xor )
24+ ret <16 x i8 > %data.aes
25+ }
26+
1627define <16 x i8 > @combineXorAeseNonZeroARM64 (<16 x i8 > %data , <16 x i8 > %key ) {
1728; CHECK-LABEL: define <16 x i8> @combineXorAeseNonZeroARM64(
1829; CHECK-SAME: <16 x i8> [[DATA:%.*]], <16 x i8> [[KEY:%.*]]) {
@@ -36,6 +47,17 @@ define <16 x i8> @combineXorAesdZeroARM64(<16 x i8> %data, <16 x i8> %key) {
3647 ret <16 x i8 > %data.aes
3748}
3849
; Same fold for the decrypt intrinsic with the zero in the first (state)
; operand: aesd(0, data ^ key) -> aesd(data, key), per the CHECK lines.
; NOTE(review): assumes AESD, like AESE, xors its operands as its first
; step (Armv8-A Crypto Extension) -- confirm against the Arm ARM.
50+ define <16 x i8 > @combineXorAesdZeroLhsARM64 (<16 x i8 > %data , <16 x i8 > %key ) {
51+ ; CHECK-LABEL: define <16 x i8> @combineXorAesdZeroLhsARM64(
52+ ; CHECK-SAME: <16 x i8> [[DATA:%.*]], <16 x i8> [[KEY:%.*]]) {
53+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <16 x i8> @llvm.aarch64.crypto.aesd(<16 x i8> [[DATA]], <16 x i8> [[KEY]])
54+ ; CHECK-NEXT: ret <16 x i8> [[DATA_AES]]
55+ ;
56+ %data.xor = xor <16 x i8 > %data , %key
57+ %data.aes = tail call <16 x i8 > @llvm.aarch64.crypto.aesd (<16 x i8 > zeroinitializer , <16 x i8 > %data.xor )
58+ ret <16 x i8 > %data.aes
59+ }
60+
3961define <16 x i8 > @combineXorAesdNonZeroARM64 (<16 x i8 > %data , <16 x i8 > %key ) {
4062; CHECK-LABEL: define <16 x i8> @combineXorAesdNonZeroARM64(
4163; CHECK-SAME: <16 x i8> [[DATA:%.*]], <16 x i8> [[KEY:%.*]]) {
@@ -51,3 +73,51 @@ define <16 x i8> @combineXorAesdNonZeroARM64(<16 x i8> %data, <16 x i8> %key) {
5173declare <16 x i8 > @llvm.aarch64.crypto.aese (<16 x i8 >, <16 x i8 >) #0
5274declare <16 x i8 > @llvm.aarch64.crypto.aesd (<16 x i8 >, <16 x i8 >) #0
5375
76+ ; SVE
77+
; Scalable-vector (SVE2 AES) variant, zero in the first operand:
; aese(0, data ^ key) -> aese(data, key), per the CHECK lines below.
; NOTE(review): mirrors the fixed-width ARM64 test above; the fold
; presumes the SVE aese intrinsic has the same xor-first semantics as
; the NEON one -- confirm against the SVE2 AESE description.
78+ define <vscale x 16 x i8 > @combineXorAeseZeroLhsSVE (<vscale x 16 x i8 > %data , <vscale x 16 x i8 > %key ) {
79+ ; CHECK-LABEL: define <vscale x 16 x i8> @combineXorAeseZeroLhsSVE(
80+ ; CHECK-SAME: <vscale x 16 x i8> [[DATA:%.*]], <vscale x 16 x i8> [[KEY:%.*]]) {
81+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <vscale x 16 x i8> @llvm.aarch64.sve.aese(<vscale x 16 x i8> [[DATA]], <vscale x 16 x i8> [[KEY]])
82+ ; CHECK-NEXT: ret <vscale x 16 x i8> [[DATA_AES]]
83+ ;
84+ %data.xor = xor <vscale x 16 x i8 > %data , %key
85+ %data.aes = tail call <vscale x 16 x i8 > @llvm.aarch64.sve.aese (<vscale x 16 x i8 > zeroinitializer , <vscale x 16 x i8 > %data.xor )
86+ ret <vscale x 16 x i8 > %data.aes
87+ }
88+
; SVE variant with the zero in the *second* operand:
; aese(data ^ key, 0) -> aese(data, key), per the CHECK lines below.
; Together with the ZeroLhs test this pins the fold for both operand
; positions of the commuting initial xor.
89+ define <vscale x 16 x i8 > @combineXorAeseZeroRhsSVE (<vscale x 16 x i8 > %data , <vscale x 16 x i8 > %key ) {
90+ ; CHECK-LABEL: define <vscale x 16 x i8> @combineXorAeseZeroRhsSVE(
91+ ; CHECK-SAME: <vscale x 16 x i8> [[DATA:%.*]], <vscale x 16 x i8> [[KEY:%.*]]) {
92+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <vscale x 16 x i8> @llvm.aarch64.sve.aese(<vscale x 16 x i8> [[DATA]], <vscale x 16 x i8> [[KEY]])
93+ ; CHECK-NEXT: ret <vscale x 16 x i8> [[DATA_AES]]
94+ ;
95+ %data.xor = xor <vscale x 16 x i8 > %data , %key
96+ %data.aes = tail call <vscale x 16 x i8 > @llvm.aarch64.sve.aese (<vscale x 16 x i8 > %data.xor , <vscale x 16 x i8 > zeroinitializer )
97+ ret <vscale x 16 x i8 > %data.aes
98+ }
99+
; SVE decrypt variant, zero in the first operand:
; aesd(0, data ^ key) -> aesd(data, key), per the CHECK lines below.
; NOTE(review): presumes SVE2 AESD xors its operands first, matching
; the NEON intrinsic -- confirm against the SVE2 AESD description.
100+ define <vscale x 16 x i8 > @combineXorAesdZeroLhsSVE (<vscale x 16 x i8 > %data , <vscale x 16 x i8 > %key ) {
101+ ; CHECK-LABEL: define <vscale x 16 x i8> @combineXorAesdZeroLhsSVE(
102+ ; CHECK-SAME: <vscale x 16 x i8> [[DATA:%.*]], <vscale x 16 x i8> [[KEY:%.*]]) {
103+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <vscale x 16 x i8> @llvm.aarch64.sve.aesd(<vscale x 16 x i8> [[DATA]], <vscale x 16 x i8> [[KEY]])
104+ ; CHECK-NEXT: ret <vscale x 16 x i8> [[DATA_AES]]
105+ ;
106+ %data.xor = xor <vscale x 16 x i8 > %data , %key
107+ %data.aes = tail call <vscale x 16 x i8 > @llvm.aarch64.sve.aesd (<vscale x 16 x i8 > zeroinitializer , <vscale x 16 x i8 > %data.xor )
108+ ret <vscale x 16 x i8 > %data.aes
109+ }
110+
; SVE decrypt variant with the zero in the *second* operand:
; aesd(data ^ key, 0) -> aesd(data, key), per the CHECK lines below.
; Completes coverage of both operand positions for the SVE aesd fold.
111+ define <vscale x 16 x i8 > @combineXorAesdZeroRhsSVE (<vscale x 16 x i8 > %data , <vscale x 16 x i8 > %key ) {
112+ ; CHECK-LABEL: define <vscale x 16 x i8> @combineXorAesdZeroRhsSVE(
113+ ; CHECK-SAME: <vscale x 16 x i8> [[DATA:%.*]], <vscale x 16 x i8> [[KEY:%.*]]) {
114+ ; CHECK-NEXT: [[DATA_AES:%.*]] = tail call <vscale x 16 x i8> @llvm.aarch64.sve.aesd(<vscale x 16 x i8> [[DATA]], <vscale x 16 x i8> [[KEY]])
115+ ; CHECK-NEXT: ret <vscale x 16 x i8> [[DATA_AES]]
116+ ;
117+ %data.xor = xor <vscale x 16 x i8 > %data , %key
118+ %data.aes = tail call <vscale x 16 x i8 > @llvm.aarch64.sve.aesd (<vscale x 16 x i8 > %data.xor , <vscale x 16 x i8 > zeroinitializer )
119+ ret <vscale x 16 x i8 > %data.aes
120+ }
121+
122+ declare <vscale x 16 x i8 > @llvm.aarch64.sve.aese (<vscale x 16 x i8 >, <vscale x 16 x i8 >) #0
123+ declare <vscale x 16 x i8 > @llvm.aarch64.sve.aesd (<vscale x 16 x i8 >, <vscale x 16 x i8 >) #0
0 commit comments