@@ -27,7 +27,7 @@ define i32 @mul_4xi8_zc(<4 x i8> %a, i32 %c) {
 ; AVXVNNI-LABEL: mul_4xi8_zc:
 ; AVXVNNI: # %bb.0: # %entry
 ; AVXVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVXVNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVXVNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVXVNNI-NEXT: {vex} vpdpbusd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1
 ; AVXVNNI-NEXT: vmovd %xmm1, %eax
 ; AVXVNNI-NEXT: addl %edi, %eax
@@ -36,7 +36,7 @@ define i32 @mul_4xi8_zc(<4 x i8> %a, i32 %c) {
 ; AVX512VNNI-LABEL: mul_4xi8_zc:
 ; AVX512VNNI: # %bb.0: # %entry
 ; AVX512VNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVX512VNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVX512VNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; AVX512VNNI-NEXT: vpdpbusd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
 ; AVX512VNNI-NEXT: vmovd %xmm1, %eax
@@ -47,7 +47,7 @@ define i32 @mul_4xi8_zc(<4 x i8> %a, i32 %c) {
 ; AVX512VLVNNI-LABEL: mul_4xi8_zc:
 ; AVX512VLVNNI: # %bb.0: # %entry
 ; AVX512VLVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VLVNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVX512VLVNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVX512VLVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; AVX512VLVNNI-NEXT: vpdpbusd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1
 ; AVX512VLVNNI-NEXT: vmovd %xmm1, %eax
@@ -67,7 +67,7 @@ define i32 @mul_4xi4_cz(<4 x i4> %a, i32 %c) {
 ; AVXVNNI-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u]
 ; AVXVNNI-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
 ; AVXVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVXVNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVXVNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVXVNNI-NEXT: {vex} vpdpbusd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1
 ; AVXVNNI-NEXT: vmovd %xmm1, %eax
 ; AVXVNNI-NEXT: addl %edi, %eax
@@ -78,7 +78,7 @@ define i32 @mul_4xi4_cz(<4 x i4> %a, i32 %c) {
 ; AVX512VNNI-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u]
 ; AVX512VNNI-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
 ; AVX512VNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVX512VNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVX512VNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; AVX512VNNI-NEXT: vpdpbusd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
 ; AVX512VNNI-NEXT: vmovd %xmm1, %eax
@@ -107,7 +107,7 @@ define i32 @mul_4xi8_cs(<4 x i8> %a, i32 %c) {
 ; AVXVNNI-LABEL: mul_4xi8_cs:
 ; AVXVNNI: # %bb.0: # %entry
 ; AVXVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVXVNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVXVNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVXVNNI-NEXT: vmovd {{.*#+}} xmm2 = [16,1,2,255,0,0,0,0,0,0,0,0,0,0,0,0]
 ; AVXVNNI-NEXT: {vex} vpdpbusd %xmm0, %xmm2, %xmm1
 ; AVXVNNI-NEXT: vmovd %xmm1, %eax
@@ -117,7 +117,7 @@ define i32 @mul_4xi8_cs(<4 x i8> %a, i32 %c) {
 ; AVX512VNNI-LABEL: mul_4xi8_cs:
 ; AVX512VNNI: # %bb.0: # %entry
 ; AVX512VNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVX512VNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVX512VNNI-NEXT: vmovd {{.*#+}} xmm1 = [16,1,2,255,0,0,0,0,0,0,0,0,0,0,0,0]
 ; AVX512VNNI-NEXT: vpxor %xmm2, %xmm2, %xmm2
 ; AVX512VNNI-NEXT: vpdpbusd %zmm0, %zmm1, %zmm2
@@ -129,7 +129,7 @@ define i32 @mul_4xi8_cs(<4 x i8> %a, i32 %c) {
 ; AVX512VLVNNI-LABEL: mul_4xi8_cs:
 ; AVX512VLVNNI: # %bb.0: # %entry
 ; AVX512VLVNNI-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VLVNNI-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3,4,5,6,7]
+; AVX512VLVNNI-NEXT: vpblendd {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3]
 ; AVX512VLVNNI-NEXT: vmovd {{.*#+}} xmm1 = [16,1,2,255,0,0,0,0,0,0,0,0,0,0,0,0]
 ; AVX512VLVNNI-NEXT: vpxor %xmm2, %xmm2, %xmm2
 ; AVX512VLVNNI-NEXT: vpdpbusd %xmm0, %xmm1, %xmm2
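
For context, the checks above belong to the test function mul_4xi8_zc, whose signature appears in the hunk headers. A minimal sketch of the IR pattern such a test exercises is below, assuming a zero-extend of the <4 x i8> operand, a multiply by a constant vector, and a horizontal reduction; the actual constants and body in the test file are not shown in this diff, so treat the values as placeholders.

; Hypothetical reconstruction of the dot-product pattern, not the exact test body.
define i32 @mul_4xi8_zc(<4 x i8> %a, i32 %c) {
entry:
  ; Zero-extend the i8 lanes so the multiply happens in i32.
  %ext = zext <4 x i8> %a to <4 x i32>
  ; Multiply by a constant vector (placeholder values, within signed-i8 range).
  %mul = mul nsw <4 x i32> %ext, <i32 16, i32 1, i32 2, i32 127>
  ; Sum the four products, then add the scalar argument.
  %sum = call i32 @llvm.vector.reduce.add.v4i32(<4 x i32> %mul)
  %res = add nsw i32 %sum, %c
  ret i32 %res
}

declare i32 @llvm.vector.reduce.add.v4i32(<4 x i32>)

The backend lowers this kind of pattern to vpdpbusd; the updated checks expect the low 32-bit element to be isolated with a vpblendd dword blend (xmm0[0],xmm1[1,2,3]) instead of the equivalent vpblendw word blend, which zeroes the same upper lanes.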