@@ -45,21 +45,17 @@ define void @store_i32_stride6_vf2(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
 ; AVX-NEXT:    vmovsd {{.*#+}} xmm1 = mem[0],zero
-; AVX-NEXT:    vmovlhps {{.*#+}} xmm2 = xmm1[0],xmm0[0]
+; AVX-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm1[0],xmm0[0]
+; AVX-NEXT:    vmovsd {{.*#+}} xmm1 = mem[0],zero
+; AVX-NEXT:    vmovsd {{.*#+}} xmm2 = mem[0],zero
+; AVX-NEXT:    vmovlhps {{.*#+}} xmm1 = xmm2[0],xmm1[0]
+; AVX-NEXT:    vmovsd {{.*#+}} xmm2 = mem[0],zero
 ; AVX-NEXT:    vmovsd {{.*#+}} xmm3 = mem[0],zero
-; AVX-NEXT:    vmovsd {{.*#+}} xmm4 = mem[0],zero
-; AVX-NEXT:    vmovlhps {{.*#+}} xmm3 = xmm4[0],xmm3[0]
-; AVX-NEXT:    vmovsd {{.*#+}} xmm4 = mem[0],zero
-; AVX-NEXT:    vmovsd {{.*#+}} xmm5 = mem[0],zero
-; AVX-NEXT:    vmovlhps {{.*#+}} xmm4 = xmm5[0],xmm4[0]
-; AVX-NEXT:    vinsertf128 $1, %xmm2, %ymm3, %ymm2
-; AVX-NEXT:    vpermilps {{.*#+}} ymm2 = ymm2[u,u,0,2,u,u,5,7]
-; AVX-NEXT:    vunpcklps {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
-; AVX-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1],ymm2[2,3,4,5,6,7]
-; AVX-NEXT:    vshufps {{.*#+}} xmm1 = xmm4[0,2,2,3]
-; AVX-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm1
-; AVX-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5],ymm0[6,7]
-; AVX-NEXT:    vshufps {{.*#+}} xmm1 = xmm3[1,3],xmm4[1,3]
+; AVX-NEXT:    vmovlhps {{.*#+}} xmm2 = xmm3[0],xmm2[0]
+; AVX-NEXT:    vshufps {{.*#+}} xmm3 = xmm2[0,2],xmm0[1,3]
+; AVX-NEXT:    vshufps {{.*#+}} xmm0 = xmm0[0,2],xmm1[0,2]
+; AVX-NEXT:    vinsertf128 $1, %xmm3, %ymm0, %ymm0
+; AVX-NEXT:    vshufps {{.*#+}} xmm1 = xmm1[1,3],xmm2[1,3]
 ; AVX-NEXT:    vmovaps %xmm1, 32(%rax)
 ; AVX-NEXT:    vmovaps %ymm0, (%rax)
 ; AVX-NEXT:    vzeroupper
@@ -363,39 +359,36 @@ define void @store_i32_stride6_vf4(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX:         # %bb.0:
 ; AVX-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX-NEXT:    vmovaps (%rdi), %xmm0
-; AVX-NEXT:    vmovaps (%rsi), %xmm2
-; AVX-NEXT:    vmovaps (%rdx), %xmm1
+; AVX-NEXT:    vmovaps (%rsi), %xmm1
+; AVX-NEXT:    vmovaps (%rdx), %xmm2
 ; AVX-NEXT:    vmovaps (%rcx), %xmm3
 ; AVX-NEXT:    vmovaps (%r8), %xmm4
 ; AVX-NEXT:    vmovaps (%r9), %xmm5
-; AVX-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm6
+; AVX-NEXT:    vinsertf128 $1, %xmm5, %ymm4, %ymm6
+; AVX-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm7
+; AVX-NEXT:    vunpckhpd {{.*#+}} ymm6 = ymm6[1],ymm7[1],ymm6[3],ymm7[3]
 ; AVX-NEXT:    vinsertf128 $1, %xmm3, %ymm1, %ymm7
-; AVX-NEXT:    vinsertf128 $1, %xmm5, %ymm4, %ymm8
-; AVX-NEXT:    vinsertf128 $1, %xmm4, %ymm5, %ymm9
-; AVX-NEXT:    vunpcklps {{.*#+}} ymm10 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[4],ymm8[4],ymm9[5],ymm8[5]
-; AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm2, %ymm11
-; AVX-NEXT:    vunpcklpd {{.*#+}} ymm12 = ymm6[0],ymm11[0],ymm6[2],ymm11[2]
-; AVX-NEXT:    vshufps {{.*#+}} ymm12 = ymm12[0,2,3,1,4,6,7,5]
-; AVX-NEXT:    vshufps {{.*#+}} xmm13 = xmm3[0,0],xmm1[0,0]
-; AVX-NEXT:    vshufps {{.*#+}} xmm13 = xmm13[0,1,2,0]
-; AVX-NEXT:    vblendps {{.*#+}} ymm12 = ymm12[0,1],ymm13[2,3],ymm12[4,5,6,7]
-; AVX-NEXT:    vblendps {{.*#+}} ymm10 = ymm12[0,1,2,3],ymm10[4,5],ymm12[6,7]
-; AVX-NEXT:    vunpckhps {{.*#+}} ymm6 = ymm11[2],ymm6[2],ymm11[3],ymm6[3],ymm11[6],ymm6[6],ymm11[7],ymm6[7]
-; AVX-NEXT:    vinsertf128 $1, %xmm1, %ymm3, %ymm11
-; AVX-NEXT:    vshufps {{.*#+}} ymm7 = ymm7[1,2],ymm11[1,2],ymm7[5,6],ymm11[5,6]
-; AVX-NEXT:    vshufps {{.*#+}} ymm7 = ymm7[0,2,3,1,4,6,7,5]
-; AVX-NEXT:    vblendps {{.*#+}} ymm6 = ymm7[0,1,2,3],ymm6[4,5],ymm7[6,7]
-; AVX-NEXT:    vunpcklps {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
-; AVX-NEXT:    vblendps {{.*#+}} ymm4 = ymm6[0,1],ymm4[2,3],ymm6[4,5,6,7]
-; AVX-NEXT:    vunpckhpd {{.*#+}} ymm5 = ymm8[1],ymm9[1],ymm8[3],ymm9[3]
-; AVX-NEXT:    vinsertf128 $1, %xmm3, %ymm2, %ymm2
-; AVX-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
-; AVX-NEXT:    vshufps {{.*#+}} ymm0 = ymm0[3,3],ymm2[3,3],ymm0[7,7],ymm2[7,7]
-; AVX-NEXT:    vblendps {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6],ymm5[7]
-; AVX-NEXT:    vpermilps {{.*#+}} ymm0 = ymm0[0,2,1,3,4,6,7,5]
-; AVX-NEXT:    vmovaps %ymm0, 64(%rax)
-; AVX-NEXT:    vmovaps %ymm4, 32(%rax)
-; AVX-NEXT:    vmovaps %ymm10, (%rax)
+; AVX-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm8
+; AVX-NEXT:    vshufps {{.*#+}} ymm7 = ymm8[3,3],ymm7[3,3],ymm8[7,7],ymm7[7,7]
+; AVX-NEXT:    vblendps {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6],ymm6[7]
+; AVX-NEXT:    vpermilps {{.*#+}} ymm6 = ymm6[0,2,1,3,4,6,7,5]
+; AVX-NEXT:    vunpcklps {{.*#+}} xmm7 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
+; AVX-NEXT:    vunpcklps {{.*#+}} xmm8 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
+; AVX-NEXT:    vblendps {{.*#+}} xmm7 = xmm7[0,1],xmm8[2,3]
+; AVX-NEXT:    vmovlhps {{.*#+}} xmm8 = xmm3[0],xmm2[0]
+; AVX-NEXT:    vunpcklps {{.*#+}} xmm9 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
+; AVX-NEXT:    vshufps {{.*#+}} xmm8 = xmm9[0,1],xmm8[2,0]
+; AVX-NEXT:    vinsertf128 $1, %xmm7, %ymm8, %ymm7
+; AVX-NEXT:    vunpckhps {{.*#+}} xmm0 = xmm0[2],xmm1[2],xmm0[3],xmm1[3]
+; AVX-NEXT:    vinsertps {{.*#+}} xmm1 = zero,zero,xmm2[2],xmm3[2]
+; AVX-NEXT:    vblendps {{.*#+}} xmm0 = xmm0[0,1],xmm1[2,3]
+; AVX-NEXT:    vinsertps {{.*#+}} xmm1 = xmm2[1],xmm3[1],zero,zero
+; AVX-NEXT:    vunpcklps {{.*#+}} xmm2 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
+; AVX-NEXT:    vblendps {{.*#+}} xmm1 = xmm1[0,1],xmm2[2,3]
+; AVX-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
+; AVX-NEXT:    vmovaps %ymm0, 32(%rax)
+; AVX-NEXT:    vmovaps %ymm7, (%rax)
+; AVX-NEXT:    vmovaps %ymm6, 64(%rax)
 ; AVX-NEXT:    vzeroupper
 ; AVX-NEXT:    retq
 ;