@@ -303,7 +303,7 @@ define <8 x half> @test14(half %x) {
303303; X64-LABEL: test14:
304304; X64: # %bb.0:
305305; X64-NEXT: vxorps %xmm1, %xmm1, %xmm1
306- ; X64-NEXT: vmovsh %xmm0, %xmm1, %xmm0
306+ ; X64-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
307307; X64-NEXT: retq
308308;
309309; X86-LABEL: test14:
@@ -318,7 +318,7 @@ define <16 x half> @test14b(half %x) {
318318; X64VL-LABEL: test14b:
319319; X64VL: # %bb.0:
320320; X64VL-NEXT: vxorps %xmm1, %xmm1, %xmm1
321- ; X64VL-NEXT: vmovsh %xmm0, %xmm1, %xmm0
321+ ; X64VL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
322322; X64VL-NEXT: retq
323323;
324324; X86-LABEL: test14b:
@@ -329,7 +329,7 @@ define <16 x half> @test14b(half %x) {
329329; X64-NOVL-LABEL: test14b:
330330; X64-NOVL: # %bb.0:
331331; X64-NOVL-NEXT: vxorps %xmm1, %xmm1, %xmm1
332- ; X64-NOVL-NEXT: vmovsh %xmm0, %xmm1, %xmm0
332+ ; X64-NOVL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
333333; X64-NOVL-NEXT: vxorps %xmm1, %xmm1, %xmm1
334334; X64-NOVL-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
335335; X64-NOVL-NEXT: retq
@@ -341,7 +341,7 @@ define <32 x half> @test14c(half %x) {
341341; X64VL-LABEL: test14c:
342342; X64VL: # %bb.0:
343343; X64VL-NEXT: vxorps %xmm1, %xmm1, %xmm1
344- ; X64VL-NEXT: vmovsh %xmm0, %xmm1, %xmm0
344+ ; X64VL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
345345; X64VL-NEXT: retq
346346;
347347; X86-LABEL: test14c:
@@ -352,7 +352,7 @@ define <32 x half> @test14c(half %x) {
352352; X64-NOVL-LABEL: test14c:
353353; X64-NOVL: # %bb.0:
354354; X64-NOVL-NEXT: vxorps %xmm1, %xmm1, %xmm1
355- ; X64-NOVL-NEXT: vmovsh %xmm0, %xmm1, %xmm0
355+ ; X64-NOVL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
356356; X64-NOVL-NEXT: vxorps %xmm1, %xmm1, %xmm1
357357; X64-NOVL-NEXT: vinsertf32x4 $0, %xmm0, %zmm1, %zmm0
358358; X64-NOVL-NEXT: retq
@@ -1464,21 +1464,21 @@ define <8 x half> @movsh(<8 x half> %a, <8 x half> %b) {
14641464; X64VL-LABEL: movsh:
14651465; X64VL: # %bb.0:
14661466; X64VL-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,14,15,0,1,2,3,4,5,6,7,14,15,10,11]
1467- ; X64VL-NEXT: vmovsh %xmm0, %xmm1, %xmm0
1467+ ; X64VL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
14681468; X64VL-NEXT: vaddph %xmm0, %xmm2, %xmm0
14691469; X64VL-NEXT: retq
14701470;
14711471; X86-LABEL: movsh:
14721472; X86: # %bb.0:
14731473; X86-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,14,15,0,1,2,3,4,5,6,7,14,15,10,11]
1474- ; X86-NEXT: vmovsh %xmm0, %xmm1, %xmm0
1474+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm1[1,2,3,4,5,6,7]
14751475; X86-NEXT: vaddph %xmm0, %xmm2, %xmm0
14761476; X86-NEXT: retl
14771477;
14781478; X64-NOVL-LABEL: movsh:
14791479; X64-NOVL: # %bb.0:
14801480; X64-NOVL-NEXT: vpshufb {{.*#+}} xmm2 = xmm0[0,1,14,15,0,1,2,3,4,5,6,7,14,15,10,11]
1481- ; X64-NOVL-NEXT: vmovsh %xmm0, %xmm1, %xmm3
1481+ ; X64-NOVL-NEXT: vmovsh {{.*#+}} xmm3 = xmm0[0],xmm1[1,2,3,4,5,6,7]
14821482; X64-NOVL-NEXT: vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
14831483; X64-NOVL-NEXT: vpsrldq {{.*#+}} xmm5 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
14841484; X64-NOVL-NEXT: vaddsh %xmm4, %xmm5, %xmm4
@@ -2311,7 +2311,7 @@ define <8 x half> @test21(half %a, half %b, half %c) nounwind {
23112311; X64-LABEL: test21:
23122312; X64: # %bb.0:
23132313; X64-NEXT: vxorps %xmm3, %xmm3, %xmm3
2314- ; X64-NEXT: vmovsh %xmm2, %xmm3, %xmm2
2314+ ; X64-NEXT: vmovsh {{.*#+}} xmm2 = xmm2[0],xmm3[1,2,3,4,5,6,7]
23152315; X64-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
23162316; X64-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],xmm2[0],zero,zero
23172317; X64-NEXT: retq
@@ -2427,7 +2427,7 @@ define <16 x i32> @pr52561(<16 x i32> %a, <16 x i32> %b) "min-legal-vector-width
24272427; X64VL-NEXT: vpaddd %ymm2, %ymm1, %ymm1
24282428; X64VL-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
24292429; X64VL-NEXT: vpxor %xmm2, %xmm2, %xmm2
2430- ; X64VL-NEXT: vmovsh %xmm0, %xmm2, %xmm0
2430+ ; X64VL-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm2[1,2,3,4,5,6,7]
24312431; X64VL-NEXT: retq
24322432;
24332433; X86-LABEL: pr52561:
@@ -2443,7 +2443,7 @@ define <16 x i32> @pr52561(<16 x i32> %a, <16 x i32> %b) "min-legal-vector-width
24432443; X86-NEXT: vpaddd %ymm2, %ymm1, %ymm1
24442444; X86-NEXT: vpand {{\.?LCPI[0-9]+_[0-9]+}}, %ymm1, %ymm1
24452445; X86-NEXT: vpxor %xmm2, %xmm2, %xmm2
2446- ; X86-NEXT: vmovsh %xmm0, %xmm2, %xmm0
2446+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = xmm0[0],xmm2[1,2,3,4,5,6,7]
24472447; X86-NEXT: movl %ebp, %esp
24482448; X86-NEXT: popl %ebp
24492449; X86-NEXT: retl
@@ -2474,7 +2474,7 @@ define <8 x i16> @pr59628_xmm(i16 %arg) {
24742474; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
24752475; X86-NEXT: vxorps %xmm0, %xmm0, %xmm0
24762476; X86-NEXT: vpbroadcastw %eax, %xmm1
2477- ; X86-NEXT: vmovsh %xmm1, %xmm0, %xmm0
2477+ ; X86-NEXT: vmovsh {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5,6,7]
24782478; X86-NEXT: vpcmpneqw {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %k1
24792479; X86-NEXT: vmovdqu16 %xmm0, %xmm0 {%k1} {z}
24802480; X86-NEXT: retl
0 commit comments