Commit 069f0fe

[X86] canCreateUndefOrPoisonForTargetNode - SSE PINSR/PEXTR vector element insert/extract are never out of bounds (#149822)
The immediate index is guaranteed to be treated as modulo the number of vector elements.
1 parent d8adb57 commit 069f0fe
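
To illustrate the modulo-index behaviour the commit relies on, here is a minimal standalone C++ sketch (not LLVM code; the pextrw/pinsrw helper names are made up for this illustration). Because the PEXTRW/PINSRW immediate selects a lane as imm modulo the lane count, even an "out of range" immediate stays inside the vector and therefore cannot introduce undef or poison.

#include <array>
#include <cstdint>
#include <iostream>

// Model of a 128-bit vector of 8 x i16 lanes.
using V8i16 = std::array<uint16_t, 8>;

// PEXTRW-style extract: the lane index is reduced modulo the lane count,
// so the access is always in bounds no matter what Imm is.
uint16_t pextrw(const V8i16 &V, unsigned Imm) {
  return V[Imm % V.size()];
}

// PINSRW-style insert: the destination lane is likewise Imm modulo the
// lane count; all other lanes are passed through unchanged.
V8i16 pinsrw(V8i16 V, uint16_t Val, unsigned Imm) {
  V[Imm % V.size()] = Val;
  return V;
}

int main() {
  V8i16 V{0, 1, 2, 3, 4, 5, 6, 7};
  std::cout << pextrw(V, 3) << '\n';  // lane 3 -> 3
  std::cout << pextrw(V, 11) << '\n'; // 11 % 8 == 3 -> still lane 3, never out of bounds
  V = pinsrw(V, 42, 9);               // 9 % 8 == 1 -> writes lane 1
  std::cout << V[1] << '\n';          // 42
  return 0;
}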

2 files changed: +50 -46 lines

llvm/lib/Target/X86/X86ISelLowering.cpp

Lines changed: 11 additions & 4 deletions
@@ -45098,27 +45098,34 @@ bool X86TargetLowering::canCreateUndefOrPoisonForTargetNode(
     bool PoisonOnly, bool ConsiderFlags, unsigned Depth) const {
 
   switch (Op.getOpcode()) {
+  // SSE vector insert/extracts use modulo indices.
+  case X86ISD::PINSRB:
+  case X86ISD::PINSRW:
+  case X86ISD::PEXTRB:
+  case X86ISD::PEXTRW:
+    return false;
   // SSE vector multiplies are either inbounds or saturate.
   case X86ISD::VPMADDUBSW:
   case X86ISD::VPMADDWD:
+    return false;
   // SSE vector shifts handle out of bounds shift amounts.
   case X86ISD::VSHLI:
   case X86ISD::VSRLI:
   case X86ISD::VSRAI:
     return false;
-  // SSE blends.
+  // SSE blends.
   case X86ISD::BLENDI:
   case X86ISD::BLENDV:
     return false;
-  // SSE target shuffles.
+  // SSE target shuffles.
   case X86ISD::PSHUFD:
   case X86ISD::UNPCKL:
   case X86ISD::UNPCKH:
   case X86ISD::VPERMILPI:
   case X86ISD::VPERMV3:
     return false;
-  // SSE comparisons handle all icmp/fcmp cases.
-  // TODO: Add CMPM/MM with test coverage.
+  // SSE comparisons handle all icmp/fcmp cases.
+  // TODO: Add CMPM/MM with test coverage.
   case X86ISD::CMPP:
   case X86ISD::PCMPEQ:
   case X86ISD::PCMPGT:

llvm/test/CodeGen/X86/avg.ll

Lines changed: 39 additions & 42 deletions
@@ -1829,73 +1829,70 @@ define void @not_avg_v16i8_wide_constants(ptr %a, ptr %b) nounwind {
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm3 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm1 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
-; AVX1-NEXT:    vpxor %xmm4, %xmm4, %xmm4
-; AVX1-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
-; AVX1-NEXT:    vpextrd $2, %xmm5, %ecx
-; AVX1-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
-; AVX1-NEXT:    vpextrd $2, %xmm4, %eax
 ; AVX1-NEXT:    vpextrw $3, %xmm3, %edx
+; AVX1-NEXT:    vpextrw $2, %xmm3, %ecx
+; AVX1-NEXT:    vpextrw $1, %xmm3, %eax
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm4
-; AVX1-NEXT:    vpextrw $2, %xmm3, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm5
-; AVX1-NEXT:    vpextrw $1, %xmm3, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm6
 ; AVX1-NEXT:    vpextrw $0, %xmm3, %edx
+; AVX1-NEXT:    decl %ecx
+; AVX1-NEXT:    vmovd %ecx, %xmm5
+; AVX1-NEXT:    vpextrw $3, %xmm2, %ecx
+; AVX1-NEXT:    decl %eax
+; AVX1-NEXT:    vmovd %eax, %xmm6
+; AVX1-NEXT:    vpextrw $2, %xmm2, %eax
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm7
-; AVX1-NEXT:    vpextrw $3, %xmm2, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm8
-; AVX1-NEXT:    vpextrw $2, %xmm2, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm9
 ; AVX1-NEXT:    vpextrw $1, %xmm2, %edx
+; AVX1-NEXT:    decl %ecx
+; AVX1-NEXT:    vmovd %ecx, %xmm8
+; AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
+; AVX1-NEXT:    decl %eax
+; AVX1-NEXT:    vmovd %eax, %xmm9
+; AVX1-NEXT:    vpextrw $7, %xmm3, %eax
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm10
-; AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm11
-; AVX1-NEXT:    vpextrw $5, %xmm3, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm12
-; AVX1-NEXT:    vpextrw $4, %xmm3, %edx
+; AVX1-NEXT:    vpextrw $6, %xmm3, %edx
+; AVX1-NEXT:    decl %ecx
+; AVX1-NEXT:    vmovd %ecx, %xmm11
+; AVX1-NEXT:    vpextrw $7, %xmm2, %ecx
+; AVX1-NEXT:    decl %eax
+; AVX1-NEXT:    vmovd %eax, %xmm12
+; AVX1-NEXT:    vpextrw $6, %xmm2, %eax
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm13
-; AVX1-NEXT:    vpextrw $5, %xmm2, %edx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm14
-; AVX1-NEXT:    vpextrw $4, %xmm2, %edx
+; AVX1-NEXT:    vpextrw $5, %xmm3, %edx
+; AVX1-NEXT:    decl %ecx
+; AVX1-NEXT:    vmovd %ecx, %xmm14
+; AVX1-NEXT:    vpextrw $4, %xmm3, %ecx
+; AVX1-NEXT:    decl %eax
+; AVX1-NEXT:    vmovd %eax, %xmm3
+; AVX1-NEXT:    vpextrw $5, %xmm2, %eax
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm15
-; AVX1-NEXT:    vpextrw $7, %xmm3, %edx
+; AVX1-NEXT:    vpextrw $4, %xmm2, %edx
 ; AVX1-NEXT:    decl %ecx
-; AVX1-NEXT:    vmovd %ecx, %xmm3
-; AVX1-NEXT:    vpextrw $7, %xmm2, %ecx
-; AVX1-NEXT:    decl %edx
-; AVX1-NEXT:    vmovd %edx, %xmm2
+; AVX1-NEXT:    vmovd %ecx, %xmm2
 ; AVX1-NEXT:    decl %eax
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
 ; AVX1-NEXT:    vmovd %eax, %xmm5
-; AVX1-NEXT:    decl %ecx
+; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; AVX1-NEXT:    vmovd %ecx, %xmm7
+; AVX1-NEXT:    vmovd %edx, %xmm7
 ; AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
 ; AVX1-NEXT:    vpunpckldq {{.*#+}} xmm6 = xmm8[0],xmm6[0],xmm8[1],xmm6[1]
 ; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm6, %ymm4
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
-; AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm8, %ymm6
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; AVX1-NEXT:    vmovddup {{.*#+}} ymm3 = ymm6[0,0,2,2]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm14[0],xmm3[1],xmm14[1],xmm3[2],xmm14[2],xmm3[3],xmm14[3]
+; AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm3, %ymm3
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm15[0],xmm2[1],xmm15[1],xmm2[2],xmm15[2],xmm2[3],xmm15[3]
+; AVX1-NEXT:    vshufps {{.*#+}} ymm3 = ymm3[0,0,0,0,4,4,4,4]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
 ; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm5, %ymm2
-; AVX1-NEXT:    vshufps {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
-; AVX1-NEXT:    vblendps {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
+; AVX1-NEXT:    vmovddup {{.*#+}} ymm2 = ymm2[0,0,2,2]
+; AVX1-NEXT:    vblendps {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5,6],ymm3[7]
 ; AVX1-NEXT:    vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vandps %ymm0, %ymm2, %ymm1
