
Commit 9ba8f81

[X86] andnot-patterns.ll - add additional multiuse tests
1 parent b8996f8 commit 9ba8f81
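For context on the test names: the _multiuse tests in this file give an intermediate value (the rotated value, the byte-swapped value, or the raw not) an extra use through a call to @use_i32 or @use_i64, so any and-not reconstruction has to keep that value available. A minimal sketch of the shape being added, reproduced from the diff below with the autogenerated check lines omitted:

declare void @use_i32(i32)

define i32 @andnot_rotr_i32_multiuse_not(i32 %a0, i32 %a1, i32 %a2) nounwind {
  %not = xor i32 %a1, -1                                             ; invert %a1
  %rot = tail call i32 @llvm.fshr.i32(i32 %not, i32 %not, i32 %a2)   ; rotate-right of the inverted value
  %and = and i32 %rot, %a0
  call void @use_i32(i32 %not)                                       ; extra use keeps %not live
  ret i32 %and
}

Because %not has this second use, the X64 checks for this function keep an explicit notl followed by separate rorl and andl instructions.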

1 file changed: +140 -3

llvm/test/CodeGen/X86/andnot-patterns.ll
Lines changed: 140 additions & 3 deletions
@@ -7,6 +7,7 @@
 ; TODO - PR112425 - attempt to reconstruct andnot patterns through bitwise-agnostic operations
 
 declare void @use_i64(i64)
+declare void @use_i32(i32)
 
 ;
 ; Fold (and X, (rotl (not Y), Z))) -> (and X, (not (rotl Y, Z)))
@@ -132,8 +133,8 @@ define i8 @andnot_rotl_i8(i8 %a0, i8 %a1, i8 %a2) nounwind {
   ret i8 %and
 }
 
-define i64 @andnot_rotl_i64_multiuse(i64 %a0, i64 %a1, i64 %a2) nounwind {
-; X86-LABEL: andnot_rotl_i64_multiuse:
+define i64 @andnot_rotl_i64_multiuse_rot(i64 %a0, i64 %a1, i64 %a2) nounwind {
+; X86-LABEL: andnot_rotl_i64_multiuse_rot:
 ; X86:       # %bb.0:
 ; X86-NEXT:    pushl %ebx
 ; X86-NEXT:    pushl %edi
@@ -171,7 +172,7 @@ define i64 @andnot_rotl_i64_multiuse(i64 %a0, i64 %a1, i64 %a2) nounwind {
 ; X86-NEXT:    popl %ebx
 ; X86-NEXT:    retl
 ;
-; X64-LABEL: andnot_rotl_i64_multiuse:
+; X64-LABEL: andnot_rotl_i64_multiuse_rot:
 ; X64:       # %bb.0:
 ; X64-NEXT:    pushq %rbx
 ; X64-NEXT:    movq %rdx, %rcx
@@ -316,6 +317,44 @@ define i8 @andnot_rotr_i8(i8 %a0, i8 %a1, i8 %a2) nounwind {
   ret i8 %and
 }
 
+define i32 @andnot_rotr_i32_multiuse_not(i32 %a0, i32 %a1, i32 %a2) nounwind {
+; X86-LABEL: andnot_rotr_i32_multiuse_not:
+; X86:       # %bb.0:
+; X86-NEXT:    pushl %esi
+; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    notl %eax
+; X86-NEXT:    movl %eax, %esi
+; X86-NEXT:    rorl %cl, %esi
+; X86-NEXT:    andl {{[0-9]+}}(%esp), %esi
+; X86-NEXT:    pushl %eax
+; X86-NEXT:    calll use_i32@PLT
+; X86-NEXT:    addl $4, %esp
+; X86-NEXT:    movl %esi, %eax
+; X86-NEXT:    popl %esi
+; X86-NEXT:    retl
+;
+; X64-LABEL: andnot_rotr_i32_multiuse_not:
+; X64:       # %bb.0:
+; X64-NEXT:    pushq %rbx
+; X64-NEXT:    movl %edx, %ecx
+; X64-NEXT:    notl %esi
+; X64-NEXT:    movl %esi, %ebx
+; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
+; X64-NEXT:    rorl %cl, %ebx
+; X64-NEXT:    andl %edi, %ebx
+; X64-NEXT:    movl %esi, %edi
+; X64-NEXT:    callq use_i32@PLT
+; X64-NEXT:    movl %ebx, %eax
+; X64-NEXT:    popq %rbx
+; X64-NEXT:    retq
+  %not = xor i32 %a1, -1
+  %rot = tail call i32 @llvm.fshr.i32(i32 %not, i32 %not, i32 %a2)
+  %and = and i32 %rot, %a0
+  call void @use_i32(i32 %not)
+  ret i32 %and
+}
+
 ;
 ; Fold (and X, (bswap (not Y)))) -> (and X, (not (bswap Y)))
 ;
@@ -421,6 +460,104 @@ define i16 @andnot_bswap_i16(i16 %a0, i16 %a1) nounwind {
   ret i16 %and
 }
 
+define i32 @andnot_bswap_i32_multiuse_bswap(i32 %a0, i32 %a1) nounwind {
+; X86-LABEL: andnot_bswap_i32_multiuse_bswap:
+; X86:       # %bb.0:
+; X86-NEXT:    pushl %esi
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    notl %eax
+; X86-NEXT:    bswapl %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
+; X86-NEXT:    andl %eax, %esi
+; X86-NEXT:    pushl %eax
+; X86-NEXT:    calll use_i32@PLT
+; X86-NEXT:    addl $4, %esp
+; X86-NEXT:    movl %esi, %eax
+; X86-NEXT:    popl %esi
+; X86-NEXT:    retl
+;
+; X64-LABEL: andnot_bswap_i32_multiuse_bswap:
+; X64:       # %bb.0:
+; X64-NEXT:    pushq %rbx
+; X64-NEXT:    movl %edi, %ebx
+; X64-NEXT:    notl %esi
+; X64-NEXT:    bswapl %esi
+; X64-NEXT:    andl %esi, %ebx
+; X64-NEXT:    movl %esi, %edi
+; X64-NEXT:    callq use_i32@PLT
+; X64-NEXT:    movl %ebx, %eax
+; X64-NEXT:    popq %rbx
+; X64-NEXT:    retq
+  %not = xor i32 %a1, -1
+  %bswap = tail call i32 @llvm.bswap.i32(i32 %not)
+  %and = and i32 %bswap, %a0
+  call void @use_i32(i32 %bswap)
+  ret i32 %and
+}
+
+define i32 @andnot_bswap_i32_multiuse_not(i32 %a0, i32 %a1) nounwind {
+; X86-NOBMI-LABEL: andnot_bswap_i32_multiuse_not:
+; X86-NOBMI:       # %bb.0:
+; X86-NOBMI-NEXT:    pushl %esi
+; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NOBMI-NEXT:    notl %eax
+; X86-NOBMI-NEXT:    movl %eax, %esi
+; X86-NOBMI-NEXT:    bswapl %esi
+; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %esi
+; X86-NOBMI-NEXT:    pushl %eax
+; X86-NOBMI-NEXT:    calll use_i32@PLT
+; X86-NOBMI-NEXT:    addl $4, %esp
+; X86-NOBMI-NEXT:    movl %esi, %eax
+; X86-NOBMI-NEXT:    popl %esi
+; X86-NOBMI-NEXT:    retl
+;
+; X86-BMI-LABEL: andnot_bswap_i32_multiuse_not:
+; X86-BMI:       # %bb.0:
+; X86-BMI-NEXT:    pushl %esi
+; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-BMI-NEXT:    movl %eax, %ecx
+; X86-BMI-NEXT:    notl %ecx
+; X86-BMI-NEXT:    bswapl %eax
+; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %esi
+; X86-BMI-NEXT:    pushl %ecx
+; X86-BMI-NEXT:    calll use_i32@PLT
+; X86-BMI-NEXT:    addl $4, %esp
+; X86-BMI-NEXT:    movl %esi, %eax
+; X86-BMI-NEXT:    popl %esi
+; X86-BMI-NEXT:    retl
+;
+; X64-NOBMI-LABEL: andnot_bswap_i32_multiuse_not:
+; X64-NOBMI:       # %bb.0:
+; X64-NOBMI-NEXT:    pushq %rbx
+; X64-NOBMI-NEXT:    notl %esi
+; X64-NOBMI-NEXT:    movl %esi, %ebx
+; X64-NOBMI-NEXT:    bswapl %ebx
+; X64-NOBMI-NEXT:    andl %edi, %ebx
+; X64-NOBMI-NEXT:    movl %esi, %edi
+; X64-NOBMI-NEXT:    callq use_i32@PLT
+; X64-NOBMI-NEXT:    movl %ebx, %eax
+; X64-NOBMI-NEXT:    popq %rbx
+; X64-NOBMI-NEXT:    retq
+;
+; X64-BMI-LABEL: andnot_bswap_i32_multiuse_not:
+; X64-BMI:       # %bb.0:
+; X64-BMI-NEXT:    pushq %rbx
+; X64-BMI-NEXT:    movl %esi, %eax
+; X64-BMI-NEXT:    notl %eax
+; X64-BMI-NEXT:    bswapl %esi
+; X64-BMI-NEXT:    andnl %edi, %esi, %ebx
+; X64-BMI-NEXT:    movl %eax, %edi
+; X64-BMI-NEXT:    callq use_i32@PLT
+; X64-BMI-NEXT:    movl %ebx, %eax
+; X64-BMI-NEXT:    popq %rbx
+; X64-BMI-NEXT:    retq
+  %not = xor i32 %a1, -1
+  %bswap = tail call i32 @llvm.bswap.i32(i32 %not)
+  %and = and i32 %bswap, %a0
+  call void @use_i32(i32 %not)
+  ret i32 %and
+}
+
 ;
 ; Fold (and X, (bitreverse (not Y)))) -> (and X, (not (bitreverse Y)))
 ;
