@@ -435,8 +435,8 @@ define { i129, i1 } @smul_ovf(i129 %x, i129 %y) nounwind {
; X86-NEXT: movl %edx, %esi
; X86-NEXT: movl %eax, %ebp
; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
- ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
- ; X86-NEXT: mull %ecx
+ ; X86-NEXT: movl %ecx, %eax
+ ; X86-NEXT: mull {{[0-9]+}}(%esp)
; X86-NEXT: movl %eax, %ebx
; X86-NEXT: movl %eax, %ecx
; X86-NEXT: movl %eax, {{[-0-9]+}}(%e{{[sb]}}p) # 4-byte Spill
@@ -822,167 +822,169 @@ define { i129, i1 } @smul_ovf(i129 %x, i129 %y) nounwind {
; X64-NEXT: pushq %rbx
; X64-NEXT: movq %r9, %r15
; X64-NEXT: movq %rcx, %r9
- ; X64-NEXT: movq %rdx, %r14
+ ; X64-NEXT: movq %rdx, %r10
; X64-NEXT: movq %rsi, %r12
; X64-NEXT: movq %rdi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: movq {{[0-9]+}}(%rsp), %r11
; X64-NEXT: andl $1, %r11d
; X64-NEXT: negq %r11
; X64-NEXT: andl $1, %r9d
; X64-NEXT: negq %r9
- ; X64-NEXT: movq %r9, %rax
- ; X64-NEXT: mulq %r8
+ ; X64-NEXT: movq %r8, %rax
+ ; X64-NEXT: mulq %r9
; X64-NEXT: movq %rdx, %rcx
- ; X64-NEXT: movq %rax, %rbp
; X64-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
+ ; X64-NEXT: movq %rax, %rdi
; X64-NEXT: movq %rdx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: addq %rdx, %rbp
+ ; X64-NEXT: addq %rdx, %rdi
; X64-NEXT: adcq $0, %rcx
- ; X64-NEXT: movq %r9, %rax
- ; X64-NEXT: mulq %r15
+ ; X64-NEXT: movq %r15, %rax
+ ; X64-NEXT: mulq %r9
; X64-NEXT: movq %rdx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: addq %rax, %rbp
+ ; X64-NEXT: addq %rax, %rdi
; X64-NEXT: adcq %rdx, %rcx
; X64-NEXT: setb %sil
- ; X64-NEXT: movzbl %sil, %edi
+ ; X64-NEXT: movzbl %sil, %r14d
; X64-NEXT: addq %rax, %rcx
- ; X64-NEXT: adcq %rdx, %rdi
+ ; X64-NEXT: adcq %rdx, %r14
; X64-NEXT: movq %r12, %rax
; X64-NEXT: mulq %r8
- ; X64-NEXT: movq %rdx, %r10
+ ; X64-NEXT: movq %rdx, %rbx
; X64-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: movq %r14, %rax
+ ; X64-NEXT: movq %r10, %rax
; X64-NEXT: mulq %r8
- ; X64-NEXT: movq %rdx, %rbx
- ; X64-NEXT: movq %rax, %r13
- ; X64-NEXT: addq %r10, %r13
- ; X64-NEXT: adcq $0, %rbx
+ ; X64-NEXT: movq %rdx, %r13
+ ; X64-NEXT: movq %rax, %rbp
+ ; X64-NEXT: addq %rbx, %rbp
+ ; X64-NEXT: adcq $0, %r13
; X64-NEXT: movq %r12, %rax
; X64-NEXT: mulq %r15
; X64-NEXT: movq %rdx, %rsi
- ; X64-NEXT: addq %r13, %rax
+ ; X64-NEXT: addq %rbp, %rax
; X64-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: adcq %rbx, %rsi
+ ; X64-NEXT: adcq %r13, %rsi
; X64-NEXT: setb %r8b
- ; X64-NEXT: movq %r14, %rax
+ ; X64-NEXT: movq %r10, %rax
; X64-NEXT: mulq %r15
; X64-NEXT: movq %rdx, %rbx
; X64-NEXT: addq %rsi, %rax
; X64-NEXT: movzbl %r8b, %edx
; X64-NEXT: adcq %rdx, %rbx
; X64-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %rax # 8-byte Folded Reload
; X64-NEXT: movq %rax, %rsi
- ; X64-NEXT: adcq %rbp, %rbx
+ ; X64-NEXT: adcq %rdi, %rbx
; X64-NEXT: adcq $0, %rcx
- ; X64-NEXT: adcq $0, %rdi
+ ; X64-NEXT: adcq $0, %r14
; X64-NEXT: movq %r11, %rax
; X64-NEXT: mulq %r12
; X64-NEXT: movq %rdx, %r13
- ; X64-NEXT: movq %rax, %r15
- ; X64-NEXT: movq %r11, %rax
- ; X64-NEXT: mulq %r14
- ; X64-NEXT: movq %rax, %r14
; X64-NEXT: movq %rax, %r8
+ ; X64-NEXT: movq %r11, %rax
+ ; X64-NEXT: mulq %r10
+ ; X64-NEXT: movq %rax, %r15
+ ; X64-NEXT: movq %rax, %rdi
; X64-NEXT: movq %rax, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: addq %r13, %r14
+ ; X64-NEXT: addq %r13, %r15
; X64-NEXT: movq %rdx, %rbp
; X64-NEXT: movq %rdx, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: adcq $0, %rbp
- ; X64-NEXT: addq %r15, %r14
+ ; X64-NEXT: addq %r8, %r15
; X64-NEXT: adcq %r13, %rbp
; X64-NEXT: setb %al
- ; X64-NEXT: addq %r8, %rbp
+ ; X64-NEXT: addq %rdi, %rbp
; X64-NEXT: movzbl %al, %r12d
; X64-NEXT: adcq %rdx, %r12
- ; X64-NEXT: addq %r15, %rsi
+ ; X64-NEXT: addq %r8, %rsi
+ ; X64-NEXT: movq %r8, %r10
+ ; X64-NEXT: movq %r8, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
; X64-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
- ; X64-NEXT: adcq %rbx, %r14
+ ; X64-NEXT: adcq %rbx, %r15
; X64-NEXT: adcq $0, %rbp
; X64-NEXT: adcq $0, %r12
; X64-NEXT: addq %rcx, %rbp
- ; X64-NEXT: adcq %rdi, %r12
+ ; X64-NEXT: adcq %r14, %r12
; X64-NEXT: setb %cl
; X64-NEXT: movq %r9, %rax
; X64-NEXT: mulq %r11
- ; X64-NEXT: movq %rax, %r10
- ; X64-NEXT: addq %rdx, %r10
- ; X64-NEXT: movq %rdx, %rdi
- ; X64-NEXT: adcq $0, %rdi
- ; X64-NEXT: addq %rax, %r10
- ; X64-NEXT: adcq %rdx, %rdi
- ; X64-NEXT: setb %bl
- ; X64-NEXT: addq %rax, %rdi
- ; X64-NEXT: movzbl %bl, %esi
- ; X64-NEXT: adcq %rdx, %rsi
+ ; X64-NEXT: movq %rax, %r8
+ ; X64-NEXT: addq %rdx, %r8
+ ; X64-NEXT: movq %rdx, %rbx
+ ; X64-NEXT: adcq $0, %rbx
+ ; X64-NEXT: addq %rax, %r8
+ ; X64-NEXT: adcq %rdx, %rbx
+ ; X64-NEXT: setb %r14b
+ ; X64-NEXT: addq %rax, %rbx
+ ; X64-NEXT: movzbl %r14b, %r14d
+ ; X64-NEXT: adcq %rdx, %r14
; X64-NEXT: addq %rax, %rbp
- ; X64-NEXT: adcq %r12, %r10
+ ; X64-NEXT: adcq %r12, %r8
; X64-NEXT: movzbl %cl, %eax
- ; X64-NEXT: adcq %rax, %rdi
- ; X64-NEXT: adcq $0, %rsi
- ; X64-NEXT: movq %rsi, {{[-0-9]+}}(%r{{[sb]}}p) # 8-byte Spill
+ ; X64-NEXT: adcq %rax, %rbx
+ ; X64-NEXT: adcq $0, %r14
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
- ; X64-NEXT: movq %rsi, %r8
+ ; X64-NEXT: movq %rsi, %rdi
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rax # 8-byte Reload
- ; X64-NEXT: addq %rax, %r8
+ ; X64-NEXT: addq %rax, %rdi
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rdx # 8-byte Reload
; X64-NEXT: movq %rdx, %rcx
; X64-NEXT: adcq $0, %rcx
- ; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rbx # 8-byte Reload
- ; X64-NEXT: addq %rbx, %r8
+ ; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %r12 # 8-byte Reload
+ ; X64-NEXT: addq %r12, %rdi
; X64-NEXT: adcq %rax, %rcx
; X64-NEXT: setb %al
; X64-NEXT: addq %rsi, %rcx
; X64-NEXT: movzbl %al, %esi
; X64-NEXT: adcq %rdx, %rsi
; X64-NEXT: movq %r9, %rax
; X64-NEXT: imulq %r11
- ; X64-NEXT: movq %rbx, %r11
+ ; X64-NEXT: movq %r12, %r11
; X64-NEXT: addq %rax, %r11
- ; X64-NEXT: movq %r8, %r12
+ ; X64-NEXT: movq %rdi, %r12
; X64-NEXT: adcq %rdx, %r12
; X64-NEXT: addq %rcx, %r11
; X64-NEXT: adcq %rsi, %r12
- ; X64-NEXT: movq %r15, %r9
+ ; X64-NEXT: movq %r10, %r9
; X64-NEXT: addq %r13, %r9
; X64-NEXT: adcq $0, %r13
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rcx # 8-byte Reload
; X64-NEXT: addq %rcx, %r9
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
; X64-NEXT: adcq %rsi, %r13
- ; X64-NEXT: setb %bl
+ ; X64-NEXT: setb %r10b
; X64-NEXT: addq %rcx, %r13
- ; X64-NEXT: movzbl %bl, %ecx
+ ; X64-NEXT: movzbl %r10b, %ecx
; X64-NEXT: adcq %rsi, %rcx
- ; X64-NEXT: addq %r15, %rax
+ ; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Reload
+ ; X64-NEXT: addq %rsi, %rax
; X64-NEXT: adcq %r9, %rdx
; X64-NEXT: addq %r13, %rax
; X64-NEXT: adcq %rcx, %rdx
- ; X64-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %r15 # 8-byte Folded Reload
- ; X64-NEXT: adcq %r8, %r9
+ ; X64-NEXT: addq {{[-0-9]+}}(%r{{[sb]}}p), %rsi # 8-byte Folded Reload
+ ; X64-NEXT: adcq %rdi, %r9
; X64-NEXT: adcq %r11, %rax
; X64-NEXT: adcq %r12, %rdx
- ; X64-NEXT: addq %rbp, %r15
- ; X64-NEXT: adcq %r10, %r9
- ; X64-NEXT: adcq %rdi, %rax
- ; X64-NEXT: adcq {{[-0-9]+}}(%r{{[sb]}}p), %rdx # 8-byte Folded Reload
- ; X64-NEXT: movq %r14, %rcx
+ ; X64-NEXT: addq %rbp, %rsi
+ ; X64-NEXT: adcq %r8, %r9
+ ; X64-NEXT: adcq %rbx, %rax
+ ; X64-NEXT: adcq %r14, %rdx
+ ; X64-NEXT: movq %r15, %rcx
; X64-NEXT: sarq $63, %rcx
; X64-NEXT: xorq %rcx, %rdx
; X64-NEXT: xorq %rcx, %r9
; X64-NEXT: orq %rdx, %r9
; X64-NEXT: xorq %rcx, %rax
- ; X64-NEXT: xorq %r15, %rcx
+ ; X64-NEXT: xorq %rsi, %rcx
; X64-NEXT: orq %rax, %rcx
; X64-NEXT: orq %r9, %rcx
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rax # 8-byte Reload
; X64-NEXT: movl %eax, %esi
; X64-NEXT: andl $1, %esi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: negq %rdx
- ; X64-NEXT: xorq %rdx, %r14
+ ; X64-NEXT: xorq %rdx, %r15
; X64-NEXT: xorq %rax, %rdx
- ; X64-NEXT: orq %r14, %rdx
+ ; X64-NEXT: orq %r15, %rdx
; X64-NEXT: orq %rcx, %rdx
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rax # 8-byte Reload
; X64-NEXT: movq {{[-0-9]+}}(%r{{[sb]}}p), %rcx # 8-byte Reload