 (decl amode_to_synthetic_amode (Amode) SyntheticAmode)
 (extern constructor amode_to_synthetic_amode amode_to_synthetic_amode)

+(decl synthetic_amode_slot (i32) SyntheticAmode)
+(extern constructor synthetic_amode_slot synthetic_amode_slot)
+
+;; Helper for loads/stores to/from stackslots.
+(decl stackslot_amode (StackSlot Offset32 Offset32) SyntheticAmode)
+(rule (stackslot_amode slot offset1 offset2)
+  (let ((slot_offset i32 (abi_stackslot_offset_into_slot_region slot offset1 offset2)))
+    (synthetic_amode_slot slot_offset)))
+
 ;; An `Amode` represents a possible addressing mode that can be used
 ;; in instructions. These denote a 64-bit value only.
 (type Amode (enum
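
The helper pair added above is the heart of this change: `stackslot_amode` folds a stack slot and two immediate offsets into a single `i32` offset into the function's stack-slot region, and `synthetic_amode_slot` wraps that offset in a `SyntheticAmode` that is only resolved to a concrete SP-relative address once frame layout is known. A worked example, assuming `abi_stackslot_offset_into_slot_region` sums the slot's start offset with both immediates (the numbers are illustrative, not from this patch):

    ;; Illustrative only: suppose ss0 begins at byte 16 of the
    ;; stack-slot region. Then
    ;;   (stackslot_amode ss0 8 4)
    ;; computes 16 + 8 + 4 = 28 and returns
    ;;   (synthetic_amode_slot 28)
    ;; which emission later rewrites to an [rsp + N] operand once the
    ;; final frame size is known.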
...

 (provide (= result (concat flags (bvadd val (sign_ext 64 offset)))))
 (require
   (= (widthof val) 64)))
-(decl to_amode (MemFlags Value Offset32) Amode)
+(decl to_amode (MemFlags Value Offset32) SyntheticAmode)
 (rule 0 (to_amode flags base offset)
   (amode_imm_reg flags base offset))
 (rule 1 (to_amode flags (iadd x y) offset)
   (to_amode_add flags x y offset))

+(rule 2
+  (to_amode flags (stack_addr slot offset1) offset2)
+  (stackslot_amode slot offset1 offset2))
+
 ;; Same as `to_amode`, except that the base address is computed via the addition
 ;; of the two `Value` arguments provided.
 ;;
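
ISLE tries higher-priority rules first, so the new `rule 2` intercepts `stack_addr` bases before the generic cases. A sketch of the dispatch (illustrative inputs, not part of the patch):

    ;; (to_amode flags (stack_addr ss0 8) 4)
    ;;   => rule 2 => (stackslot_amode ss0 8 4)
    ;; (to_amode flags (iadd x y) 0)
    ;;   => rule 1 => (to_amode_add flags x y 0)
    ;; (to_amode flags other 0)
    ;;   => rule 0 => (amode_imm_reg flags other 0)

The practical effect is that a load or store whose address is a bare `stack_addr` no longer needs to materialize the slot address into a register first.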
...


 ;; Offsetting an Amode. Used when we need to do consecutive
 ;; loads/stores to adjacent addresses.
-(decl amode_offset (Amode i32) Amode)
+(decl amode_offset (SyntheticAmode i32) SyntheticAmode)
 (extern constructor amode_offset amode_offset)

 ;; Return a zero offset as an `Offset32`.
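
Since `amode_offset` now takes and returns a `SyntheticAmode`, the consecutive-access pattern it exists for also works on stack-slot addresses. A minimal sketch of the kind of caller it serves, modeled on the existing 128-bit load lowering (not added by this patch):

    ;; Load the low and high 8-byte halves of a 128-bit value from
    ;; adjacent addresses derived from a single amode:
    ;; (let ((addr SyntheticAmode (to_amode flags base offset)))
    ;;   (value_regs (x64_mov addr)
    ;;               (x64_mov (amode_offset addr 8))))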
...

 (rule 0 (x64_load (multi_lane _bits _lanes) addr _ext_kind)
   (x64_movdqu_load addr))

-(decl x64_mov (Amode) Reg)
+(decl x64_mov (SyntheticAmode) Reg)
 (spec (x64_mov addr)
   (provide (= result (conv_to 64 (load_effect (extract 79 64 addr) 64 (extract 63 0 addr))))))
 (rule (x64_mov addr) (x64_movq_rm addr))
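
A note on the `spec` above, inferred from the `(concat flags (bvadd val (sign_ext 64 offset)))` spec earlier in this diff: the verification model packs an amode into 80 bits, with the `MemFlags` in bits 79..64 and the 64-bit address in bits 63..0, so `(load_effect (extract 79 64 addr) 64 (extract 63 0 addr))` models a 64-bit read at the packed address under the packed flags.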
...

 (decl x64_pextrb (Xmm u8) Gpr)
 (rule (x64_pextrb src lane) (x64_pextrb_a_or_avx src lane))

-(decl x64_pextrb_store (Amode Xmm u8) SideEffectNoResult)
+(decl x64_pextrb_store (SyntheticAmode Xmm u8) SideEffectNoResult)
 (rule (x64_pextrb_store addr src lane) (x64_pextrb_a_mem_or_avx addr src lane))

 (decl x64_pextrw (Xmm u8) Gpr)
 (rule (x64_pextrw src lane) (x64_pextrw_a_or_avx src lane))

-(decl x64_pextrw_store (Amode Xmm u8) SideEffectNoResult)
+(decl x64_pextrw_store (SyntheticAmode Xmm u8) SideEffectNoResult)
 (rule (x64_pextrw_store addr src lane) (x64_pextrw_b_mem_or_avx addr src lane))

 (decl x64_pextrd (Xmm u8) Gpr)
 (rule (x64_pextrd src lane) (x64_pextrd_a_or_avx src lane))

-(decl x64_pextrd_store (Amode Xmm u8) SideEffectNoResult)
+(decl x64_pextrd_store (SyntheticAmode Xmm u8) SideEffectNoResult)
 (rule (x64_pextrd_store addr src lane) (x64_pextrd_a_mem_or_avx addr src lane))

 (decl x64_pextrq (Xmm u8) Gpr)
 (rule (x64_pextrq src lane) (x64_pextrq_a_or_avx src lane))

-(decl x64_pextrq_store (Amode Xmm u8) SideEffectNoResult)
+(decl x64_pextrq_store (SyntheticAmode Xmm u8) SideEffectNoResult)
 (rule (x64_pextrq_store addr src lane) (x64_pextrq_a_mem_or_avx addr src lane))

 ;; Helper for creating `pmovmskb` instructions.
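
The four `pextr*_store` helpers above write a single vector lane directly to memory, so widening them to `SyntheticAmode` allows a lane to be stored straight into a stack slot. A hedged usage sketch (hypothetical call, not part of this patch):

    ;; Store 32-bit lane 3 of `src` into ss0 at byte offset 12:
    ;; (x64_pextrd_store (stackslot_amode ss0 0 12) src 3)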
...

 )
 (x64_por low_gt_and_high_eq high_halves_gt)))

-(decl x64_add_mem (Type Amode Value) SideEffectNoResult)
+(decl x64_add_mem (Type SyntheticAmode Value) SideEffectNoResult)
 (spec (x64_add_mem ty addr val)
   (provide (= result (store_effect
     (extract 79 64 addr)
...

 (rule 2 (x64_add_mem $I32 addr (i8_from_iconst val)) (x64_addl_mi_sxb_mem addr val))
 (rule 2 (x64_add_mem $I64 addr (i8_from_iconst val)) (x64_addq_mi_sxb_mem addr val))

-(decl x64_sub_mem (Type Amode Value) SideEffectNoResult)
+(decl x64_sub_mem (Type SyntheticAmode Value) SideEffectNoResult)

 ;; `sub mem, reg`
 (rule 0 (x64_sub_mem $I8 addr val) (x64_subb_mr_mem addr val))
...

 (rule 2 (x64_sub_mem $I32 addr (i8_from_iconst val)) (x64_subl_mi_sxb_mem addr val))
 (rule 2 (x64_sub_mem $I64 addr (i8_from_iconst val)) (x64_subq_mi_sxb_mem addr val))

-(decl x64_and_mem (Type Amode Value) SideEffectNoResult)
+(decl x64_and_mem (Type SyntheticAmode Value) SideEffectNoResult)

 ;; `and mem, imm`
 (rule (x64_and_mem $I8 addr val) (x64_andb_mr_mem addr val))
...

 (rule 2 (x64_and_mem $I32 addr (i8_from_iconst val)) (x64_andl_mi_sxb_mem addr val))
 (rule 2 (x64_and_mem $I64 addr (i8_from_iconst val)) (x64_andq_mi_sxb_mem addr val))

-(decl x64_or_mem (Type Amode Value) SideEffectNoResult)
+(decl x64_or_mem (Type SyntheticAmode Value) SideEffectNoResult)

 ;; `or mem, reg`
 (rule 0 (x64_or_mem $I8 addr val) (x64_orb_mr_mem addr val))
...

 (rule 2 (x64_or_mem $I32 addr (i8_from_iconst val)) (x64_orl_mi_sxb_mem addr val))
 (rule 2 (x64_or_mem $I64 addr (i8_from_iconst val)) (x64_orq_mi_sxb_mem addr val))

-(decl x64_xor_mem (Type Amode Value) SideEffectNoResult)
+(decl x64_xor_mem (Type SyntheticAmode Value) SideEffectNoResult)

 ;; `xor mem, reg`
 (rule 0 (x64_xor_mem $I8 addr val) (x64_xorb_mr_mem addr val))
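
All five read-modify-write helpers above (`x64_add_mem` through `x64_xor_mem`) share one shape: `rule 0` handles a register operand, and the priority-2 cases fold a constant that fits in a signed 8-bit immediate into the `*_mi_sxb_mem` encodings. With a `SyntheticAmode` argument they can target stack slots directly; an illustrative trace (operands assumed, not from the patch):

    ;; For a Value `val` known to be (iconst 5):
    ;; (x64_add_mem $I64 (stackslot_amode ss0 0 0) val)
    ;;   => rule 2 => (x64_addq_mi_sxb_mem (stackslot_amode ss0 0 0) 5)
    ;;   => add qword ptr [rsp + N], 5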
...

 (rule (x64_xchg $I32 addr operand) (x64_xchgl_rm operand addr))
 (rule (x64_xchg $I64 addr operand) (x64_xchgq_rm operand addr))

-(decl x64_lock_add (OperandSize Amode Gpr) SideEffectNoResult)
+(decl x64_lock_add (OperandSize SyntheticAmode Gpr) SideEffectNoResult)
 (rule (x64_lock_add (OperandSize.Size8) addr reg) (x64_lock_addb_mr_mem addr reg))
 (rule (x64_lock_add (OperandSize.Size16) addr reg) (x64_lock_addw_mr_mem addr reg))
 (rule (x64_lock_add (OperandSize.Size32) addr reg) (x64_lock_addl_mr_mem addr reg))
 (rule (x64_lock_add (OperandSize.Size64) addr reg) (x64_lock_addq_mr_mem addr reg))

-(decl x64_lock_sub (OperandSize Amode Gpr) SideEffectNoResult)
+(decl x64_lock_sub (OperandSize SyntheticAmode Gpr) SideEffectNoResult)
 (rule (x64_lock_sub (OperandSize.Size8) addr reg) (x64_lock_subb_mr_mem addr reg))
 (rule (x64_lock_sub (OperandSize.Size16) addr reg) (x64_lock_subw_mr_mem addr reg))
 (rule (x64_lock_sub (OperandSize.Size32) addr reg) (x64_lock_subl_mr_mem addr reg))
 (rule (x64_lock_sub (OperandSize.Size64) addr reg) (x64_lock_subq_mr_mem addr reg))

-(decl x64_lock_and (OperandSize Amode Gpr) SideEffectNoResult)
+(decl x64_lock_and (OperandSize SyntheticAmode Gpr) SideEffectNoResult)
 (rule (x64_lock_and (OperandSize.Size8) addr reg) (x64_lock_andb_mr_mem addr reg))
 (rule (x64_lock_and (OperandSize.Size16) addr reg) (x64_lock_andw_mr_mem addr reg))
 (rule (x64_lock_and (OperandSize.Size32) addr reg) (x64_lock_andl_mr_mem addr reg))
 (rule (x64_lock_and (OperandSize.Size64) addr reg) (x64_lock_andq_mr_mem addr reg))

-(decl x64_lock_or (OperandSize Amode Gpr) SideEffectNoResult)
+(decl x64_lock_or (OperandSize SyntheticAmode Gpr) SideEffectNoResult)
 (rule (x64_lock_or (OperandSize.Size8) addr reg) (x64_lock_orb_mr_mem addr reg))
 (rule (x64_lock_or (OperandSize.Size16) addr reg) (x64_lock_orw_mr_mem addr reg))
 (rule (x64_lock_or (OperandSize.Size32) addr reg) (x64_lock_orl_mr_mem addr reg))
 (rule (x64_lock_or (OperandSize.Size64) addr reg) (x64_lock_orq_mr_mem addr reg))

-(decl x64_lock_xor (OperandSize Amode Gpr) SideEffectNoResult)
+(decl x64_lock_xor (OperandSize SyntheticAmode Gpr) SideEffectNoResult)
 (rule (x64_lock_xor (OperandSize.Size8) addr reg) (x64_lock_xorb_mr_mem addr reg))
 (rule (x64_lock_xor (OperandSize.Size16) addr reg) (x64_lock_xorw_mr_mem addr reg))
 (rule (x64_lock_xor (OperandSize.Size32) addr reg) (x64_lock_xorl_mr_mem addr reg))
 (rule (x64_lock_xor (OperandSize.Size64) addr reg) (x64_lock_xorq_mr_mem addr reg))
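
The `lock`-prefixed helpers above get the same widening. They return `SideEffectNoResult`, i.e. they serve the cases where the atomic operation's old value is not needed, and with a `SyntheticAmode` they can now address stack slots without an extra `lea`. A hedged sketch (illustrative operands, not from the patch):

    ;; Atomic add whose result is unused, addressed via a stack slot:
    ;; (x64_lock_add (OperandSize.Size64) (stackslot_amode ss0 0 0) reg)
    ;;   => lock add qword ptr [rsp + N], reg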