@@ -135,7 +135,9 @@ static inline void write_cr0(unsigned long x)

static inline unsigned long read_cr2(void)
{
-       return PVOP_CALLEE0(unsigned long, mmu.read_cr2);
+       return PVOP_ALT_CALLEE0(unsigned long, mmu.read_cr2,
+                               "mov %%cr2, %%rax;",
+                               ALT_NOT(X86_FEATURE_XENPV));
}

static inline void write_cr2(unsigned long x)
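A note on the pattern, not part of the patch itself: PVOP_ALT_CALLEE0 keeps the patchable indirect call through pv_ops but wraps the site in an ALTERNATIVE, so when the feature test fails (ALT_NOT(X86_FEATURE_XENPV), i.e. not running as a Xen PV guest) the boot-time patcher overwrites the call with the inline native instruction, here "mov %%cr2, %%rax". A rough user-space model of the resulting two-way selection, with all names hypothetical:

/*
 * Hypothetical user-space model of the ALTERNATIVE/ALT_NOT selection.
 * The real kernel rewrites instruction bytes once at boot; this sketch
 * only mimics the resulting decision: inline native code unless XENPV.
 */
#include <stdbool.h>
#include <stdio.h>

static bool feature_xenpv;                /* stand-in for X86_FEATURE_XENPV */

static unsigned long xenpv_read_cr2(void) /* stand-in for mmu.read_cr2 */
{
        return 0xfee1dead;                /* value the hypervisor would return */
}

static unsigned long read_cr2_model(void)
{
        if (!feature_xenpv)               /* ALT_NOT(X86_FEATURE_XENPV) holds */
                return 0x1000;            /* models the inline "mov %cr2, %rax" */
        return xenpv_read_cr2();          /* models the CALLEE0 indirect call */
}

int main(void)
{
        printf("bare metal: %#lx\n", read_cr2_model());
        feature_xenpv = true;
        printf("Xen PV    : %#lx\n", read_cr2_model());
        return 0;
}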
@@ -145,12 +147,14 @@ static inline void write_cr2(unsigned long x)

static inline unsigned long __read_cr3(void)
{
-       return PVOP_CALL0(unsigned long, mmu.read_cr3);
+       return PVOP_ALT_CALL0(unsigned long, mmu.read_cr3,
+                             "mov %%cr3, %%rax;", ALT_NOT(X86_FEATURE_XENPV));
}

static inline void write_cr3(unsigned long x)
{
-       PVOP_VCALL1(mmu.write_cr3, x);
+       PVOP_ALT_VCALL1(mmu.write_cr3, x,
+                       "mov %%rdi, %%cr3", ALT_NOT(X86_FEATURE_XENPV));
}

static inline void __write_cr4(unsigned long x)
@@ -170,7 +174,7 @@ static inline void halt(void)

static inline void wbinvd(void)
{
-       PVOP_VCALL0(cpu.wbinvd);
+       PVOP_ALT_VCALL0(cpu.wbinvd, "wbinvd", ALT_NOT(X86_FEATURE_XENPV));
}

static inline u64 paravirt_read_msr(unsigned msr)
@@ -384,22 +388,28 @@ static inline void paravirt_release_p4d(unsigned long pfn)

static inline pte_t __pte(pteval_t val)
{
-       return (pte_t) { PVOP_CALLEE1(pteval_t, mmu.make_pte, val) };
+       return (pte_t) { PVOP_ALT_CALLEE1(pteval_t, mmu.make_pte, val,
+                                         "mov %%rdi, %%rax",
+                                         ALT_NOT(X86_FEATURE_XENPV)) };
}

static inline pteval_t pte_val(pte_t pte)
{
-       return PVOP_CALLEE1(pteval_t, mmu.pte_val, pte.pte);
+       return PVOP_ALT_CALLEE1(pteval_t, mmu.pte_val, pte.pte,
+                               "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));
}

static inline pgd_t __pgd(pgdval_t val)
{
-       return (pgd_t) { PVOP_CALLEE1(pgdval_t, mmu.make_pgd, val) };
+       return (pgd_t) { PVOP_ALT_CALLEE1(pgdval_t, mmu.make_pgd, val,
+                                         "mov %%rdi, %%rax",
+                                         ALT_NOT(X86_FEATURE_XENPV)) };
}

static inline pgdval_t pgd_val(pgd_t pgd)
{
-       return PVOP_CALLEE1(pgdval_t, mmu.pgd_val, pgd.pgd);
+       return PVOP_ALT_CALLEE1(pgdval_t, mmu.pgd_val, pgd.pgd,
+                               "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));
}

#define __HAVE_ARCH_PTEP_MODIFY_PROT_TRANSACTION
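All of the make_*/*_val conversions in this file get the same replacement instruction, "mov %%rdi, %%rax", because on bare metal they are the identity: the value in the first argument register comes back unchanged in the return register, and only Xen PV needs a real conversion between pseudo-physical and machine frame numbers. A minimal sketch of that contrast, with the Xen-side remap stubbed out (it is not the real pte_pfn_to_mfn() code):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t pteval_t;

static pteval_t native_make_pte(pteval_t val)
{
        return val;                       /* identity: mov %rdi, %rax */
}

static pteval_t xen_make_pte_stub(pteval_t val)
{
        /* Pretend PFN->MFN remap of the frame-number bits; illustrative. */
        return (val & 0xfffULL) | ((val >> 12) + 7) << 12;
}

int main(void)
{
        pteval_t v = (0x1234ULL << 12) | 0x063;   /* frame | prot bits */

        printf("native: %#" PRIx64 "\n", native_make_pte(v));
        printf("xen   : %#" PRIx64 "\n", xen_make_pte_stub(v));
        return 0;
}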
@@ -432,12 +442,15 @@ static inline void set_pmd(pmd_t *pmdp, pmd_t pmd)

static inline pmd_t __pmd(pmdval_t val)
{
-       return (pmd_t) { PVOP_CALLEE1(pmdval_t, mmu.make_pmd, val) };
+       return (pmd_t) { PVOP_ALT_CALLEE1(pmdval_t, mmu.make_pmd, val,
+                                         "mov %%rdi, %%rax",
+                                         ALT_NOT(X86_FEATURE_XENPV)) };
}

static inline pmdval_t pmd_val(pmd_t pmd)
{
-       return PVOP_CALLEE1(pmdval_t, mmu.pmd_val, pmd.pmd);
+       return PVOP_ALT_CALLEE1(pmdval_t, mmu.pmd_val, pmd.pmd,
+                               "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));
}

static inline void set_pud(pud_t *pudp, pud_t pud)
@@ -449,14 +462,16 @@ static inline pud_t __pud(pudval_t val)
{
        pudval_t ret;

-       ret = PVOP_CALLEE1(pudval_t, mmu.make_pud, val);
+       ret = PVOP_ALT_CALLEE1(pudval_t, mmu.make_pud, val,
+                              "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));

        return (pud_t) { ret };
}

static inline pudval_t pud_val(pud_t pud)
{
-       return PVOP_CALLEE1(pudval_t, mmu.pud_val, pud.pud);
+       return PVOP_ALT_CALLEE1(pudval_t, mmu.pud_val, pud.pud,
+                               "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));
}

static inline void pud_clear(pud_t *pudp)
@@ -475,14 +490,17 @@ static inline void set_p4d(p4d_t *p4dp, p4d_t p4d)

static inline p4d_t __p4d(p4dval_t val)
{
-       p4dval_t ret = PVOP_CALLEE1(p4dval_t, mmu.make_p4d, val);
+       p4dval_t ret = PVOP_ALT_CALLEE1(p4dval_t, mmu.make_p4d, val,
+                                       "mov %%rdi, %%rax",
+                                       ALT_NOT(X86_FEATURE_XENPV));

        return (p4d_t) { ret };
}

static inline p4dval_t p4d_val(p4d_t p4d)
{
-       return PVOP_CALLEE1(p4dval_t, mmu.p4d_val, p4d.p4d);
+       return PVOP_ALT_CALLEE1(p4dval_t, mmu.p4d_val, p4d.p4d,
+                               "mov %%rdi, %%rax", ALT_NOT(X86_FEATURE_XENPV));
}

static inline void __set_pgd(pgd_t *pgdp, pgd_t pgd)
@@ -569,7 +587,9 @@ static __always_inline void pv_queued_spin_lock_slowpath(struct qspinlock *lock,

static __always_inline void pv_queued_spin_unlock(struct qspinlock *lock)
{
-       PVOP_VCALLEE1(lock.queued_spin_unlock, lock);
+       PVOP_ALT_VCALLEE1(lock.queued_spin_unlock, lock,
+                         "movb $0, (%%" _ASM_ARG1 ");",
+                         ALT_NOT(X86_FEATURE_PVUNLOCK));
}

static __always_inline void pv_wait(u8 *ptr, u8 val)
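Here the feature bit differs: X86_FEATURE_PVUNLOCK is set only when paravirt spinlock support is actually in use, so the common case patches in "movb $0, (%%rdi)" (_ASM_ARG1 expands to the first argument register), which is the whole of the native queued-spinlock unlock: one byte store of zero to the locked byte. A C11-atomics sketch of what that single instruction does, with an illustrative struct layout:

/*
 * Sketch of what "movb $0, (%rdi)" replaces: the native unlock is a
 * byte store of 0 with release semantics (a plain MOV is a release
 * store on x86).  The struct layout here is illustrative only.
 */
#include <stdatomic.h>

struct qspinlock_model {
        _Atomic unsigned char locked;   /* low byte of the real lock word */
};

static void native_queued_spin_unlock_model(struct qspinlock_model *lock)
{
        atomic_store_explicit(&lock->locked, 0, memory_order_release);
}

The PVUNLOCK path has to keep the call, since a paravirt-aware unlock must also kick a possibly sleeping waiter vCPU.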
@@ -584,7 +604,9 @@ static __always_inline void pv_kick(int cpu)

static __always_inline bool pv_vcpu_is_preempted(long cpu)
{
-       return PVOP_CALLEE1(bool, lock.vcpu_is_preempted, cpu);
+       return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu,
+                               "xor %%" _ASM_AX ", %%" _ASM_AX ";",
+                               ALT_NOT(X86_FEATURE_VCPUPREEMPT));
}

void __raw_callee_save___native_queued_spin_unlock(struct qspinlock *lock);
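Similarly, "xor %%rax, %%rax" just zeroes the return register: on bare metal a CPU is never preempted by a hypervisor, so unless X86_FEATURE_VCPUPREEMPT is set the query collapses to a constant false. The C equivalent of the patched-in instruction, as a sketch rather than kernel code:

#include <stdbool.h>

static bool native_vcpu_is_preempted_model(long cpu)
{
        (void)cpu;              /* argument ignored on bare metal */
        return false;           /* return register zeroed */
}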
@@ -658,17 +680,18 @@ bool __raw_callee_save___native_vcpu_is_preempted(long cpu);
#ifdef CONFIG_PARAVIRT_XXL
static inline notrace unsigned long arch_local_save_flags(void)
{
-       return PVOP_CALLEE0(unsigned long, irq.save_fl);
+       return PVOP_ALT_CALLEE0(unsigned long, irq.save_fl, "pushf; pop %%rax;",
+                               ALT_NOT(X86_FEATURE_XENPV));
}

static inline notrace void arch_local_irq_disable(void)
{
-       PVOP_VCALLEE0(irq.irq_disable);
+       PVOP_ALT_VCALLEE0(irq.irq_disable, "cli;", ALT_NOT(X86_FEATURE_XENPV));
}

static inline notrace void arch_local_irq_enable(void)
{
-       PVOP_VCALLEE0(irq.irq_enable);
+       PVOP_ALT_VCALLEE0(irq.irq_enable, "sti;", ALT_NOT(X86_FEATURE_XENPV));
}

static inline notrace unsigned long arch_local_irq_save(void)
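The interrupt-flag helpers get their obvious native bodies: "cli"/"sti" for disable/enable and "pushf; pop %%rax" for save_fl. The flag-reading pair is an ordinary unprivileged sequence, so it can be tried directly in user space (x86-64, GCC or Clang inline asm; the only assumption is the RFLAGS layout, where IF is bit 9):

/*
 * The native replacement for irq.save_fl, "pushf; pop %rax", run in
 * user space.  Only the RFLAGS layout (IF = bit 9) is assumed here.
 */
#include <stdio.h>

int main(void)
{
        unsigned long flags;

        asm volatile("pushf; pop %0" : "=r" (flags));
        printf("RFLAGS = %#lx, IF = %lu\n", flags, (flags >> 9) & 1);
        return 0;
}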
@@ -713,35 +736,9 @@ extern void default_banner(void);
.popsection


-#define COND_PUSH(set, mask, reg)               \
-       .if ((~(set)) & mask); push %reg; .endif
-#define COND_POP(set, mask, reg)                \
-       .if ((~(set)) & mask); pop %reg; .endif
-
#ifdef CONFIG_X86_64
#ifdef CONFIG_PARAVIRT_XXL

-#define PV_SAVE_REGS(set)                       \
-       COND_PUSH(set, CLBR_RAX, rax);          \
-       COND_PUSH(set, CLBR_RCX, rcx);          \
-       COND_PUSH(set, CLBR_RDX, rdx);          \
-       COND_PUSH(set, CLBR_RSI, rsi);          \
-       COND_PUSH(set, CLBR_RDI, rdi);          \
-       COND_PUSH(set, CLBR_R8, r8);            \
-       COND_PUSH(set, CLBR_R9, r9);            \
-       COND_PUSH(set, CLBR_R10, r10);          \
-       COND_PUSH(set, CLBR_R11, r11)
-#define PV_RESTORE_REGS(set)                    \
-       COND_POP(set, CLBR_R11, r11);           \
-       COND_POP(set, CLBR_R10, r10);           \
-       COND_POP(set, CLBR_R9, r9);             \
-       COND_POP(set, CLBR_R8, r8);             \
-       COND_POP(set, CLBR_RDI, rdi);           \
-       COND_POP(set, CLBR_RSI, rsi);           \
-       COND_POP(set, CLBR_RDX, rdx);           \
-       COND_POP(set, CLBR_RCX, rcx);           \
-       COND_POP(set, CLBR_RAX, rax)
-
#define PARA_PATCH(off)         ((off) / 8)
#define PARA_SITE(ptype, ops)   _PVSITE(ptype, ops, .quad, 8)
#define PARA_INDIRECT(addr)     *addr(%rip)
@@ -752,12 +749,14 @@ extern void default_banner(void);
                          X86_FEATURE_XENPV, "jmp xen_iret;", "jmp native_iret;")

#ifdef CONFIG_DEBUG_ENTRY
-#define SAVE_FLAGS(clobbers)                                        \
-       PARA_SITE(PARA_PATCH(PV_IRQ_save_fl),                       \
-                 PV_SAVE_REGS(clobbers | CLBR_CALLEE_SAVE);        \
-                 ANNOTATE_RETPOLINE_SAFE;                          \
-                 call PARA_INDIRECT(pv_ops+PV_IRQ_save_fl);        \
-                 PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);)
+.macro PARA_IRQ_save_fl
+       PARA_SITE(PARA_PATCH(PV_IRQ_save_fl),
+                 ANNOTATE_RETPOLINE_SAFE;
+                 call PARA_INDIRECT(pv_ops+PV_IRQ_save_fl);)
+.endm
+
+#define SAVE_FLAGS     ALTERNATIVE "PARA_IRQ_save_fl;", "pushf; pop %rax;", \
+                                   ALT_NOT(X86_FEATURE_XENPV)
#endif
#endif /* CONFIG_PARAVIRT_XXL */
#endif /* CONFIG_X86_64 */
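The assembly-level SAVE_FLAGS follows the same scheme: the patchable indirect call survives only behind X86_FEATURE_XENPV (wrapped in the new PARA_IRQ_save_fl assembler macro), while everyone else runs "pushf; pop %rax" inline. The clobbers parameter can go away, together with the COND_PUSH/COND_POP and PV_SAVE_REGS/PV_RESTORE_REGS machinery removed above, because the pv_ops slot holds a callee-save thunk that preserves every caller-clobbered register except %rax itself. A hand-rolled sketch of such a thunk, illustrative only and not the kernel's generated PV_CALLEE_SAVE_REGS_THUNK code (x86-64 System V ABI, GCC/Clang):

/*
 * Sketch of a callee-save thunk: it saves and restores all registers
 * the ABI lets a callee clobber, so the caller only loses %rax (the
 * return value).  Names and layout are illustrative.
 */
#include <stdio.h>

unsigned long native_save_fl_model(void);
unsigned long save_fl_thunk(void);

asm(".globl save_fl_thunk\n"
    "save_fl_thunk:\n"
    "        push %rcx; push %rdx; push %rsi; push %rdi\n"
    "        push %r8;  push %r9;  push %r10; push %r11\n"
    "        sub  $8, %rsp\n"            /* keep 16-byte call alignment */
    "        call native_save_fl_model\n"
    "        add  $8, %rsp\n"
    "        pop  %r11; pop %r10; pop %r9;  pop %r8\n"
    "        pop  %rdi; pop %rsi; pop %rdx; pop %rcx\n"
    "        ret\n");

unsigned long native_save_fl_model(void)
{
        unsigned long flags;

        asm volatile("pushf; pop %0" : "=r" (flags));
        return flags;
}

int main(void)
{
        printf("flags via thunk: %#lx\n", save_fl_thunk());
        return 0;
}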