
Commit 029f56d

Merge tag 'x86_asm_for_v5.10' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 asm updates from Borislav Petkov:
 "Two asm wrapper fixes:

   - Use XORL instead of XORQ to avoid a REX prefix and save some bytes
     in the .fixup section, by Uros Bizjak.

   - Replace the __force_order dummy variable with a memory clobber to
     fix LLVM requiring a definition for the former and to prevent
     memory accesses from still being cached/reordered, by Arvind Sankar"

* tag 'x86_asm_for_v5.10' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/asm: Replace __force_order with a memory clobber
  x86/uaccess: Use XORL %0,%0 in __get_user_asm()
2 parents 7cd4ecd + aa5cacd commit 029f56d

File tree

4 files changed: +18 -25 lines

arch/x86/boot/compressed/pgtable_64.c
arch/x86/include/asm/special_insns.h
arch/x86/include/asm/uaccess.h
arch/x86/kernel/cpu/common.c

arch/x86/boot/compressed/pgtable_64.c

Lines changed: 0 additions & 9 deletions
@@ -5,15 +5,6 @@
 #include "pgtable.h"
 #include "../string.h"
 
-/*
- * __force_order is used by special_insns.h asm code to force instruction
- * serialization.
- *
- * It is not referenced from the code, but GCC < 5 with -fPIE would fail
- * due to an undefined symbol. Define it to make these ancient GCCs work.
- */
-unsigned long __force_order;
-
 #define BIOS_START_MIN 0x20000U /* 128K, less than this is insane */
 #define BIOS_START_MAX 0x9f000U /* 640K, absolute maximum */
 
arch/x86/include/asm/special_insns.h

Lines changed: 15 additions & 13 deletions
@@ -11,45 +11,47 @@
 #include <linux/jump_label.h>
 
 /*
- * Volatile isn't enough to prevent the compiler from reordering the
- * read/write functions for the control registers and messing everything up.
- * A memory clobber would solve the problem, but would prevent reordering of
- * all loads stores around it, which can hurt performance. Solution is to
- * use a variable and mimic reads and writes to it to enforce serialization
+ * The compiler should not reorder volatile asm statements with respect to each
+ * other: they should execute in program order. However GCC 4.9.x and 5.x have
+ * a bug (which was fixed in 8.1, 7.3 and 6.5) where they might reorder
+ * volatile asm. The write functions are not affected since they have memory
+ * clobbers preventing reordering. To prevent reads from being reordered with
+ * respect to writes, use a dummy memory operand.
  */
-extern unsigned long __force_order;
+
+#define __FORCE_ORDER "m"(*(unsigned int *)0x1000UL)
 
 void native_write_cr0(unsigned long val);
 
 static inline unsigned long native_read_cr0(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr0,%0\n\t" : "=r" (val) : __FORCE_ORDER);
 	return val;
 }
 
 static __always_inline unsigned long native_read_cr2(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr2,%0\n\t" : "=r" (val) : __FORCE_ORDER);
 	return val;
 }
 
 static __always_inline void native_write_cr2(unsigned long val)
 {
-	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr2": : "r" (val) : "memory");
 }
 
 static inline unsigned long __native_read_cr3(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr3,%0\n\t" : "=r" (val) : __FORCE_ORDER);
 	return val;
 }
 
 static inline void native_write_cr3(unsigned long val)
 {
-	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr3": : "r" (val) : "memory");
 }
 
 static inline unsigned long native_read_cr4(void)
@@ -64,10 +66,10 @@ static inline unsigned long native_read_cr4(void)
 	asm volatile("1: mov %%cr4, %0\n"
 		     "2:\n"
 		     _ASM_EXTABLE(1b, 2b)
-		     : "=r" (val), "=m" (__force_order) : "0" (0));
+		     : "=r" (val) : "0" (0), __FORCE_ORDER);
 #else
 	/* CR4 always exists on x86_64. */
-	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val) : __FORCE_ORDER);
 #endif
 	return val;
 }
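For readers unfamiliar with the trick above: the CR reads have no real memory operand, so __FORCE_ORDER supplies a fake "m" input that the affected compilers treat as a dependency, while the writes carry a "memory" clobber. Below is a small user-space sketch of the same shape, with illustrative names only (rdtsc stands in for a privileged CR read; this is not kernel code):

#include <stdio.h>

static unsigned int order_dummy;        /* stand-in for *(unsigned int *)0x1000UL */
#define FORCE_ORDER "m" (order_dummy)   /* dummy memory input operand */

static unsigned long flag;

static inline unsigned long read_counter(void)
{
	unsigned int lo, hi;
	/* like the CR reads: no real memory operand, only the dummy one */
	asm volatile("rdtsc" : "=a" (lo), "=d" (hi) : FORCE_ORDER);
	return ((unsigned long)hi << 32) | lo;
}

static inline void write_flag(unsigned long val)
{
	/* like the CR writes: a "memory" clobber orders it against the reads */
	asm volatile("mov %1, %0" : "=m" (flag) : "r" (val) : "memory");
}

int main(void)
{
	write_flag(1);
	printf("flag=%lu tsc=%lu\n", flag, read_counter());
	return 0;
}

The dummy operand is never actually dereferenced; it only exists so the compiler sees a memory dependency between the read and the surrounding asm statements that have memory effects.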

arch/x86/include/asm/uaccess.h

Lines changed: 1 addition & 1 deletion
@@ -418,7 +418,7 @@ do { \
 	     "2:\n" \
 	     ".section .fixup,\"ax\"\n" \
 	     "3: mov %[efault],%[errout]\n" \
-	     " xor"itype" %[output],%[output]\n" \
+	     " xorl %k[output],%k[output]\n" \
 	     " jmp 2b\n" \
 	     ".previous\n" \
 	     _ASM_EXTABLE_UA(1b, 3b) \
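Two x86-64 details make the one-liner above safe: writing a 32-bit register zero-extends into the full 64-bit register, and the %k operand modifier makes the compiler print the 32-bit name of whatever register it picked for %[output]. Dropping the REX.W prefix is what saves a byte per fixup (e.g. 31 c0 for xorl %eax,%eax vs 48 31 c0 for xorq %rax,%rax). A tiny compileable sketch of the same idiom outside the kernel macro (illustrative only, GCC/Clang on x86-64):

#include <stdio.h>

static unsigned long zero_it(unsigned long x)
{
	/* %k0 prints the 32-bit form of operand 0's register; the CPU
	 * zero-extends the result, clearing all 64 bits without REX.W */
	asm("xorl %k0, %k0" : "+r" (x));
	return x;
}

int main(void)
{
	printf("%lu\n", zero_it(0xdeadbeefcafef00dUL));	/* prints 0 */
	return 0;
}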

arch/x86/kernel/cpu/common.c

Lines changed: 2 additions & 2 deletions
@@ -360,7 +360,7 @@ void native_write_cr0(unsigned long val)
 	unsigned long bits_missing = 0;
 
 set_register:
-	asm volatile("mov %0,%%cr0": "+r" (val), "+m" (__force_order));
+	asm volatile("mov %0,%%cr0": "+r" (val) : : "memory");
 
 	if (static_branch_likely(&cr_pinning)) {
 		if (unlikely((val & X86_CR0_WP) != X86_CR0_WP)) {
@@ -379,7 +379,7 @@ void native_write_cr4(unsigned long val)
 	unsigned long bits_changed = 0;
 
 set_register:
-	asm volatile("mov %0,%%cr4": "+r" (val), "+m" (cr4_pinned_bits));
+	asm volatile("mov %0,%%cr4": "+r" (val) : : "memory");
 
 	if (static_branch_likely(&cr_pinning)) {
 		if (unlikely((val & cr4_pinned_mask) != cr4_pinned_bits)) {
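The write side no longer needs a dummy output operand at all: an asm statement with a "memory" clobber already acts as a compiler barrier (the same idea as the kernel's barrier() macro), so the compiler may not cache memory values across the CR write or move loads and stores around it. A minimal user-space illustration of that property (not kernel code):

#include <stdio.h>

/* an empty asm with a "memory" clobber: the compiler must assume it may
 * read or write any memory, so it is a compile-time barrier */
#define compiler_barrier() asm volatile("" : : : "memory")

static int value;
static int flag;

int main(void)
{
	value = 42;
	compiler_barrier();	/* the store to 'value' cannot sink below this */
	flag = 1;
	compiler_barrier();	/* 'value' must be reloaded, not assumed unchanged */
	printf("flag=%d value=%d\n", flag, value);
	return 0;
}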
