Commit f5544ba

Author: Al Viro (committed)

x86: get rid of user_atomic_cmpxchg_inatomic()

Only one user left; the thing had been made polymorphic back in 2013 for
the sake of MPX. No point keeping it now that MPX is gone.

Convert futex_atomic_cmpxchg_inatomic() to user_access_{begin,end}()
while we are at it.

Signed-off-by: Al Viro <[email protected]>

1 parent a251b2d commit f5544ba
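For context: user_access_begin() performs the access_ok() range check and opens a user-access window (STAC on x86 with SMAP), and user_access_end() closes it (CLAC); in between, code may touch userspace without per-access checking. A minimal sketch of the pattern, with an illustrative function name (read_u32_from_user() is not from this commit; unsafe_get_user() is the stock in-window accessor):

	/* Sketch only: illustrates the user_access_{begin,end}() bracket
	 * this commit adopts; not code from the commit itself. */
	static int read_u32_from_user(u32 __user *uaddr, u32 *out)
	{
		u32 val;

		if (!user_access_begin(uaddr, sizeof(u32)))
			return -EFAULT;			/* bad user range */
		unsafe_get_user(val, uaddr, Efault);	/* jumps to Efault on fault */
		user_access_end();
		*out = val;
		return 0;
	Efault:
		user_access_end();	/* close the window on the fault path too */
		return -EFAULT;
	}

The futex conversion below follows the same shape, except that the cmpxchg itself is done in inline asm rather than through an unsafe_*() helper.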

File tree: 2 files changed, +19 -94 lines

arch/x86/include/asm/futex.h (19 additions & 1 deletion)

@@ -90,7 +90,25 @@ static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *o
 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
 						u32 oldval, u32 newval)
 {
-	return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
+	int ret = 0;
+
+	if (!user_access_begin(uaddr, sizeof(u32)))
+		return -EFAULT;
+	asm volatile("\n"
+		"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
+		"2:\n"
+		"\t.section .fixup, \"ax\"\n"
+		"3:\tmov %3, %0\n"
+		"\tjmp 2b\n"
+		"\t.previous\n"
+		_ASM_EXTABLE_UA(1b, 3b)
+		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
+		: "i" (-EFAULT), "r" (newval), "1" (oldval)
+		: "memory"
+	);
+	user_access_end();
+	*uval = oldval;
+	return ret;
 }
 
 #endif
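Reading the new asm (comments below are added for this write-up, not part of the commit): the operands bind to the constraint list in order, so %0 is ret, %1 is oldval in EAX (the "1" input constraint preloads EAX with the expected value), %2 is *uaddr, %3 is the -EFAULT immediate, and %4 is newval.

	asm volatile("\n"
		/* CMPXCHG: if EAX == *uaddr, store newval into *uaddr;
		 * otherwise load the current *uaddr into EAX. */
		"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		"2:\n"
		"\t.section .fixup, \"ax\"\n"	/* out-of-line fault handler */
		"3:\tmov %3, %0\n"		/* ret = -EFAULT */
		"\tjmp 2b\n"
		"\t.previous\n"
		_ASM_EXTABLE_UA(1b, 3b)		/* a fault at 1: resumes at 3: */
		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		: "i" (-EFAULT), "r" (newval), "1" (oldval)
		: "memory");

Success or failure, EAX ends up holding the value that was actually in *uaddr, which is why the unconditional *uval = oldval after the asm is correct.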

arch/x86/include/asm/uaccess.h (0 additions & 93 deletions)

@@ -584,99 +584,6 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
-extern void __cmpxchg_wrong_size(void)
-	__compiletime_error("Bad argument size for cmpxchg");
-
-#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)	\
-({									\
-	int __ret = 0;							\
-	__typeof__(*(ptr)) __old = (old);				\
-	__typeof__(*(ptr)) __new = (new);				\
-	__uaccess_begin_nospec();					\
-	switch (size) {							\
-	case 1:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "q" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 2:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 4:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 8:								\
-	{								\
-		if (!IS_ENABLED(CONFIG_X86_64))				\
-			__cmpxchg_wrong_size();				\
-									\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	default:							\
-		__cmpxchg_wrong_size();					\
-	}								\
-	__uaccess_end();						\
-	*(uval) = __old;						\
-	__ret;								\
-})
-
-#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)		\
-({									\
-	access_ok((ptr), sizeof(*(ptr))) ?				\
-		__user_atomic_cmpxchg_inatomic((uval), (ptr),		\
-				(old), (new), sizeof(*(ptr))) :		\
-		-EFAULT;						\
-})
-
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */
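The removed __cmpxchg_wrong_size() declaration is the usual compile-time size guard: the function is declared but never defined, and __compiletime_error() turns any call that survives optimization into a build failure. A minimal sketch of the idiom under illustrative names (bad_size() and size_must_be_atomic() are not kernel symbols):

	extern void bad_size(void)
		__compiletime_error("bad argument size");

	#define size_must_be_atomic(ptr)				\
	({								\
		switch (sizeof(*(ptr))) {				\
		case 1: case 2: case 4: case 8:				\
			break;						\
		default:						\
			bad_size(); /* unreachable for good sizes */	\
		}							\
	})

Because sizeof(*(ptr)) is a compile-time constant, the compiler eliminates the default branch for every valid size; if the call remains reachable, the error attribute fails the build.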
