@@ -584,99 +584,6 @@ extern __must_check long strnlen_user(const char __user *str, long n);
 unsigned long __must_check clear_user(void __user *mem, unsigned long len);
 unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
 
-extern void __cmpxchg_wrong_size(void)
-	__compiletime_error("Bad argument size for cmpxchg");
-
-#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size)	\
-({									\
-	int __ret = 0;							\
-	__typeof__(*(ptr)) __old = (old);				\
-	__typeof__(*(ptr)) __new = (new);				\
-	__uaccess_begin_nospec();					\
-	switch (size) {							\
-	case 1:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "q" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 2:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 4:								\
-	{								\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	case 8:								\
-	{								\
-		if (!IS_ENABLED(CONFIG_X86_64))				\
-			__cmpxchg_wrong_size();				\
-									\
-		asm volatile("\n"					\
-			"1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n"		\
-			"2:\n"						\
-			"\t.section .fixup, \"ax\"\n"			\
-			"3:\tmov %3, %0\n"				\
-			"\tjmp 2b\n"					\
-			"\t.previous\n"					\
-			_ASM_EXTABLE_UA(1b, 3b)				\
-			: "+r" (__ret), "=a" (__old), "+m" (*(ptr))	\
-			: "i" (-EFAULT), "r" (__new), "1" (__old)	\
-			: "memory"					\
-		);							\
-		break;							\
-	}								\
-	default:							\
-		__cmpxchg_wrong_size();					\
-	}								\
-	__uaccess_end();						\
-	*(uval) = __old;						\
-	__ret;								\
-})
-
-#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new)		\
-({									\
-	access_ok((ptr), sizeof(*(ptr))) ?				\
-		__user_atomic_cmpxchg_inatomic((uval), (ptr),		\
-				(old), (new), sizeof(*(ptr))) :		\
-		-EFAULT;						\
-})
-
 /*
  * movsl can be slow when source and dest are not both 8-byte aligned
  */
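
For context on what this hunk deletes: user_atomic_cmpxchg_inatomic() performed a LOCK-prefixed compare-and-exchange directly on a user-space address, returning 0 on success or -EFAULT when the pointer failed access_ok() or the access faulted (the _inatomic suffix follows the kernel convention that the call may run with page faults disabled, so a fault is reported as an error rather than serviced). On success the value previously found in user memory was stored through uval, which is what made the usual retry loop possible. Below is a minimal sketch of such a caller; mark_page_busy() and its parameters are hypothetical, invented here for illustration, not taken from this commit:

	/*
	 * Hypothetical caller sketch (not from this commit): atomically
	 * OR a busy bit into a u32 living in user memory, retrying until
	 * the cmpxchg observes the value we based our update on.
	 */
	static int mark_page_busy(u32 __user *flags, u32 busy_bit)
	{
		u32 old_val = 0, expected;
		int ret;

		do {
			expected = old_val;
			/*
			 * Swap in (expected | busy_bit) only if *flags still
			 * holds expected; old_val receives whatever value was
			 * actually found there.
			 */
			ret = user_atomic_cmpxchg_inatomic(&old_val, flags,
							   expected,
							   expected | busy_bit);
			if (ret)
				return ret;	/* -EFAULT from access_ok() or a fault */
		} while (old_val != expected);

		return 0;
	}

The size switch in the deleted macro exists because cmpxchgb/w/l/q each need a differently-sized operand, and the .fixup/_ASM_EXTABLE_UA pair is the standard extable pattern: if instruction 1b faults, the exception handler resumes at 3b, which loads -EFAULT into the return register and jumps back to the normal exit at 2b.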