Skip to content

Commit 3cf3fab

Browse files
committed
Merge tag 'locking-core-2023-10-28' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull locking updates from Ingo Molnar: "Futex improvements: - Add the 'futex2' syscall ABI, which is an attempt to get away from the multiplex syscall and adds a little room for extensions, while lifting some limitations. - Fix futex PI recursive rt_mutex waiter state bug - Fix inter-process shared futexes on no-MMU systems - Use folios instead of pages Micro-optimizations of locking primitives: - Improve arch_spin_value_unlocked() on asm-generic ticket spinlock architectures, to improve lockref code generation - Improve the x86-32 lockref_get_not_zero() main loop by adding build-time CMPXCHG8B support detection for the relevant lockref code, and by better interfacing the CMPXCHG8B assembly code with the compiler - Introduce arch_sync_try_cmpxchg() on x86 to improve sync_try_cmpxchg() code generation. Convert some sync_cmpxchg() users to sync_try_cmpxchg(). - Micro-optimize rcuref_put_slowpath() Locking debuggability improvements: - Improve CONFIG_DEBUG_RT_MUTEXES=y to have a fast-path as well - Enforce atomicity of sched_submit_work(), which is de-facto atomic but was un-enforced previously. - Extend <linux/cleanup.h>'s no_free_ptr() with __must_check semantics - Fix ww_mutex self-tests - Clean up const-propagation in <linux/seqlock.h> and simplify the API-instantiation macros a bit RT locking improvements: - Provide the rt_mutex_*_schedule() primitives/helpers and use them in the rtmutex code to avoid recursion vs. rtlock on the PI state. - Add nested blocking lockdep asserts to rt_mutex_lock(), rtlock_lock() and rwbase_read_lock() .. 
plus misc fixes & cleanups" * tag 'locking-core-2023-10-28' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (39 commits) futex: Don't include process MM in futex key on no-MMU locking/seqlock: Fix grammar in comment alpha: Fix up new futex syscall numbers locking/seqlock: Propagate 'const' pointers within read-only methods, remove forced type casts locking/lockdep: Fix string sizing bug that triggers a format-truncation compiler-warning locking/seqlock: Change __seqprop() to return the function pointer locking/seqlock: Simplify SEQCOUNT_LOCKNAME() locking/atomics: Use atomic_try_cmpxchg_release() to micro-optimize rcuref_put_slowpath() locking/atomic, xen: Use sync_try_cmpxchg() instead of sync_cmpxchg() locking/atomic/x86: Introduce arch_sync_try_cmpxchg() locking/atomic: Add generic support for sync_try_cmpxchg() and its fallback locking/seqlock: Fix typo in comment futex/requeue: Remove unnecessary ‘NULL’ initialization from futex_proxy_trylock_atomic() locking/local, arch: Rewrite local_add_unless() as a static inline function locking/debug: Fix debugfs API return value checks to use IS_ERR() locking/ww_mutex/test: Make sure we bail out instead of livelock locking/ww_mutex/test: Fix potential workqueue corruption locking/ww_mutex/test: Use prng instead of rng to avoid hangs at bootup futex: Add sys_futex_requeue() futex: Add flags2 argument to futex_requeue() ...
2 parents 9cda4eb + c73801a commit 3cf3fab

File tree

56 files changed

+894
-337
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

56 files changed

+894
-337
lines changed

arch/alpha/include/asm/local.h

Lines changed: 16 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -65,28 +65,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
6565
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
6666

6767
/**
68-
* local_add_unless - add unless the number is a given value
68+
* local_add_unless - add unless the number is already a given value
6969
* @l: pointer of type local_t
7070
* @a: the amount to add to l...
7171
* @u: ...unless l is equal to u.
7272
*
73-
* Atomically adds @a to @l, so long as it was not @u.
74-
* Returns non-zero if @l was not @u, and zero otherwise.
73+
* Atomically adds @a to @l, if @l was not already @u.
74+
* Returns true if the addition was done.
7575
*/
76-
#define local_add_unless(l, a, u) \
77-
({ \
78-
long c, old; \
79-
c = local_read(l); \
80-
for (;;) { \
81-
if (unlikely(c == (u))) \
82-
break; \
83-
old = local_cmpxchg((l), c, c + (a)); \
84-
if (likely(old == c)) \
85-
break; \
86-
c = old; \
87-
} \
88-
c != (u); \
89-
})
76+
static __inline__ bool
77+
local_add_unless(local_t *l, long a, long u)
78+
{
79+
long c = local_read(l);
80+
81+
do {
82+
if (unlikely(c == u))
83+
return false;
84+
} while (!local_try_cmpxchg(l, &c, c + a));
85+
86+
return true;
87+
}
88+
9089
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
9190

9291
#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

arch/alpha/kernel/syscalls/syscall.tbl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -492,3 +492,7 @@
492492
560 common set_mempolicy_home_node sys_ni_syscall
493493
561 common cachestat sys_cachestat
494494
562 common fchmodat2 sys_fchmodat2
495+
# 563 reserved for map_shadow_stack
496+
564 common futex_wake sys_futex_wake
497+
565 common futex_wait sys_futex_wait
498+
566 common futex_requeue sys_futex_requeue

arch/arm/tools/syscall.tbl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -466,3 +466,6 @@
466466
450 common set_mempolicy_home_node sys_set_mempolicy_home_node
467467
451 common cachestat sys_cachestat
468468
452 common fchmodat2 sys_fchmodat2
469+
454 common futex_wake sys_futex_wake
470+
455 common futex_wait sys_futex_wait
471+
456 common futex_requeue sys_futex_requeue

arch/arm64/include/asm/unistd.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@
3939
#define __ARM_NR_compat_set_tls (__ARM_NR_COMPAT_BASE + 5)
4040
#define __ARM_NR_COMPAT_END (__ARM_NR_COMPAT_BASE + 0x800)
4141

42-
#define __NR_compat_syscalls 453
42+
#define __NR_compat_syscalls 457
4343
#endif
4444

4545
#define __ARCH_WANT_SYS_CLONE

arch/arm64/include/asm/unistd32.h

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -911,6 +911,12 @@ __SYSCALL(__NR_set_mempolicy_home_node, sys_set_mempolicy_home_node)
911911
__SYSCALL(__NR_cachestat, sys_cachestat)
912912
#define __NR_fchmodat2 452
913913
__SYSCALL(__NR_fchmodat2, sys_fchmodat2)
914+
#define __NR_futex_wake 454
915+
__SYSCALL(__NR_futex_wake, sys_futex_wake)
916+
#define __NR_futex_wait 455
917+
__SYSCALL(__NR_futex_wait, sys_futex_wait)
918+
#define __NR_futex_requeue 456
919+
__SYSCALL(__NR_futex_requeue, sys_futex_requeue)
914920

915921
/*
916922
* Please add new compat syscalls above this comment and update

arch/ia64/kernel/syscalls/syscall.tbl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -373,3 +373,6 @@
373373
450 common set_mempolicy_home_node sys_set_mempolicy_home_node
374374
451 common cachestat sys_cachestat
375375
452 common fchmodat2 sys_fchmodat2
376+
454 common futex_wake sys_futex_wake
377+
455 common futex_wait sys_futex_wait
378+
456 common futex_requeue sys_futex_requeue

arch/loongarch/include/asm/local.h

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -70,22 +70,27 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
7070
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
7171

7272
/**
73-
* local_add_unless - add unless the number is a given value
73+
* local_add_unless - add unless the number is already a given value
7474
* @l: pointer of type local_t
7575
* @a: the amount to add to l...
7676
* @u: ...unless l is equal to u.
7777
*
78-
* Atomically adds @a to @l, so long as it was not @u.
79-
* Returns non-zero if @l was not @u, and zero otherwise.
78+
* Atomically adds @a to @l, if @l was not already @u.
79+
* Returns true if the addition was done.
8080
*/
81-
#define local_add_unless(l, a, u) \
82-
({ \
83-
long c, old; \
84-
c = local_read(l); \
85-
while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
86-
c = old; \
87-
c != (u); \
88-
})
81+
static inline bool
82+
local_add_unless(local_t *l, long a, long u)
83+
{
84+
long c = local_read(l);
85+
86+
do {
87+
if (unlikely(c == u))
88+
return false;
89+
} while (!local_try_cmpxchg(l, &c, c + a));
90+
91+
return true;
92+
}
93+
8994
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
9095

9196
#define local_dec_return(l) local_sub_return(1, (l))

arch/m68k/kernel/syscalls/syscall.tbl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -452,3 +452,6 @@
452452
450 common set_mempolicy_home_node sys_set_mempolicy_home_node
453453
451 common cachestat sys_cachestat
454454
452 common fchmodat2 sys_fchmodat2
455+
454 common futex_wake sys_futex_wake
456+
455 common futex_wait sys_futex_wait
457+
456 common futex_requeue sys_futex_requeue

arch/microblaze/kernel/syscalls/syscall.tbl

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -458,3 +458,6 @@
458458
450 common set_mempolicy_home_node sys_set_mempolicy_home_node
459459
451 common cachestat sys_cachestat
460460
452 common fchmodat2 sys_fchmodat2
461+
454 common futex_wake sys_futex_wake
462+
455 common futex_wait sys_futex_wait
463+
456 common futex_requeue sys_futex_requeue

arch/mips/include/asm/local.h

Lines changed: 16 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -108,22 +108,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
108108
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
109109

110110
/**
111-
* local_add_unless - add unless the number is a given value
111+
* local_add_unless - add unless the number is already a given value
112112
* @l: pointer of type local_t
113113
* @a: the amount to add to l...
114114
* @u: ...unless l is equal to u.
115115
*
116-
* Atomically adds @a to @l, so long as it was not @u.
117-
* Returns non-zero if @l was not @u, and zero otherwise.
116+
* Atomically adds @a to @l, if @l was not already @u.
117+
* Returns true if the addition was done.
118118
*/
119-
#define local_add_unless(l, a, u) \
120-
({ \
121-
long c, old; \
122-
c = local_read(l); \
123-
while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
124-
c = old; \
125-
c != (u); \
126-
})
119+
static __inline__ bool
120+
local_add_unless(local_t *l, long a, long u)
121+
{
122+
long c = local_read(l);
123+
124+
do {
125+
if (unlikely(c == u))
126+
return false;
127+
} while (!local_try_cmpxchg(l, &c, c + a));
128+
129+
return true;
130+
}
131+
127132
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
128133

129134
#define local_dec_return(l) local_sub_return(1, (l))

0 commit comments

Comments
 (0)