Skip to content

Commit 5bece3d

Browse files
daxtens authored and mpe committed
powerpc: support KASAN instrumentation of bitops
The powerpc-specific bitops are not being picked up by the KASAN test suite. Instrumentation is done via the bitops/instrumented-{atomic,lock}.h headers. They require that arch-specific versions of bitop functions are renamed to arch_*. Do this renaming. For clear_bit_unlock_is_negative_byte, the current implementation uses the PG_waiters constant. This works because it's a preprocessor macro - so it's only actually evaluated in contexts where PG_waiters is defined. With instrumentation however, it becomes a static inline function, and all of a sudden we need the actual value of PG_waiters. Because of the order of header includes, it's not available and we fail to compile. Instead, manually specify that we care about bit 7. This is still correct: bit 7 is the bit that would mark a negative byte. While we're at it, replace __inline__ with inline across the file. Reviewed-by: Christophe Leroy <[email protected]> Signed-off-by: Daniel Axtens <[email protected]> Tested-by: Christophe Leroy <[email protected]> Signed-off-by: Michael Ellerman <[email protected]> Link: https://lore.kernel.org/r/[email protected]
1 parent 81d2c6f commit 5bece3d

File tree

1 file changed

+29
-22
lines changed

1 file changed

+29
-22
lines changed

arch/powerpc/include/asm/bitops.h

Lines changed: 29 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@
6464

6565
/* Macro for generating the ***_bits() functions */
6666
#define DEFINE_BITOP(fn, op, prefix) \
67-
static __inline__ void fn(unsigned long mask, \
67+
static inline void fn(unsigned long mask, \
6868
volatile unsigned long *_p) \
6969
{ \
7070
unsigned long old; \
@@ -86,30 +86,30 @@ DEFINE_BITOP(clear_bits, andc, "")
8686
DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER)
8787
DEFINE_BITOP(change_bits, xor, "")
8888

89-
static __inline__ void set_bit(int nr, volatile unsigned long *addr)
89+
static inline void arch_set_bit(int nr, volatile unsigned long *addr)
9090
{
9191
set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
9292
}
9393

94-
static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
94+
static inline void arch_clear_bit(int nr, volatile unsigned long *addr)
9595
{
9696
clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
9797
}
9898

99-
static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
99+
static inline void arch_clear_bit_unlock(int nr, volatile unsigned long *addr)
100100
{
101101
clear_bits_unlock(BIT_MASK(nr), addr + BIT_WORD(nr));
102102
}
103103

104-
static __inline__ void change_bit(int nr, volatile unsigned long *addr)
104+
static inline void arch_change_bit(int nr, volatile unsigned long *addr)
105105
{
106106
change_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
107107
}
108108

109109
/* Like DEFINE_BITOP(), with changes to the arguments to 'op' and the output
110110
* operands. */
111111
#define DEFINE_TESTOP(fn, op, prefix, postfix, eh) \
112-
static __inline__ unsigned long fn( \
112+
static inline unsigned long fn( \
113113
unsigned long mask, \
114114
volatile unsigned long *_p) \
115115
{ \
@@ -138,34 +138,34 @@ DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
138138
DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
139139
PPC_ATOMIC_EXIT_BARRIER, 0)
140140

141-
static __inline__ int test_and_set_bit(unsigned long nr,
142-
volatile unsigned long *addr)
141+
static inline int arch_test_and_set_bit(unsigned long nr,
142+
volatile unsigned long *addr)
143143
{
144144
return test_and_set_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
145145
}
146146

147-
static __inline__ int test_and_set_bit_lock(unsigned long nr,
148-
volatile unsigned long *addr)
147+
static inline int arch_test_and_set_bit_lock(unsigned long nr,
148+
volatile unsigned long *addr)
149149
{
150150
return test_and_set_bits_lock(BIT_MASK(nr),
151151
addr + BIT_WORD(nr)) != 0;
152152
}
153153

154-
static __inline__ int test_and_clear_bit(unsigned long nr,
155-
volatile unsigned long *addr)
154+
static inline int arch_test_and_clear_bit(unsigned long nr,
155+
volatile unsigned long *addr)
156156
{
157157
return test_and_clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
158158
}
159159

160-
static __inline__ int test_and_change_bit(unsigned long nr,
161-
volatile unsigned long *addr)
160+
static inline int arch_test_and_change_bit(unsigned long nr,
161+
volatile unsigned long *addr)
162162
{
163163
return test_and_change_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
164164
}
165165

166166
#ifdef CONFIG_PPC64
167-
static __inline__ unsigned long clear_bit_unlock_return_word(int nr,
168-
volatile unsigned long *addr)
167+
static inline unsigned long
168+
clear_bit_unlock_return_word(int nr, volatile unsigned long *addr)
169169
{
170170
unsigned long old, t;
171171
unsigned long *p = (unsigned long *)addr + BIT_WORD(nr);
@@ -185,15 +185,18 @@ static __inline__ unsigned long clear_bit_unlock_return_word(int nr,
185185
return old;
186186
}
187187

188-
/* This is a special function for mm/filemap.c */
189-
#define clear_bit_unlock_is_negative_byte(nr, addr) \
190-
(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(PG_waiters))
188+
/*
189+
* This is a special function for mm/filemap.c
190+
* Bit 7 corresponds to PG_waiters.
191+
*/
192+
#define arch_clear_bit_unlock_is_negative_byte(nr, addr) \
193+
(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(7))
191194

192195
#endif /* CONFIG_PPC64 */
193196

194197
#include <asm-generic/bitops/non-atomic.h>
195198

196-
static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
199+
static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
197200
{
198201
__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
199202
__clear_bit(nr, addr);
@@ -215,14 +218,14 @@ static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
215218
* fls: find last (most-significant) bit set.
216219
* Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
217220
*/
218-
static __inline__ int fls(unsigned int x)
221+
static inline int fls(unsigned int x)
219222
{
220223
return 32 - __builtin_clz(x);
221224
}
222225

223226
#include <asm-generic/bitops/builtin-__fls.h>
224227

225-
static __inline__ int fls64(__u64 x)
228+
static inline int fls64(__u64 x)
226229
{
227230
return 64 - __builtin_clzll(x);
228231
}
@@ -239,6 +242,10 @@ unsigned long __arch_hweight64(__u64 w);
239242

240243
#include <asm-generic/bitops/find.h>
241244

245+
/* wrappers that deal with KASAN instrumentation */
246+
#include <asm-generic/bitops/instrumented-atomic.h>
247+
#include <asm-generic/bitops/instrumented-lock.h>
248+
242249
/* Little-endian versions */
243250
#include <asm-generic/bitops/le.h>
244251

0 commit comments

Comments
 (0)