
Commit 9c44575

Merge tag 'bitmap-for-6.12' of https://github.com/norov/linux
Pull bitmap updates from Yury Norov:

 - switch all bitmap APIs from inline to __always_inline (Brian Norris)

   The __always_inline series improves on code generation, and with the
   latest compiler versions it is now required to avoid compilation
   warnings. It spent enough time in my backlog, and I'm thankful to
   Brian Norris for taking over and moving it forward.

 - introduce GENMASK_U128() macro (Anshuman Khandual)

   GENMASK_U128() is a prerequisite needed for arm64 development

* tag 'bitmap-for-6.12' of https://github.com/norov/linux:
  lib/test_bits.c: Add tests for GENMASK_U128()
  uapi: Define GENMASK_U128
  nodemask: Switch from inline to __always_inline
  cpumask: Switch from inline to __always_inline
  bitmap: Switch from inline to __always_inline
  find: Switch from inline to __always_inline
2 parents ba33a49 + d7bcc37 commit 9c44575
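For context on the inline to __always_inline switch: these helpers carry a small_const_nbits()-style fast path that only pays off when the compiler actually inlines them into a caller whose size is a compile-time constant, and plain 'inline' does not guarantee that. Below is a minimal userspace sketch of the pattern, not code from this commit; the names my_bitmap_empty and __bitmap_empty_slow are hypothetical.

#include <stdbool.h>

#define BITS_PER_LONG	(8 * (int)sizeof(long))

/* stand-in for the kernel's small_const_nbits() */
#define small_const_nbits(nbits) \
	(__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG && (nbits) > 0)

/* out-of-line fallback, assumed to be defined elsewhere */
bool __bitmap_empty_slow(const unsigned long *src, unsigned int nbits);

/* forcing inlining guarantees the constant-folded fast path in every caller */
static inline __attribute__((__always_inline__))
bool my_bitmap_empty(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))	/* folds to a single-word test */
		return !(*src & (~0UL >> (BITS_PER_LONG - nbits)));
	return __bitmap_empty_slow(src, nbits);
}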

8 files changed: 325 additions, 232 deletions


include/linux/bitmap.h

Lines changed: 76 additions & 64 deletions
@@ -203,12 +203,12 @@ unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
  * the bit offset of all zero areas this function finds is multiples of that
  * power of 2. A @align_mask of 0 means no alignment is required.
  */
-static inline unsigned long
-bitmap_find_next_zero_area(unsigned long *map,
-			   unsigned long size,
-			   unsigned long start,
-			   unsigned int nr,
-			   unsigned long align_mask)
+static __always_inline
+unsigned long bitmap_find_next_zero_area(unsigned long *map,
+					 unsigned long size,
+					 unsigned long start,
+					 unsigned int nr,
+					 unsigned long align_mask)
 {
 	return bitmap_find_next_zero_area_off(map, size, start, nr,
 					      align_mask, 0);
@@ -228,7 +228,7 @@ void bitmap_fold(unsigned long *dst, const unsigned long *orig,

 #define bitmap_size(nbits)	(ALIGN(nbits, BITS_PER_LONG) / BITS_PER_BYTE)

-static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
+static __always_inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);

@@ -238,7 +238,7 @@ static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
 	memset(dst, 0, len);
 }

-static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
+static __always_inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);

@@ -248,8 +248,8 @@ static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
 	memset(dst, 0xff, len);
 }

-static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
-			unsigned int nbits)
+static __always_inline
+void bitmap_copy(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	unsigned int len = bitmap_size(nbits);

@@ -262,8 +262,8 @@ static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
 /*
  * Copy bitmap and clear tail bits in last word.
  */
-static inline void bitmap_copy_clear_tail(unsigned long *dst,
-		const unsigned long *src, unsigned int nbits)
+static __always_inline
+void bitmap_copy_clear_tail(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	bitmap_copy(dst, src, nbits);
 	if (nbits % BITS_PER_LONG)
@@ -318,42 +318,46 @@ void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits);
 	bitmap_copy_clear_tail((unsigned long *)(buf), (const unsigned long *)(bitmap), (nbits))
 #endif

-static inline bool bitmap_and(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_and(unsigned long *dst, const unsigned long *src1,
+		const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
 	return __bitmap_and(dst, src1, src2, nbits);
 }

-static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+void bitmap_or(unsigned long *dst, const unsigned long *src1,
+	       const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = *src1 | *src2;
 	else
 		__bitmap_or(dst, src1, src2, nbits);
 }

-static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+void bitmap_xor(unsigned long *dst, const unsigned long *src1,
+		const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = *src1 ^ *src2;
 	else
 		__bitmap_xor(dst, src1, src2, nbits);
 }

-static inline bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
+		   const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
 	return __bitmap_andnot(dst, src1, src2, nbits);
 }

-static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
-			unsigned int nbits)
+static __always_inline
+void bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = ~(*src);
@@ -368,8 +372,8 @@ static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
 #endif
 #define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

-static inline bool bitmap_equal(const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_equal(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
@@ -388,45 +392,45 @@ static inline bool bitmap_equal(const unsigned long *src1,
  *
  * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
  */
-static inline bool bitmap_or_equal(const unsigned long *src1,
-				   const unsigned long *src2,
-				   const unsigned long *src3,
-				   unsigned int nbits)
+static __always_inline
+bool bitmap_or_equal(const unsigned long *src1, const unsigned long *src2,
+		     const unsigned long *src3, unsigned int nbits)
 {
 	if (!small_const_nbits(nbits))
 		return __bitmap_or_equal(src1, src2, src3, nbits);

 	return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
 }

-static inline bool bitmap_intersects(const unsigned long *src1,
-				     const unsigned long *src2,
-				     unsigned int nbits)
+static __always_inline
+bool bitmap_intersects(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
 	else
 		return __bitmap_intersects(src1, src2, nbits);
 }

-static inline bool bitmap_subset(const unsigned long *src1,
-			const unsigned long *src2, unsigned int nbits)
+static __always_inline
+bool bitmap_subset(const unsigned long *src1, const unsigned long *src2, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! ((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
 	else
 		return __bitmap_subset(src1, src2, nbits);
 }

-static inline bool bitmap_empty(const unsigned long *src, unsigned nbits)
+static __always_inline
+bool bitmap_empty(const unsigned long *src, unsigned nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! (*src & BITMAP_LAST_WORD_MASK(nbits));

 	return find_first_bit(src, nbits) == nbits;
 }

-static inline bool bitmap_full(const unsigned long *src, unsigned int nbits)
+static __always_inline
+bool bitmap_full(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
@@ -460,8 +464,8 @@ unsigned long bitmap_weight_andnot(const unsigned long *src1,
 	return __bitmap_weight_andnot(src1, src2, nbits);
 }

-static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
-		unsigned int nbits)
+static __always_inline
+void bitmap_set(unsigned long *map, unsigned int start, unsigned int nbits)
 {
 	if (__builtin_constant_p(nbits) && nbits == 1)
 		__set_bit(start, map);
@@ -476,8 +480,8 @@ static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
 		__bitmap_set(map, start, nbits);
 }

-static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
-		unsigned int nbits)
+static __always_inline
+void bitmap_clear(unsigned long *map, unsigned int start, unsigned int nbits)
 {
 	if (__builtin_constant_p(nbits) && nbits == 1)
 		__clear_bit(start, map);
@@ -492,29 +496,32 @@ static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
 		__bitmap_clear(map, start, nbits);
 }

-static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
-				unsigned int shift, unsigned int nbits)
+static __always_inline
+void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
+			unsigned int shift, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
 	else
 		__bitmap_shift_right(dst, src, shift, nbits);
 }

-static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
-				unsigned int shift, unsigned int nbits)
+static __always_inline
+void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
+		       unsigned int shift, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
 	else
 		__bitmap_shift_left(dst, src, shift, nbits);
 }

-static inline void bitmap_replace(unsigned long *dst,
-				  const unsigned long *old,
-				  const unsigned long *new,
-				  const unsigned long *mask,
-				  unsigned int nbits)
+static __always_inline
+void bitmap_replace(unsigned long *dst,
+		    const unsigned long *old,
+		    const unsigned long *new,
+		    const unsigned long *mask,
+		    unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		*dst = (*old & ~(*mask)) | (*new & *mask);
@@ -557,8 +564,9 @@ static inline void bitmap_replace(unsigned long *dst,
  * bitmap_gather() can be seen as the 'reverse' bitmap_scatter() operation.
  * See bitmap_scatter() for details related to this relationship.
  */
-static inline void bitmap_scatter(unsigned long *dst, const unsigned long *src,
-				  const unsigned long *mask, unsigned int nbits)
+static __always_inline
+void bitmap_scatter(unsigned long *dst, const unsigned long *src,
+		    const unsigned long *mask, unsigned int nbits)
 {
 	unsigned int n = 0;
 	unsigned int bit;
@@ -611,8 +619,9 @@ static inline void bitmap_scatter(unsigned long *dst, const unsigned long *src,
  * bitmap_scatter(res, src, mask, n) and a call to
 * bitmap_scatter(res, result, mask, n) will lead to the same res value.
  */
-static inline void bitmap_gather(unsigned long *dst, const unsigned long *src,
-				 const unsigned long *mask, unsigned int nbits)
+static __always_inline
+void bitmap_gather(unsigned long *dst, const unsigned long *src,
+		   const unsigned long *mask, unsigned int nbits)
 {
 	unsigned int n = 0;
 	unsigned int bit;
@@ -623,9 +632,9 @@ static inline void bitmap_gather(unsigned long *dst, const unsigned long *src,
 		__assign_bit(n++, dst, test_bit(bit, src));
 }

-static inline void bitmap_next_set_region(unsigned long *bitmap,
-					  unsigned int *rs, unsigned int *re,
-					  unsigned int end)
+static __always_inline
+void bitmap_next_set_region(unsigned long *bitmap, unsigned int *rs,
+			    unsigned int *re, unsigned int end)
 {
 	*rs = find_next_bit(bitmap, end, *rs);
 	*re = find_next_zero_bit(bitmap, end, *rs + 1);
@@ -640,7 +649,8 @@ static inline void bitmap_next_set_region(unsigned long *bitmap,
  * This is the complement to __bitmap_find_free_region() and releases
  * the found region (by clearing it in the bitmap).
  */
-static inline void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
+static __always_inline
+void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
 {
 	bitmap_clear(bitmap, pos, BIT(order));
 }
@@ -656,7 +666,8 @@ static inline void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
  * Returns: 0 on success, or %-EBUSY if specified region wasn't
  * free (not all bits were zero).
  */
-static inline int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
+static __always_inline
+int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
 {
 	unsigned int len = BIT(order);

@@ -680,7 +691,8 @@ static inline int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
  * Returns: the bit offset in bitmap of the allocated region,
  * or -errno on failure.
  */
-static inline int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
+static __always_inline
+int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
 {
 	unsigned int pos, end;	/* scans bitmap by regions of size order */

@@ -734,7 +746,7 @@ static inline int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
  * That is ``(u32 *)(&val)[0]`` gets the upper 32 bits,
  * but we expect the lower 32-bits of u64.
  */
-static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
+static __always_inline void bitmap_from_u64(unsigned long *dst, u64 mask)
 {
 	bitmap_from_arr64(dst, &mask, 64);
 }
@@ -749,9 +761,8 @@ static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
  * @map memory region. For @nbits = 0 and @nbits > BITS_PER_LONG the return
  * value is undefined.
  */
-static inline unsigned long bitmap_read(const unsigned long *map,
-					unsigned long start,
-					unsigned long nbits)
+static __always_inline
+unsigned long bitmap_read(const unsigned long *map, unsigned long start, unsigned long nbits)
 {
 	size_t index = BIT_WORD(start);
 	unsigned long offset = start % BITS_PER_LONG;
@@ -784,8 +795,9 @@ static inline unsigned long bitmap_read(const unsigned long *map,
  *
  * For @nbits == 0 and @nbits > BITS_PER_LONG no writes are performed.
  */
-static inline void bitmap_write(unsigned long *map, unsigned long value,
-				unsigned long start, unsigned long nbits)
+static __always_inline
+void bitmap_write(unsigned long *map, unsigned long value,
+		  unsigned long start, unsigned long nbits)
 {
 	size_t index;
 	unsigned long offset;
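As a hedged illustration of what the conversion above buys callers (a sketch, not code from this commit; the demo function name is made up): with a compile-time-constant nbits, the small_const_nbits() branches reduce to single-word operations once the wrappers are reliably inlined.

#include <linux/bitmap.h>

static void bitmap_inline_demo(void)		/* hypothetical caller */
{
	DECLARE_BITMAP(flags, 64);	/* a single unsigned long on 64-bit builds */

	bitmap_zero(flags, 64);		/* constant nbits: folds to one word store */
	bitmap_set(flags, 3, 4);	/* set bits 3..6 */

	if (!bitmap_empty(flags, 64))	/* constant nbits: single-word test */
		bitmap_clear(flags, 3, 4);	/* clear bits 3..6 again */
}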

include/linux/bits.h

Lines changed: 15 additions & 0 deletions
@@ -36,4 +36,19 @@
 #define GENMASK_ULL(h, l) \
 	(GENMASK_INPUT_CHECK(h, l) + __GENMASK_ULL(h, l))

+#if !defined(__ASSEMBLY__)
+/*
+ * Missing asm support
+ *
+ * __GENMASK_U128() depends on _BIT128() which would not work
+ * in the asm code, as it shifts an 'unsigned __int128' data
+ * type instead of direct representation of 128 bit constants
+ * such as long and unsigned long. The fundamental problem is
+ * that a 128 bit constant will get silently truncated by the
+ * gcc compiler.
+ */
+#define GENMASK_U128(h, l) \
+	(GENMASK_INPUT_CHECK(h, l) + __GENMASK_U128(h, l))
+#endif
+
 #endif /* __LINUX_BITS_H */
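A hedged usage sketch of the new macro (not part of the commit, and assuming the compiler provides 'unsigned __int128', which is why the definition is C-only behind the !__ASSEMBLY__ guard): GENMASK_U128() follows the familiar GENMASK()/GENMASK_ULL() pattern but yields a 128-bit constant.

#include <linux/bits.h>

/* bits 127..64 set, i.e. the upper half of a 128-bit mask */
static const unsigned __int128 upper_half = GENMASK_U128(127, 64);

/* all 128 bits set, equivalent to ~(unsigned __int128)0 */
static const unsigned __int128 all_set = GENMASK_U128(127, 0);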
