/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#ifndef __ASSEMBLY__

#include <linux/align.h>
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/find.h>
#include <linux/limits.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/bitmap-str.h>

struct device;

/*
 * bitmaps provide bit arrays that consume one or more unsigned
 * longs.  The bitmap interface and available operations are listed
 * here, in bitmap.h.
 *
 * Function implementations generic to all architectures are in
 * lib/bitmap.c.  Function implementations that are architecture
 * specific are in various include/asm-<arch>/bitops.h headers
 * and other arch/<arch> specific files.
 *
 * See lib/bitmap.c for more details.
 */

/**
 * DOC: bitmap overview
 *
 * The available bitmap operations and their rough meaning in the
 * case that the bitmap is a single unsigned long are thus:
 *
 * The generated code is more efficient when nbits is known at
 * compile-time and at most BITS_PER_LONG.
 *
 * ::
 *
 *  bitmap_zero(dst, nbits)                     *dst = 0UL
 *  bitmap_fill(dst, nbits)                     *dst = ~0UL
 *  bitmap_copy(dst, src, nbits)                *dst = *src
 *  bitmap_and(dst, src1, src2, nbits)          *dst = *src1 & *src2
 *  bitmap_or(dst, src1, src2, nbits)           *dst = *src1 | *src2
 *  bitmap_xor(dst, src1, src2, nbits)          *dst = *src1 ^ *src2
 *  bitmap_andnot(dst, src1, src2, nbits)       *dst = *src1 & ~(*src2)
 *  bitmap_complement(dst, src, nbits)          *dst = ~(*src)
 *  bitmap_equal(src1, src2, nbits)             Are *src1 and *src2 equal?
 *  bitmap_intersects(src1, src2, nbits)        Do *src1 and *src2 overlap?
 *  bitmap_subset(src1, src2, nbits)            Is *src1 a subset of *src2?
 *  bitmap_empty(src, nbits)                    Are all bits zero in *src?
 *  bitmap_full(src, nbits)                     Are all bits set in *src?
 *  bitmap_weight(src, nbits)                   Hamming Weight: number set bits
 *  bitmap_weight_and(src1, src2, nbits)        Hamming Weight of and'ed bitmap
 *  bitmap_weight_andnot(src1, src2, nbits)     Hamming Weight of andnot'ed bitmap
 *  bitmap_set(dst, pos, nbits)                 Set specified bit area
 *  bitmap_clear(dst, pos, nbits)               Clear specified bit area
 *  bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
 *  bitmap_find_next_zero_area_off(buf, len, pos, n, mask, mask_off)  as above
 *  bitmap_shift_right(dst, src, n, nbits)      *dst = *src >> n
 *  bitmap_shift_left(dst, src, n, nbits)       *dst = *src << n
 *  bitmap_cut(dst, src, first, n, nbits)       Cut n bits from first, copy rest
 *  bitmap_replace(dst, old, new, mask, nbits)  *dst = (*old & ~(*mask)) | (*new & *mask)
 *  bitmap_scatter(dst, src, mask, nbits)       *dst = map(dense, sparse)(src)
 *  bitmap_gather(dst, src, mask, nbits)        *dst = map(sparse, dense)(src)
 *  bitmap_remap(dst, src, old, new, nbits)     *dst = map(old, new)(src)
 *  bitmap_bitremap(oldbit, old, new, nbits)    newbit = map(old, new)(oldbit)
 *  bitmap_onto(dst, orig, relmap, nbits)       *dst = orig relative to relmap
 *  bitmap_fold(dst, orig, sz, nbits)           dst bits = orig bits mod sz
 *  bitmap_parse(buf, buflen, dst, nbits)       Parse bitmap dst from kernel buf
 *  bitmap_parse_user(ubuf, ulen, dst, nbits)   Parse bitmap dst from user buf
 *  bitmap_parselist(buf, dst, nbits)           Parse bitmap dst from kernel buf
 *  bitmap_parselist_user(buf, dst, nbits)      Parse bitmap dst from user buf
 *  bitmap_find_free_region(bitmap, bits, order)  Find and allocate bit region
 *  bitmap_release_region(bitmap, pos, order)   Free specified bit region
 *  bitmap_allocate_region(bitmap, pos, order)  Allocate specified bit region
 *  bitmap_from_arr32(dst, buf, nbits)          Copy nbits from u32[] buf to dst
 *  bitmap_from_arr64(dst, buf, nbits)          Copy nbits from u64[] buf to dst
 *  bitmap_to_arr32(buf, src, nbits)            Copy nbits from buf to u32[] dst
 *  bitmap_to_arr64(buf, src, nbits)            Copy nbits from buf to u64[] dst
 *  bitmap_get_value8(map, start)               Get 8bit value from map at start
 *  bitmap_set_value8(map, value, start)        Set 8bit value to map at start
 *
 * Note: bitmap_zero() and bitmap_fill() operate over the whole region of
 * unsigned longs, that is, bits beyond @nbits, up to the unsigned long
 * boundary, will be zeroed or filled as well.  Consider using
 * bitmap_clear() or bitmap_set() for explicit zeroing or filling,
 * respectively.
 */
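
/*
 * Example: a minimal sketch of the core operations above (hypothetical
 * caller code; all names are illustrative only):
 *
 *      DECLARE_BITMAP(pending, 64);
 *      DECLARE_BITMAP(enabled, 64);
 *      DECLARE_BITMAP(to_run, 64);
 *      unsigned int n;
 *
 *      bitmap_zero(pending, 64);
 *      bitmap_fill(enabled, 64);
 *      bitmap_set(pending, 3, 2);
 *      bitmap_and(to_run, pending, enabled, 64);
 *      n = bitmap_weight(to_run, 64);
 *
 * Here n ends up as 2: bits 3 and 4 were set in pending, and all bits
 * are set in enabled.
 */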

/**
 * DOC: bitmap bitops
 *
 * Also the following operations in asm/bitops.h apply to bitmaps.::
 *
 *  set_bit(bit, addr)                  *addr |= bit
 *  clear_bit(bit, addr)                *addr &= ~bit
 *  change_bit(bit, addr)               *addr ^= bit
 *  test_bit(bit, addr)                 Is bit set in *addr?
 *  test_and_set_bit(bit, addr)         Set bit and return old value
 *  test_and_clear_bit(bit, addr)       Clear bit and return old value
 *  test_and_change_bit(bit, addr)      Change bit and return old value
 *  find_first_zero_bit(addr, nbits)    Position first zero bit in *addr
 *  find_first_bit(addr, nbits)         Position first set bit in *addr
 *  find_next_zero_bit(addr, nbits, bit)
 *                                      Position next zero bit in *addr >= bit
 *  find_next_bit(addr, nbits, bit)     Position next set bit in *addr >= bit
 *  find_next_and_bit(addr1, addr2, nbits, bit)
 *                                      Same as find_next_bit, but in
 *                                      (*addr1 & *addr2)
 *
 */

/**
 * DOC: declare bitmap
 * The DECLARE_BITMAP(name, bits) macro, in linux/types.h, can be used
 * to declare an array named 'name' of just enough unsigned longs to
 * contain all bit positions from 0 to 'bits' - 1.
 */

/*
 * Allocation and deallocation of bitmap.
 * Provided in lib/bitmap.c to avoid circular dependency.
 */
unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags);
unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node);
unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node);
void bitmap_free(const unsigned long *bitmap);

/* Managed variants of the above. */
unsigned long *devm_bitmap_alloc(struct device *dev,
                                 unsigned int nbits, gfp_t flags);
unsigned long *devm_bitmap_zalloc(struct device *dev,
                                  unsigned int nbits, gfp_t flags);
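
/*
 * Example: a minimal allocation sketch (hypothetical caller code):
 *
 *      unsigned long *map;
 *
 *      map = bitmap_zalloc(1024, GFP_KERNEL);
 *      if (!map)
 *              return -ENOMEM;
 *      ... use the bitmap ...
 *      bitmap_free(map);
 *
 * The devm_ variants tie the allocation's lifetime to the device, so no
 * explicit bitmap_free() is needed on that path.
 */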

/*
 * lib/bitmap.c provides these functions:
 */

bool __bitmap_equal(const unsigned long *bitmap1,
                    const unsigned long *bitmap2, unsigned int nbits);
bool __pure __bitmap_or_equal(const unsigned long *src1,
                              const unsigned long *src2,
                              const unsigned long *src3,
                              unsigned int nbits);
void __bitmap_complement(unsigned long *dst, const unsigned long *src,
                         unsigned int nbits);
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                          unsigned int shift, unsigned int nbits);
void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                         unsigned int shift, unsigned int nbits);
void bitmap_cut(unsigned long *dst, const unsigned long *src,
                unsigned int first, unsigned int cut, unsigned int nbits);
bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                  const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                 const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                  const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                     const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_replace(unsigned long *dst,
                      const unsigned long *old, const unsigned long *new,
                      const unsigned long *mask, unsigned int nbits);
bool __bitmap_intersects(const unsigned long *bitmap1,
                         const unsigned long *bitmap2, unsigned int nbits);
bool __bitmap_subset(const unsigned long *bitmap1,
                     const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int nbits);
unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
                                 const unsigned long *bitmap2, unsigned int nbits);
unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
                                    const unsigned long *bitmap2, unsigned int nbits);
void __bitmap_set(unsigned long *map, unsigned int start, int len);
void __bitmap_clear(unsigned long *map, unsigned int start, int len);

unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
                                             unsigned long size,
                                             unsigned long start,
                                             unsigned int nr,
                                             unsigned long align_mask,
                                             unsigned long align_offset);

/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of every zero area this function finds is a multiple of
 * that power of 2.  An @align_mask of 0 means no alignment is required.
 */
static inline unsigned long
bitmap_find_next_zero_area(unsigned long *map,
                           unsigned long size,
                           unsigned long start,
                           unsigned int nr,
                           unsigned long align_mask)
{
        return bitmap_find_next_zero_area_off(map, size, start, nr,
                                              align_mask, 0);
}
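
/*
 * Example: claiming an aligned run of free bits (hypothetical caller
 * code; map and size are assumed to describe the caller's bitmap):
 *
 *      unsigned long pos;
 *
 *      pos = bitmap_find_next_zero_area(map, size, 0, 8, 7);
 *      if (pos >= size)
 *              return -ENOSPC;
 *      bitmap_set(map, pos, 8);
 *
 * An align_mask of 7 (BIT(3) - 1) makes pos a multiple of 8.  Note that
 * "no room" is reported as a position at or past size, not as a
 * negative error code.
 */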

void bitmap_remap(unsigned long *dst, const unsigned long *src,
                  const unsigned long *old, const unsigned long *new, unsigned int nbits);
int bitmap_bitremap(int oldbit,
                    const unsigned long *old, const unsigned long *new, int bits);
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
                 const unsigned long *relmap, unsigned int bits);
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
                 unsigned int sz, unsigned int nbits);

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

static inline void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

        if (small_const_nbits(nbits))
                *dst = 0;
        else
                memset(dst, 0, len);
}

static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

        if (small_const_nbits(nbits))
                *dst = ~0UL;
        else
                memset(dst, 0xff, len);
}

static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
                               unsigned int nbits)
{
        unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

        if (small_const_nbits(nbits))
                *dst = *src;
        else
                memcpy(dst, src, len);
}

/*
 * Copy bitmap and clear tail bits in last word.
 */
static inline void bitmap_copy_clear_tail(unsigned long *dst,
                                          const unsigned long *src, unsigned int nbits)
{
        bitmap_copy(dst, src, nbits);
        if (nbits % BITS_PER_LONG)
                dst[nbits / BITS_PER_LONG] &= BITMAP_LAST_WORD_MASK(nbits);
}

/*
 * On 32-bit systems bitmaps are represented as u32 arrays internally.  On LE64
 * machines the order of hi and lo parts of numbers matches the bitmap structure.
 * In both cases no conversion is needed when copying data from/to arrays of
 * u32.  But in the LE64 case, the typecast in bitmap_copy_clear_tail() may lead
 * to out-of-bounds access.  To avoid that, both the LE and BE variants of
 * 64-bit architectures don't use bitmap_copy_clear_tail().
 */
#if BITS_PER_LONG == 64
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf,
                       unsigned int nbits);
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap,
                     unsigned int nbits);
#else
#define bitmap_from_arr32(bitmap, buf, nbits)                   \
        bitmap_copy_clear_tail((unsigned long *) (bitmap),      \
                               (const unsigned long *) (buf), (nbits))
#define bitmap_to_arr32(buf, bitmap, nbits)                     \
        bitmap_copy_clear_tail((unsigned long *) (buf),         \
                               (const unsigned long *) (bitmap), (nbits))
#endif

/*
 * On 64-bit systems bitmaps are represented as u64 arrays internally.  So,
 * the conversion is not needed when copying data from/to arrays of u64.
 */
#if BITS_PER_LONG == 32
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits);
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits);
#else
#define bitmap_from_arr64(bitmap, buf, nbits) \
        bitmap_copy_clear_tail((unsigned long *)(bitmap), (const unsigned long *)(buf), (nbits))
#define bitmap_to_arr64(buf, bitmap, nbits) \
        bitmap_copy_clear_tail((unsigned long *)(buf), (const unsigned long *)(bitmap), (nbits))
#endif
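
/*
 * Example: a round trip through a fixed-width u32 array, e.g. when a
 * bitmap is exchanged with a u32-based ABI (hypothetical caller code):
 *
 *      DECLARE_BITMAP(map, 96);
 *      u32 out[3];
 *
 *      bitmap_to_arr32(out, map, 96);
 *      bitmap_from_arr32(map, out, 96);
 *
 * Unlike a raw memcpy(), these helpers produce the same u32[] layout on
 * 32-bit, LE64 and BE64 kernels, and clear any tail bits past nbits.
 */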

static inline bool bitmap_and(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_and(dst, src1, src2, nbits);
}

static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                             const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 | *src2;
        else
                __bitmap_or(dst, src1, src2, nbits);
}

static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = *src1 ^ *src2;
        else
                __bitmap_xor(dst, src1, src2, nbits);
}

static inline bool bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                                 const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        return __bitmap_andnot(dst, src1, src2, nbits);
}

static inline void bitmap_complement(unsigned long *dst, const unsigned long *src,
                                     unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = ~(*src);
        else
                __bitmap_complement(dst, src, nbits);
}

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

static inline bool bitmap_equal(const unsigned long *src1,
                                const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
        if (__builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
            IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                return !memcmp(src1, src2, nbits / 8);
        return __bitmap_equal(src1, src2, nbits);
}

/**
 * bitmap_or_equal - Check whether the or of two bitmaps is equal to a third
 * @src1: Pointer to bitmap 1
 * @src2: Pointer to bitmap 2; will be or'ed with bitmap 1
 * @src3: Pointer to bitmap 3; compared to the result of *@src1 | *@src2
 * @nbits: number of bits in each of these bitmaps
 *
 * Returns: True if (*@src1 | *@src2) == *@src3, false otherwise
 */
static inline bool bitmap_or_equal(const unsigned long *src1,
                                   const unsigned long *src2,
                                   const unsigned long *src3,
                                   unsigned int nbits)
{
        if (!small_const_nbits(nbits))
                return __bitmap_or_equal(src1, src2, src3, nbits);

        return !(((*src1 | *src2) ^ *src3) & BITMAP_LAST_WORD_MASK(nbits));
}

static inline bool bitmap_intersects(const unsigned long *src1,
                                     const unsigned long *src2,
                                     unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
        else
                return __bitmap_intersects(src1, src2, nbits);
}

static inline bool bitmap_subset(const unsigned long *src1,
                                 const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !((*src1 & ~(*src2)) & BITMAP_LAST_WORD_MASK(nbits));
        else
                return __bitmap_subset(src1, src2, nbits);
}

static inline bool bitmap_empty(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !(*src & BITMAP_LAST_WORD_MASK(nbits));

        return find_first_bit(src, nbits) == nbits;
}

static inline bool bitmap_full(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return !(~(*src) & BITMAP_LAST_WORD_MASK(nbits));

        return find_first_zero_bit(src, nbits) == nbits;
}

static __always_inline
unsigned int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
        return __bitmap_weight(src, nbits);
}

static __always_inline
unsigned long bitmap_weight_and(const unsigned long *src1,
                                const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return hweight_long(*src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits));
        return __bitmap_weight_and(src1, src2, nbits);
}

static __always_inline
unsigned long bitmap_weight_andnot(const unsigned long *src1,
                                   const unsigned long *src2, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                return hweight_long(*src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits));
        return __bitmap_weight_andnot(src1, src2, nbits);
}

static __always_inline void bitmap_set(unsigned long *map, unsigned int start,
                                       unsigned int nbits)
{
        if (__builtin_constant_p(nbits) && nbits == 1)
                __set_bit(start, map);
        else if (small_const_nbits(start + nbits))
                *map |= GENMASK(start + nbits - 1, start);
        else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
                 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                memset((char *)map + start / 8, 0xff, nbits / 8);
        else
                __bitmap_set(map, start, nbits);
}
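
/*
 * Example: setting and clearing ranges with bitmap_set() and
 * bitmap_clear() (defined just below).  Unlike bitmap_fill() and
 * bitmap_zero(), these touch only the requested bits, not the whole
 * trailing word (hypothetical caller code):
 *
 *      DECLARE_BITMAP(map, 100);
 *
 *      bitmap_zero(map, 100);
 *      bitmap_set(map, 10, 20);
 *      bitmap_clear(map, 15, 5);
 *
 * After this, bits 10-14 and 20-29 are set and everything else is clear.
 */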

static __always_inline void bitmap_clear(unsigned long *map, unsigned int start,
                                         unsigned int nbits)
{
        if (__builtin_constant_p(nbits) && nbits == 1)
                __clear_bit(start, map);
        else if (small_const_nbits(start + nbits))
                *map &= ~GENMASK(start + nbits - 1, start);
        else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
                 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
                 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
                memset((char *)map + start / 8, 0, nbits / 8);
        else
                __bitmap_clear(map, start, nbits);
}

static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src,
                                      unsigned int shift, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift;
        else
                __bitmap_shift_right(dst, src, shift, nbits);
}

static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src,
                                     unsigned int shift, unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits);
        else
                __bitmap_shift_left(dst, src, shift, nbits);
}

static inline void bitmap_replace(unsigned long *dst,
                                  const unsigned long *old,
                                  const unsigned long *new,
                                  const unsigned long *mask,
                                  unsigned int nbits)
{
        if (small_const_nbits(nbits))
                *dst = (*old & ~(*mask)) | (*new & *mask);
        else
                __bitmap_replace(dst, old, new, mask, nbits);
}

/**
 * bitmap_scatter - Scatter a bitmap according to the given mask
 * @dst: scattered bitmap
 * @src: gathered bitmap
 * @mask: mask representing bits to assign to in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Scatters bitmap with sequential bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x005a, with @mask = 0x1313, @dst will be 0x0302.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000000001011010	0001001100010011	0000001100000010
 *
 * (Bits 0, 1, 2, 3, 4, 5 are copied to the bits 0, 1, 4, 8, 9, 12)
 *
 * A more 'visual' description of the operation::
 *
 *	src:  0000000001011010
 *	                ||||||
 *	         +------+|||||
 *	         |  +----+||||
 *	         |  |+----+|||
 *	         |  ||   +-+||
 *	         |  ||   |  ||
 *	mask: ...v..vv...v..vv
 *	      ...0..11...0..10
 *	dst:  0000001100000010
 *
 * A relationship exists between bitmap_scatter() and bitmap_gather().
 * bitmap_gather() can be seen as the 'reverse' bitmap_scatter() operation.
 * See bitmap_gather() for details related to this relationship.
 */
static inline void bitmap_scatter(unsigned long *dst, const unsigned long *src,
                                  const unsigned long *mask, unsigned int nbits)
{
        unsigned int n = 0;
        unsigned int bit;

        bitmap_zero(dst, nbits);

        for_each_set_bit(bit, mask, nbits)
                __assign_bit(bit, dst, test_bit(n++, src));
}

/**
 * bitmap_gather - Gather a bitmap according to given mask
 * @dst: gathered bitmap
 * @src: scattered bitmap
 * @mask: mask representing bits to extract from in the scattered bitmap
 * @nbits: number of bits in each of these bitmaps
 *
 * Gathers bitmap with sparse bits according to the given @mask.
 *
 * Example:
 * If @src bitmap = 0x0302, with @mask = 0x1313, @dst will be 0x001a.
 *
 * Or in binary form
 * @src			@mask			@dst
 * 0000001100000010	0001001100010011	0000000000011010
 *
 * (Bits 0, 1, 4, 8, 9, 12 are copied to the bits 0, 1, 2, 3, 4, 5)
 *
 * A more 'visual' description of the operation::
 *
 *	mask: ...v..vv...v..vv
 *	src:  0000001100000010
 *	         ^  ^^   ^   0
 *	         |  ||   |  10
 *	         |  ||   > 010
 *	         |  |+--> 1010
 *	         |  +--> 11010
 *	         +----> 011010
 *	dst:  0000000000011010
 *
 * A relationship exists between bitmap_gather() and bitmap_scatter().  See
 * bitmap_scatter() for the bitmap scatter detailed operations.
 * Suppose @scattered was computed using bitmap_scatter(scattered, src, mask, n).
 * The operation bitmap_gather(result, scattered, mask, n) leads to a result
 * equal or equivalent to src.
 *
 * The result can be 'equivalent' because bitmap_scatter() and bitmap_gather()
 * are not bijective.
 * The result and src values are equivalent in the sense that a call to
 * bitmap_scatter(res, src, mask, n) and a call to
 * bitmap_scatter(res, result, mask, n) will lead to the same res value.
 */
static inline void bitmap_gather(unsigned long *dst, const unsigned long *src,
                                 const unsigned long *mask, unsigned int nbits)
{
        unsigned int n = 0;
        unsigned int bit;

        bitmap_zero(dst, nbits);

        for_each_set_bit(bit, mask, nbits)
                __assign_bit(n++, dst, test_bit(bit, src));
}
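
/*
 * Example: a scatter/gather round trip using the values from the
 * kernel-docs above (hypothetical caller code):
 *
 *      DECLARE_BITMAP(dense, 64);
 *      DECLARE_BITMAP(sparse, 64);
 *      DECLARE_BITMAP(mask, 64);
 *
 *      bitmap_from_u64(mask, 0x1313);
 *      bitmap_from_u64(dense, 0x005a);
 *      bitmap_scatter(sparse, dense, mask, 16);
 *      bitmap_gather(dense, sparse, mask, 16);
 *
 * After the scatter, sparse holds 0x0302; after the gather, dense holds
 * 0x001a, which is 'equivalent' to the original 0x005a: only as many
 * low bits of dense as mask has set bits (here 6) survive the round trip.
 */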

static inline void bitmap_next_set_region(unsigned long *bitmap,
                                          unsigned int *rs, unsigned int *re,
                                          unsigned int end)
{
        *rs = find_next_bit(bitmap, end, *rs);
        *re = find_next_zero_bit(bitmap, end, *rs + 1);
}

/**
 * bitmap_release_region - release allocated bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to release
 * @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 */
static inline void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
{
        bitmap_clear(bitmap, pos, BIT(order));
}

/**
 * bitmap_allocate_region - allocate bitmap region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @pos: beginning of bit region to allocate
 * @order: region size (log base 2 of number of bits) to allocate
 *
 * Allocate (set bits in) a specified region of a bitmap.
 *
 * Returns: 0 on success, or %-EBUSY if specified region wasn't
 * free (not all bits were zero).
 */
static inline int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
{
        unsigned int len = BIT(order);

        if (find_next_bit(bitmap, pos + len, pos) < pos + len)
                return -EBUSY;
        bitmap_set(bitmap, pos, len);
        return 0;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 * @bitmap: array of unsigned longs corresponding to the bitmap
 * @bits: number of bits in the bitmap
 * @order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one).  Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Returns: the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
static inline int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
{
        unsigned int pos, end;          /* scans bitmap by regions of size order */

        for (pos = 0; (end = pos + BIT(order)) <= bits; pos = end) {
                if (!bitmap_allocate_region(bitmap, pos, order))
                        return pos;
        }
        return -ENOMEM;
}
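
/*
 * Example: power-of-two region management (hypothetical caller code):
 *
 *      DECLARE_BITMAP(pool, 128);
 *      int pos;
 *
 *      bitmap_zero(pool, 128);
 *      pos = bitmap_find_free_region(pool, 128, 3);
 *      if (pos < 0)
 *              return pos;
 *      ... use bits pos..pos+7 ...
 *      bitmap_release_region(pool, pos, 3);
 *
 * Order 3 requests an aligned run of BIT(3) = 8 bits, so on success
 * pos is a multiple of 8.
 */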

/**
 * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap.
 * @n: u64 value
 *
 * Linux bitmaps are internally arrays of unsigned longs, i.e. 32-bit
 * integers in a 32-bit environment, and 64-bit integers in a 64-bit one.
 *
 * There are four combinations of endianness and word size in linux
 * ABIs: LE64, BE64, LE32 and BE32.
 *
 * On 64-bit kernels 64-bit LE and BE numbers are naturally ordered in
 * bitmaps and therefore don't require any special handling.
 *
 * On 32-bit kernels the 32-bit LE ABI orders the lo word of a 64-bit number
 * in memory prior to the hi one, and 32-bit BE orders hi prior to lo.  The
 * bitmap on the other hand is represented as an array of 32-bit words and
 * the position of bit N may therefore be calculated as: word #(N/32) and
 * bit #(N%32) in that word.  For example, bit #42 is located at 10th
 * position of 2nd word.  This matches the 32-bit LE ABI, and we can simply
 * let the compiler store 64-bit values in memory as it usually does.  But
 * for BE we need to swap hi and lo words manually.
 *
 * With all that, the macro BITMAP_FROM_U64() does explicit reordering of hi
 * and lo parts of u64.  For LE32 it does nothing, and for BE environments
 * it swaps hi and lo words, as is expected by bitmap.
 */
#if __BITS_PER_LONG == 64
#define BITMAP_FROM_U64(n) (n)
#else
#define BITMAP_FROM_U64(n) ((unsigned long) ((u64)(n) & ULONG_MAX)), \
                                ((unsigned long) ((u64)(n) >> 32))
#endif

/**
 * bitmap_from_u64 - Check and swap words within u64.
 * @dst: destination bitmap
 * @mask: source value
 *
 * On a 32-bit big-endian kernel, reading the u64 @mask as two 32-bit words
 * directly via ``((u32 *)(&mask))[*]`` would yield the wrong word:
 * ``((u32 *)(&mask))[0]`` holds the upper 32 bits, while the bitmap layout
 * expects the lower 32 bits first.  This helper copies the words in the
 * order the bitmap expects.
 */
static inline void bitmap_from_u64(unsigned long *dst, u64 mask)
{
        bitmap_from_arr64(dst, &mask, 64);
}

/**
 * bitmap_get_value8 - get an 8-bit value within a memory region
 * @map: address to the bitmap memory region
 * @start: bit offset of the 8-bit value; must be a multiple of 8
 *
 * Returns the 8-bit value located at the @start bit offset within the @map
 * memory region.
 */
static inline unsigned long bitmap_get_value8(const unsigned long *map,
                                              unsigned long start)
{
        const size_t index = BIT_WORD(start);
        const unsigned long offset = start % BITS_PER_LONG;

        return (map[index] >> offset) & 0xFF;
}

/**
 * bitmap_set_value8 - set an 8-bit value within a memory region
 * @map: address to the bitmap memory region
 * @value: the 8-bit value; values wider than 8 bits may clobber bitmap
 * @start: bit offset of the 8-bit value; must be a multiple of 8
 */
static inline void bitmap_set_value8(unsigned long *map, unsigned long value,
                                     unsigned long start)
{
        const size_t index = BIT_WORD(start);
        const unsigned long offset = start % BITS_PER_LONG;

        map[index] &= ~(0xFFUL << offset);
        map[index] |= value << offset;
}

#endif /* __ASSEMBLY__ */

#endif /* __LINUX_BITMAP_H */