#ifndef _TOOLS_LINUX_BITOPS_H_
#define _TOOLS_LINUX_BITOPS_H_

#include <linux/kernel.h>
#include <linux/compiler.h>
#include <asm/hweight.h>

#ifndef __WORDSIZE
#define __WORDSIZE (__SIZEOF_LONG__ * 8)
#endif

#define BITS_PER_LONG __WORDSIZE

/* Mask for bit @nr within its word, and the index of the word holding it */
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
/* Number of longs/u64s/u32s/bytes needed to hold @nr bits, rounding up */
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u32))
#define BITS_TO_BYTES(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE)

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope.
 *
 * XXX: this needs to be asm/bitops.h, when we get to per-arch
 * optimizations.
 */
#include <asm-generic/bitops.h>

/* Iterate over each set bit in @addr, a bitmap of @size bits */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but start the search at the current value of @bit */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* population count: number of set bits in @w */
static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}

/* 1-based index of the most significant set bit in @l, or 0 if none */
static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

#endif /* _TOOLS_LINUX_BITOPS_H_ */
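
/*
 * Usage sketch (illustrative only, not part of the header): walking the
 * set bits of a small bitmap with for_each_set_bit() and counting them
 * with hweight_long(). The bitmap size and contents below are assumptions
 * chosen for the example; a real caller would also need <stdio.h> for
 * printf().
 *
 *	unsigned long bitmap[BITS_TO_LONGS(64)] = { 0xf0UL };
 *	unsigned int bit;
 *
 *	for_each_set_bit(bit, bitmap, 64)
 *		printf("bit %u is set\n", bit);		// prints 4, 5, 6, 7
 *
 *	printf("%lu bits set\n", hweight_long(bitmap[0]));	// prints 4
 */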