xref: /linux-6.15/include/linux/bitops.h (revision 1da177e4)
1 #ifndef _LINUX_BITOPS_H
2 #define _LINUX_BITOPS_H
3 #include <asm/types.h>
4 
/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines (man ffs), so it
 * returns a 1-based index and 0 for a zero argument.
 */

static inline int generic_ffs(int x)
{
	int bit = 1;
	int span;

	if (x == 0)
		return 0;
	/*
	 * Binary search: at each step, if the low `span` bits are all
	 * clear, the first set bit must lie above them — discard the
	 * low half and account for it in the result.
	 */
	for (span = 16; span != 0; span >>= 1) {
		unsigned int low_mask = (1u << span) - 1;

		if (!((unsigned int)x & low_mask)) {
			x >>= span;
			bit += span;
		}
	}
	return bit;
}
39 
/*
 * fls: find last (most-significant) bit set, 1-based.
 * fls(0) == 0, fls(1) == 1, fls(0x80000000) == 32.
 */

static inline int generic_fls(int x)
{
	/*
	 * Do the shifting on an unsigned copy: left-shifting a signed
	 * int whose upper bits are set (e.g. `x <<= 16` with bit 31
	 * already set, or about to become set) is undefined behaviour
	 * in C.  The result is unchanged for all inputs.
	 */
	unsigned int v = (unsigned int)x;
	int r = 32;

	if (!v)
		return 0;
	if (!(v & 0xffff0000u)) {
		v <<= 16;
		r -= 16;
	}
	if (!(v & 0xff000000u)) {
		v <<= 8;
		r -= 8;
	}
	if (!(v & 0xf0000000u)) {
		v <<= 4;
		r -= 4;
	}
	if (!(v & 0xc0000000u)) {
		v <<= 2;
		r -= 2;
	}
	if (!(v & 0x80000000u)) {
		v <<= 1;
		r -= 1;
	}
	return r;
}
72 
73 /*
74  * Include this here because some architectures need generic_ffs/fls in
75  * scope
76  */
77 #include <asm/bitops.h>
78 
static __inline__ int get_bitmask_order(unsigned int count)
{
	/*
	 * fls() gives the 1-based position of the most-significant set
	 * bit, i.e. the number of bits needed to represent count.
	 */
	return fls(count);	/* We could be slightly more clever with -1 here... */
}
86 
87 /*
88  * hweightN: returns the hamming weight (i.e. the number
89  * of bits set) of a N-bit word
90  */
91 
static inline unsigned int generic_hweight32(unsigned int w)
{
	/*
	 * Parallel ("SWAR") bit count: the subtraction trick yields
	 * per-2-bit counts, the remaining steps fold them together;
	 * the multiply sums all byte counts into the top byte.
	 */
	unsigned int count = w - ((w >> 1) & 0x55555555);

	count = (count & 0x33333333) + ((count >> 2) & 0x33333333);
	count = (count + (count >> 4)) & 0x0F0F0F0F;
	return (count * 0x01010101) >> 24;
}
100 
static inline unsigned int generic_hweight16(unsigned int w)
{
	/*
	 * Only the low 16 bits participate (matching the masks used
	 * throughout); count them with the parallel subtract/fold steps.
	 */
	unsigned int n = w & 0xffffu;

	n = n - ((n >> 1) & 0x5555);
	n = (n & 0x3333) + ((n >> 2) & 0x3333);
	n = (n + (n >> 4)) & 0x0F0F;
	return (n + (n >> 8)) & 0x00FF;
}
108 
static inline unsigned int generic_hweight8(unsigned int w)
{
	/* Count the set bits of the low byte (higher bits are ignored). */
	unsigned int n = w & 0xffu;

	n = n - ((n >> 1) & 0x55);
	n = (n & 0x33) + ((n >> 2) & 0x33);
	return (n + (n >> 4)) & 0x0F;
}
115 
116 static inline unsigned long generic_hweight64(__u64 w)
117 {
118 #if BITS_PER_LONG < 64
119 	return generic_hweight32((unsigned int)(w >> 32)) +
120 				generic_hweight32((unsigned int)w);
121 #else
122 	u64 res;
123 	res = (w & 0x5555555555555555ul) + ((w >> 1) & 0x5555555555555555ul);
124 	res = (res & 0x3333333333333333ul) + ((res >> 2) & 0x3333333333333333ul);
125 	res = (res & 0x0F0F0F0F0F0F0F0Ful) + ((res >> 4) & 0x0F0F0F0F0F0F0F0Ful);
126 	res = (res & 0x00FF00FF00FF00FFul) + ((res >> 8) & 0x00FF00FF00FF00FFul);
127 	res = (res & 0x0000FFFF0000FFFFul) + ((res >> 16) & 0x0000FFFF0000FFFFul);
128 	return (res & 0x00000000FFFFFFFFul) + ((res >> 32) & 0x00000000FFFFFFFFul);
129 #endif
130 }
131 
/*
 * hweight_long - hamming weight of an unsigned long, dispatching on
 * the platform word size (the dead branch is compiled away).
 */
static inline unsigned long hweight_long(unsigned long w)
{
	if (sizeof(w) == 4)
		return generic_hweight32(w);
	return generic_hweight64(w);
}
136 
/*
 * rol32 - rotate a 32-bit value left
 *
 * @word: value to rotate
 * @shift: bits to roll (a shift of 0 returns @word unchanged)
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	/*
	 * Mask both shift counts so that shift == 0 never produces
	 * "word >> 32", which is undefined behaviour in C (shift count
	 * equal to the type width).  (-shift) & 31 == 32 - shift for
	 * shift in 1..31, and 0 when shift == 0.
	 */
	return (word << (shift & 31)) | (word >> ((-shift) & 31));
}
147 
/*
 * ror32 - rotate a 32-bit value right
 *
 * @word: value to rotate
 * @shift: bits to roll (a shift of 0 returns @word unchanged)
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	/*
	 * Mask both shift counts so that shift == 0 never produces
	 * "word << 32", which is undefined behaviour in C (shift count
	 * equal to the type width).
	 */
	return (word >> (shift & 31)) | (word << ((-shift) & 31));
}
158 
159 #endif
160