/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _LINUX_BITOPS_H_
#define _LINUX_BITOPS_H_

#include <sys/param.h>
#include <sys/types.h>
#include <sys/systm.h>
#include <sys/errno.h>
#include <sys/libkern.h>

#define BIT(nr) (1UL << (nr))
#define BIT_ULL(nr) (1ULL << (nr))
#ifdef __LP64__
#define BITS_PER_LONG 64
#else
#define BITS_PER_LONG 32
#endif

#define BITS_PER_LONG_LONG 64

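/*
 * Word-level helpers for bitmaps stored as arrays of longs.  BIT_MASK()
 * and BIT_WORD() select the bit within a word and the word index,
 * BITMAP_FIRST_WORD_MASK(start) covers the bits at and above 'start'
 * within its word, BITMAP_LAST_WORD_MASK(n) covers the low n bits of a
 * word, and GENMASK(h, l) builds a contiguous mask with bits l through
 * h (inclusive) set.
 */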
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
#define BITMAP_LAST_WORD_MASK(n) (~0UL >> (BITS_PER_LONG - (n)))
#define BITS_TO_LONGS(n) howmany((n), BITS_PER_LONG)
#define BIT_MASK(nr) (1UL << ((nr) & (BITS_PER_LONG - 1)))
#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
#define GENMASK(h, l) (((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
#define GENMASK_ULL(h, l) (((~0ULL) >> (BITS_PER_LONG_LONG - (h) - 1)) & ((~0ULL) << (l)))
#define BITS_PER_BYTE 8
#define BITS_PER_TYPE(t) (sizeof(t) * BITS_PER_BYTE)

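/*
 * Population count ("Hamming weight") wrappers mapping the Linux
 * hweight*() names onto the FreeBSD bitcount*() routines.
 */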
#define hweight8(x) bitcount((uint8_t)(x))
#define hweight16(x) bitcount16(x)
#define hweight32(x) bitcount32(x)
#define hweight64(x) bitcount64(x)
#define hweight_long(x) bitcountl(x)

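/*
 * The __ffs*()/__fls*() helpers return 0-based bit indices.  They wrap
 * the FreeBSD ffs()/fls() family, which is 1-based and returns 0 when
 * no bit is set, so calling these with a zero argument yields -1.
 */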
static inline int
__ffs(int mask)
{
        return (ffs(mask) - 1);
}

static inline int
__fls(int mask)
{
        return (fls(mask) - 1);
}

static inline int
__ffsl(long mask)
{
        return (ffsl(mask) - 1);
}

static inline int
__flsl(long mask)
{
        return (flsl(mask) - 1);
}

static inline int
fls64(uint64_t mask)
{
        return (flsll(mask));
}

static inline uint32_t
ror32(uint32_t word, unsigned int shift)
{
        return ((word >> shift) | (word << (32 - shift)));
}

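/* Find the first zero bit by searching the complemented mask. */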
#define ffz(mask) __ffs(~(mask))

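/*
 * Return the base-2 logarithm of 'count', rounded up; i.e. the
 * smallest order such that (1 << order) >= count.
 */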
static inline int get_count_order(unsigned int count)
{
        int order;

        order = fls(count) - 1;
        if (count & (count - 1))
                order++;
        return order;
}

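/*
 * Bitmap searches.  These routines scan a bitmap of 'size' bits and
 * return the index of the matching bit (first set, first clear, last
 * set, or the next set/clear bit at or after 'offset'), or 'size' if
 * no such bit exists.
 */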
static inline unsigned long
find_first_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int bit;

        for (bit = 0; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (*addr == 0)
                        continue;
                return (bit + __ffsl(*addr));
        }
        if (size) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

static inline unsigned long
find_first_zero_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int bit;

        for (bit = 0; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (~(*addr) == 0)
                        continue;
                return (bit + __ffsl(~(*addr)));
        }
        if (size) {
                mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

static inline unsigned long
find_last_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int offs;
        int bit;
        int pos;

        pos = size / BITS_PER_LONG;
        offs = size % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        if (offs) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __flsl(mask));
        }
        while (pos--) {
                addr--;
                bit -= BITS_PER_LONG;
                if (*addr)
                        return (bit + __flsl(*addr));
        }
        return (size);
}

static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size, unsigned long offset)
{
        long mask;
        int offs;
        int bit;
        int pos;

        if (offset >= size)
                return (size);
        pos = offset / BITS_PER_LONG;
        offs = offset % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        if (offs) {
                mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __ffsl(mask));
                if (size - bit <= BITS_PER_LONG)
                        return (size);
                bit += BITS_PER_LONG;
                addr++;
        }
        for (size -= bit; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (*addr == 0)
                        continue;
                return (bit + __ffsl(*addr));
        }
        if (size) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

static inline unsigned long
find_next_zero_bit(const unsigned long *addr, unsigned long size,
    unsigned long offset)
{
        long mask;
        int offs;
        int bit;
        int pos;

        if (offset >= size)
                return (size);
        pos = offset / BITS_PER_LONG;
        offs = offset % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        if (offs) {
                mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __ffsl(mask));
                if (size - bit <= BITS_PER_LONG)
                        return (size);
                bit += BITS_PER_LONG;
                addr++;
        }
        for (size -= bit; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (~(*addr) == 0)
                        continue;
                return (bit + __ffsl(~(*addr)));
        }
        if (size) {
                mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

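/*
 * Single-bit set/clear/test on a bitmap of longs.  In this
 * implementation the __-prefixed forms are identical to the plain ones
 * and are likewise backed by atomic operations.
 */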
#define __set_bit(i, a) \
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define set_bit(i, a) \
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define __clear_bit(i, a) \
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define clear_bit(i, a) \
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define test_bit(i, a) \
    !!(READ_ONCE(((volatile unsigned long *)(a))[BIT_WORD(i)]) & BIT_MASK(i))

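/*
 * Test-and-modify operations.  The plain variants are atomic,
 * implemented with a compare-and-swap loop; the __-prefixed variants
 * perform a plain read-modify-write and rely on the caller for any
 * needed serialization.  All return the previous value of the bit.
 */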
static inline int
test_and_clear_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);

        val = *var;
        while (!atomic_fcmpset_long(var, &val, val & ~bit))
                ;
        return !!(val & bit);
}

static inline int
__test_and_clear_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);

        val = *var;
        *var &= ~bit;

        return !!(val & bit);
}

static inline int
test_and_set_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);

        val = *var;
        while (!atomic_fcmpset_long(var, &val, val | bit))
                ;
        return !!(val & bit);
}

static inline int
__test_and_set_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);

        val = *var;
        *var |= bit;

        return !!(val & bit);
}

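/*
 * linux_reg_op() manipulates an aligned region of (1 << order) bits
 * starting at bit 'pos': REG_OP_ISFREE returns 1 if every bit in the
 * region is clear, REG_OP_ALLOC sets the region's bits, and
 * REG_OP_RELEASE clears them.
 */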
enum {
        REG_OP_ISFREE,
        REG_OP_ALLOC,
        REG_OP_RELEASE,
};

static inline int
linux_reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
        int nbits_reg;
        int index;
        int offset;
        int nlongs_reg;
        int nbitsinlong;
        unsigned long mask;
        int i;
        int ret = 0;

        nbits_reg = 1 << order;
        index = pos / BITS_PER_LONG;
        offset = pos - (index * BITS_PER_LONG);
        nlongs_reg = BITS_TO_LONGS(nbits_reg);
        nbitsinlong = min(nbits_reg, BITS_PER_LONG);

        mask = (1UL << (nbitsinlong - 1));
        mask += mask - 1;
        mask <<= offset;

        switch (reg_op) {
        case REG_OP_ISFREE:
                for (i = 0; i < nlongs_reg; i++) {
                        if (bitmap[index + i] & mask)
                                goto done;
                }
                ret = 1;
                break;

        case REG_OP_ALLOC:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] |= mask;
                break;

        case REG_OP_RELEASE:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] &= ~mask;
                break;
        }
done:
        return ret;
}

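/* Iterate over the indices of the set (or clear) bits in a bitmap. */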
#define for_each_set_bit(bit, addr, size) \
        for ((bit) = find_first_bit((addr), (size)); \
            (bit) < (size); \
            (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
        for ((bit) = find_first_zero_bit((addr), (size)); \
            (bit) < (size); \
            (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

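/*
 * Sign-extend a value whose sign bit is at position 'index' to a full
 * 64-bit signed value.
 */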
static inline uint64_t
sign_extend64(uint64_t value, int index)
{
        uint8_t shift = 63 - index;

        return ((int64_t)(value << shift) >> shift);
}

#endif /* _LINUX_BITOPS_H_ */