/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2015 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
31 #ifndef _LINUX_BITOPS_H_
32 #define _LINUX_BITOPS_H_
34 #include <sys/param.h>
35 #include <sys/types.h>
36 #include <sys/systm.h>
37 #include <sys/errno.h>
38 #include <sys/libkern.h>
#define BIT(nr)			(1UL << (nr))
#define BIT_ULL(nr)		(1ULL << (nr))
/*
 * BITS_PER_LONG must track the width of 'long' for the target ABI; the
 * two defines were unconditional (a redefinition), so restore the
 * LP64 conditional.
 */
#ifdef __LP64__
#define BITS_PER_LONG		64
#else
#define BITS_PER_LONG		32
#endif

/* Mask of the bits at and above 'start' within its word. */
#define BITMAP_FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
/* Mask of the low 'n' bits of a word; 'n' must be in [1, BITS_PER_LONG]. */
#define BITMAP_LAST_WORD_MASK(n)	(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold an 'n'-bit bitmap. */
#define BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
#define BIT_MASK(nr)		(1UL << ((nr) & (BITS_PER_LONG - 1)))
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
/* Contiguous mask covering bits 'l' through 'h', inclusive. */
#define GENMASK(h, l)	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
#define BITS_PER_BYTE	8

/* Population counts map onto the native bitcount*() primitives. */
#define hweight8(x)	bitcount((uint8_t)(x))
#define hweight16(x)	bitcount16(x)
#define hweight32(x)	bitcount32(x)
#define hweight64(x)	bitcount64(x)
#define hweight_long(x)	bitcountl(x)
/*
 * __ffs(): 0-based index of the least significant set bit.
 * Undefined for mask == 0 (ffs() returns 0 there, yielding -1).
 */
static inline int
__ffs(int mask)
{
        return (ffs(mask) - 1);
}
/*
 * __fls(): 0-based index of the most significant set bit.
 * Undefined for mask == 0 (fls() returns 0 there, yielding -1).
 */
static inline int
__fls(int mask)
{
        return (fls(mask) - 1);
}
/* __ffsl(): 0-based index of the least significant set bit of a long. */
static inline int
__ffsl(long mask)
{
        return (ffsl(mask) - 1);
}
/* __flsl(): 0-based index of the most significant set bit of a long. */
static inline int
__flsl(long mask)
{
        return (flsl(mask) - 1);
}
/*
 * ror32(): rotate a 32-bit word right by 'shift' bits.
 *
 * The shift count is reduced modulo 32 and shift == 0 is handled
 * explicitly: the plain "(word >> shift) | (word << (32 - shift))"
 * form is undefined behavior when shift is 0 (left shift by 32).
 */
static inline uint32_t
ror32(uint32_t word, unsigned int shift)
{
        shift &= 31;
        if (shift == 0)
                return (word);
        return ((word >> shift) | (word << (32 - shift)));
}
/* ffz(): 0-based index of the first zero bit; undefined if mask is all ones. */
#define ffz(mask)	__ffs(~(mask))
/*
 * get_count_order(): smallest 'order' such that (1 << order) >= count,
 * i.e. ceil(log2(count)).  Undefined for count == 0.
 */
static inline int get_count_order(unsigned int count)
{
        int order;

        order = fls(count) - 1;
        /* Round up when count is not an exact power of two. */
        if (count & (count - 1))
                order++;
        return order;
}
110 static inline unsigned long
111 find_first_bit(const unsigned long *addr, unsigned long size)
116 for (bit = 0; size >= BITS_PER_LONG;
117 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
120 return (bit + __ffsl(*addr));
123 mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
132 static inline unsigned long
133 find_first_zero_bit(const unsigned long *addr, unsigned long size)
138 for (bit = 0; size >= BITS_PER_LONG;
139 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
142 return (bit + __ffsl(~(*addr)));
145 mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
154 static inline unsigned long
155 find_last_bit(const unsigned long *addr, unsigned long size)
162 pos = size / BITS_PER_LONG;
163 offs = size % BITS_PER_LONG;
164 bit = BITS_PER_LONG * pos;
167 mask = (*addr) & BITMAP_LAST_WORD_MASK(offs);
169 return (bit + __flsl(mask));
173 bit -= BITS_PER_LONG;
175 return (bit + __flsl(*addr));
180 static inline unsigned long
181 find_next_bit(const unsigned long *addr, unsigned long size, unsigned long offset)
190 pos = offset / BITS_PER_LONG;
191 offs = offset % BITS_PER_LONG;
192 bit = BITS_PER_LONG * pos;
195 mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs);
197 return (bit + __ffsl(mask));
198 if (size - bit <= BITS_PER_LONG)
200 bit += BITS_PER_LONG;
203 for (size -= bit; size >= BITS_PER_LONG;
204 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
207 return (bit + __ffsl(*addr));
210 mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
219 static inline unsigned long
220 find_next_zero_bit(const unsigned long *addr, unsigned long size,
221 unsigned long offset)
230 pos = offset / BITS_PER_LONG;
231 offs = offset % BITS_PER_LONG;
232 bit = BITS_PER_LONG * pos;
235 mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs);
237 return (bit + __ffsl(mask));
238 if (size - bit <= BITS_PER_LONG)
240 bit += BITS_PER_LONG;
243 for (size -= bit; size >= BITS_PER_LONG;
244 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
247 return (bit + __ffsl(~(*addr)));
250 mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
/*
 * bitmap_zero(): clear a 'size'-bit bitmap.  Works at whole-long
 * granularity, so any tail bits of the last word are cleared too.
 */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
        int len;

        len = BITS_TO_LONGS(size) * sizeof(long);
        memset(addr, 0, len);
}
269 bitmap_fill(unsigned long *addr, int size)
274 len = (size / BITS_PER_LONG) * sizeof(long);
275 memset(addr, 0xff, len);
276 tail = size & (BITS_PER_LONG - 1);
278 addr[size / BITS_PER_LONG] = BITMAP_LAST_WORD_MASK(tail);
282 bitmap_full(unsigned long *addr, int size)
289 len = size / BITS_PER_LONG;
290 for (i = 0; i < len; i++)
293 tail = size & (BITS_PER_LONG - 1);
295 mask = BITMAP_LAST_WORD_MASK(tail);
296 if ((addr[i] & mask) != mask)
303 bitmap_empty(unsigned long *addr, int size)
310 len = size / BITS_PER_LONG;
311 for (i = 0; i < len; i++)
314 tail = size & (BITS_PER_LONG - 1);
316 mask = BITMAP_LAST_WORD_MASK(tail);
317 if ((addr[i] & mask) != 0)
/*
 * Single-bit accessors.  Both the __-prefixed ("non-atomic" in Linux) and
 * plain variants map onto FreeBSD's atomic(9) long operations here.
 */
#define __set_bit(i, a)							\
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define set_bit(i, a)							\
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define __clear_bit(i, a)						\
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define clear_bit(i, a)							\
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

/* test_bit(): read bit 'i' with acquire semantics; yields 0 or 1. */
#define test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile unsigned long *)(a))[BIT_WORD(i)]) & \
    BIT_MASK(i))
340 test_and_clear_bit(long bit, volatile unsigned long *var)
344 var += BIT_WORD(bit);
345 bit %= BITS_PER_LONG;
349 } while (atomic_cmpset_long(var, val, val & ~bit) == 0);
351 return !!(val & bit);
355 __test_and_clear_bit(long bit, volatile unsigned long *var)
359 var += BIT_WORD(bit);
360 bit %= BITS_PER_LONG;
366 return !!(val & bit);
370 test_and_set_bit(long bit, volatile unsigned long *var)
374 var += BIT_WORD(bit);
375 bit %= BITS_PER_LONG;
379 } while (atomic_cmpset_long(var, val, val | bit) == 0);
381 return !!(val & bit);
385 __test_and_set_bit(long bit, volatile unsigned long *var)
389 var += BIT_WORD(bit);
390 bit %= BITS_PER_LONG;
396 return !!(val & bit);
400 bitmap_set(unsigned long *map, int start, int nr)
402 unsigned long *p = map + BIT_WORD(start);
403 const int size = start + nr;
404 int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
405 unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
407 while (nr - bits_to_set >= 0) {
410 bits_to_set = BITS_PER_LONG;
415 mask_to_set &= BITMAP_LAST_WORD_MASK(size);
421 bitmap_clear(unsigned long *map, int start, int nr)
423 unsigned long *p = map + BIT_WORD(start);
424 const int size = start + nr;
425 int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
426 unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
428 while (nr - bits_to_clear >= 0) {
429 *p &= ~mask_to_clear;
431 bits_to_clear = BITS_PER_LONG;
432 mask_to_clear = ~0UL;
436 mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
437 *p &= ~mask_to_clear;
448 __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
459 nbits_reg = 1 << order;
460 index = pos / BITS_PER_LONG;
461 offset = pos - (index * BITS_PER_LONG);
462 nlongs_reg = BITS_TO_LONGS(nbits_reg);
463 nbitsinlong = min(nbits_reg, BITS_PER_LONG);
465 mask = (1UL << (nbitsinlong - 1));
471 for (i = 0; i < nlongs_reg; i++) {
472 if (bitmap[index + i] & mask)
479 for (i = 0; i < nlongs_reg; i++)
480 bitmap[index + i] |= mask;
484 for (i = 0; i < nlongs_reg; i++)
485 bitmap[index + i] &= ~mask;
493 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
498 for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
499 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
501 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
508 bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
510 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
512 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
517 bitmap_release_region(unsigned long *bitmap, int pos, int order)
519 __reg_op(bitmap, pos, order, REG_OP_RELEASE);
/* Iterate 'bit' over every set bit in the first 'size' bits of 'addr'. */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	    (bit) < (size);					\
	    (bit) = find_next_bit((addr), (size), (bit) + 1))
527 static inline unsigned
528 bitmap_weight(unsigned long *bitmap, unsigned nbits)
533 for_each_set_bit(bit, bitmap, nbits)
539 bitmap_equal(const unsigned long *pa,
540 const unsigned long *pb, unsigned bits)
543 unsigned y = bits / BITS_PER_LONG;
545 for (x = 0; x != y; x++) {
550 y = bits % BITS_PER_LONG;
552 if ((pa[x] ^ pb[x]) & BITMAP_LAST_WORD_MASK(y))
/*
 * sign_extend64(): sign-extend 'value' to 64 bits, treating bit 'index'
 * (0-based) as the sign bit.  Relies on arithmetic right shift of a
 * negative int64_t, which is implementation-defined but universally
 * sign-propagating on supported compilers.
 */
static inline uint64_t
sign_extend64(uint64_t value, int index)
{
        uint8_t shift = 63 - index;

        return ((int64_t)(value << shift) >> shift);
}
566 #endif /* _LINUX_BITOPS_H_ */