1 /*- 2 * Copyright (c) 2010 Isilon Systems, Inc. 3 * Copyright (c) 2010 iX Systems, Inc. 4 * Copyright (c) 2010 Panasas, Inc. 5 * Copyright (c) 2013-2015 Mellanox Technologies, Ltd. 6 * All rights reserved. 7 * 8 * Redistribution and use in source and binary forms, with or without 9 * modification, are permitted provided that the following conditions 10 * are met: 11 * 1. Redistributions of source code must retain the above copyright 12 * notice unmodified, this list of conditions, and the following 13 * disclaimer. 14 * 2. Redistributions in binary form must reproduce the above copyright 15 * notice, this list of conditions and the following disclaimer in the 16 * documentation and/or other materials provided with the distribution. 17 * 18 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR 19 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 20 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 21 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, 22 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT 23 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 24 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 25 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF 27 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 * $FreeBSD$
 */
#ifndef _LINUX_BITOPS_H_
#define _LINUX_BITOPS_H_

#include <sys/param.h>
#include <sys/types.h>
#include <sys/systm.h>
#include <sys/errno.h>
#include <sys/libkern.h>

/*
 * LinuxKPI bit operations.
 *
 * Bitmaps are arrays of "unsigned long".  Bit N lives in word
 * N / BITS_PER_LONG at bit position N % BITS_PER_LONG, matching the
 * Linux bitmap layout.
 */

#define BIT(nr)                 (1UL << (nr))
#define BIT_ULL(nr)             (1ULL << (nr))
#ifdef __LP64__
#define BITS_PER_LONG           64
#else
#define BITS_PER_LONG           32
#endif

/* Word mask with all bits at positions >= (start % BITS_PER_LONG) set. */
#define BITMAP_FIRST_WORD_MASK(start)   (~0UL << ((start) % BITS_PER_LONG))
/*
 * Word mask with the low (n) bits set; valid for 1 <= n <= BITS_PER_LONG.
 * NOTE(review): n == 0 shifts by BITS_PER_LONG (undefined behavior) and
 * n > BITS_PER_LONG shifts by a negative count -- callers below guard
 * the n == 0 case with "if (tail)" / "if (size)", but see the notes at
 * bitmap_set()/bitmap_clear() for the n > BITS_PER_LONG case.
 */
#define BITMAP_LAST_WORD_MASK(n)        (~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold an n-bit bitmap (rounded up). */
#define BITS_TO_LONGS(n)        howmany((n), BITS_PER_LONG)
/* In-word mask for bit (nr). */
#define BIT_MASK(nr)            (1UL << ((nr) & (BITS_PER_LONG - 1)))
/* Index of the word containing bit (nr). */
#define BIT_WORD(nr)            ((nr) / BITS_PER_LONG)
/* Contiguous mask with bits l..h (inclusive) set; requires h >= l. */
#define GENMASK(h, l)   (((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
#define BITS_PER_BYTE   8

/* Population count ("Hamming weight") wrappers over libkern bitcount*(). */
#define hweight8(x)     bitcount((uint8_t)(x))
#define hweight16(x)    bitcount16(x)
#define hweight32(x)    bitcount32(x)
#define hweight64(x)    bitcount64(x)
#define hweight_long(x) bitcountl(x)

/*
 * Zero-based find-first-set.  ffs() is one-based and returns 0 for an
 * all-zero argument, so this returns -1 when mask == 0.
 */
static inline int
__ffs(int mask)
{
        return (ffs(mask) - 1);
}

/* Zero-based index of the most significant set bit; -1 if mask == 0. */
static inline int
__fls(int mask)
{
        return (fls(mask) - 1);
}

/* "long"-wide variant of __ffs(); -1 if mask == 0. */
static inline int
__ffsl(long mask)
{
        return (ffsl(mask) - 1);
}

/* "long"-wide variant of __fls(); -1 if mask == 0. */
static inline int
__flsl(long mask)
{
        return (flsl(mask) - 1);
}

/* One-based index of the most significant set bit; 0 if mask == 0. */
static inline int
fls64(uint64_t mask)
{
        return (flsll(mask));
}

/*
 * Rotate a 32-bit word right by "shift" bits.
 * NOTE(review): shift == 0 evaluates "word << 32", which is undefined
 * behavior; callers are presumably expected to pass 1..31 -- confirm.
 */
static inline uint32_t
ror32(uint32_t word, unsigned int shift)
{
        return ((word >> shift) | (word << (32 - shift)));
}

/* Zero-based index of the first clear ("zero") bit in mask. */
#define ffz(mask)       __ffs(~(mask))

/* Smallest order such that (1 << order) >= count, i.e. ceil(log2(count)). */
static inline int get_count_order(unsigned int count)
{
        int order;

        order = fls(count) - 1;
        /* Round up when count is not an exact power of two. */
        if (count & (count - 1))
                order++;
        return order;
}

/*
 * Return the index of the lowest set bit within the first "size" bits
 * of "addr", or "size" if none is set.  Bits past "size" in the final
 * partial word are masked off and ignored.
 */
static inline unsigned long
find_first_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int bit;

        /* Scan whole words first. */
        for (bit = 0; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (*addr == 0)
                        continue;
                return (bit + __ffsl(*addr));
        }
        /* Then the trailing partial word, if any. */
        if (size) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

/*
 * Return the index of the lowest clear bit within the first "size"
 * bits of "addr", or "size" if every bit is set.
 */
static inline unsigned long
find_first_zero_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int bit;

        /* Scan whole words, skipping all-ones words. */
        for (bit = 0; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (~(*addr) == 0)
                        continue;
                return (bit + __ffsl(~(*addr)));
        }
        /* Trailing partial word: search the complement under the size mask. */
        if (size) {
                mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

/*
 * Return the index of the highest set bit within the first "size" bits
 * of "addr", or "size" if none is set.
 */
static inline unsigned long
find_last_bit(const unsigned long *addr, unsigned long size)
{
        long mask;
        int offs;
        int bit;
        int pos;

        pos = size / BITS_PER_LONG;
        offs = size % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        /* Check the trailing partial word first -- it holds the highest bits. */
        if (offs) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __flsl(mask));
        }
        /* Walk backwards through the whole words. */
        while (pos--) {
                addr--;
                bit -= BITS_PER_LONG;
                if (*addr)
                        return (bit + __flsl(*addr));
        }
        return (size);
}

/*
 * Return the index of the lowest set bit at position >= "offset" within
 * the first "size" bits of "addr", or "size" if none is set.
 * NOTE(review): in the first (partial) word the candidate mask is only
 * clipped below "offset", not above "size"; a stray bit set past "size"
 * in that word would be returned.  This assumes callers keep bits
 * beyond "size" clear -- confirm against users.
 */
static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size, unsigned long offset)
{
        long mask;
        int offs;
        int bit;
        int pos;

        if (offset >= size)
                return (size);
        pos = offset / BITS_PER_LONG;
        offs = offset % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        /* Handle a partial first word: drop bits below "offset". */
        if (offs) {
                mask = (*addr) & ~BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __ffsl(mask));
                /* If this word was also the last one, nothing left to scan. */
                if (size - bit <= BITS_PER_LONG)
                        return (size);
                bit += BITS_PER_LONG;
                addr++;
        }
        /* Scan the remaining whole words. */
        for (size -= bit; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (*addr == 0)
                        continue;
                return (bit + __ffsl(*addr));
        }
        /* Trailing partial word, clipped to "size". */
        if (size) {
                mask = (*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

/*
 * Return the index of the lowest clear bit at position >= "offset"
 * within the first "size" bits of "addr", or "size" if none.
 * Mirrors find_next_bit() with the word complemented; the same
 * NOTE(review) about stray bits past "size" in the first partial word
 * applies here to stray *clear* bits.
 */
static inline unsigned long
find_next_zero_bit(const unsigned long *addr, unsigned long size,
    unsigned long offset)
{
        long mask;
        int offs;
        int bit;
        int pos;

        if (offset >= size)
                return (size);
        pos = offset / BITS_PER_LONG;
        offs = offset % BITS_PER_LONG;
        bit = BITS_PER_LONG * pos;
        addr += pos;
        /* Partial first word: complement, then drop bits below "offset". */
        if (offs) {
                mask = ~(*addr) & ~BITMAP_LAST_WORD_MASK(offs);
                if (mask)
                        return (bit + __ffsl(mask));
                if (size - bit <= BITS_PER_LONG)
                        return (size);
                bit += BITS_PER_LONG;
                addr++;
        }
        /* Skip all-ones whole words. */
        for (size -= bit; size >= BITS_PER_LONG;
            size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
                if (~(*addr) == 0)
                        continue;
                return (bit + __ffsl(~(*addr)));
        }
        /* Trailing partial word, clipped to "size". */
        if (size) {
                mask = ~(*addr) & BITMAP_LAST_WORD_MASK(size);
                if (mask)
                        bit += __ffsl(mask);
                else
                        bit += size;
        }
        return (bit);
}

/* Clear an entire "size"-bit bitmap, including tail bits past "size". */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
        int len;

        len = BITS_TO_LONGS(size) * sizeof(long);
        memset(addr, 0, len);
}

/*
 * Set the first "size" bits.  The tail word is assigned (not OR-ed), so
 * bits past "size" in that word end up clear.
 */
static inline void
bitmap_fill(unsigned long *addr, int size)
{
        int tail;
        int len;

        len = (size / BITS_PER_LONG) * sizeof(long);
        memset(addr, 0xff, len);
        tail = size & (BITS_PER_LONG - 1);
        if (tail)
                addr[size / BITS_PER_LONG] = BITMAP_LAST_WORD_MASK(tail);
}

/* Return non-zero iff all of the first "size" bits are set. */
static inline int
bitmap_full(unsigned long *addr, int size)
{
        unsigned long mask;
        int tail;
        int len;
        int i;

        len = size / BITS_PER_LONG;
        for (i = 0; i < len; i++)
                if (addr[i] != ~0UL)
                        return (0);
        /* Check the partial tail word under its mask; i == len here. */
        tail = size & (BITS_PER_LONG - 1);
        if (tail) {
                mask = BITMAP_LAST_WORD_MASK(tail);
                if ((addr[i] & mask) != mask)
                        return (0);
        }
        return (1);
}

/* Return non-zero iff none of the first "size" bits are set. */
static inline int
bitmap_empty(unsigned long *addr, int size)
{
        unsigned long mask;
        int tail;
        int len;
        int i;

        len = size / BITS_PER_LONG;
        for (i = 0; i < len; i++)
                if (addr[i] != 0)
                        return (0);
        /* Check the partial tail word under its mask; i == len here. */
        tail = size & (BITS_PER_LONG - 1);
        if (tail) {
                mask = BITMAP_LAST_WORD_MASK(tail);
                if ((addr[i] & mask) != 0)
                        return (0);
        }
        return (1);
}

/*
 * Single-bit set/clear/test.
 * NOTE(review): Linux documents the double-underscore variants as
 * non-atomic; here both flavors expand to the same atomic(9) operation,
 * which is stronger than required but safe.
 */
#define __set_bit(i, a)                                                 \
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define set_bit(i, a)                                                   \
    atomic_set_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define __clear_bit(i, a)                                               \
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

#define clear_bit(i, a)                                                 \
    atomic_clear_long(&((volatile unsigned long *)(a))[BIT_WORD(i)], BIT_MASK(i))

/* Read bit (i) with acquire semantics; evaluates to 0 or 1. */
#define test_bit(i, a)                                                  \
    !!(atomic_load_acq_long(&((volatile unsigned long *)(a))[BIT_WORD(i)]) & \
    BIT_MASK(i))

/*
 * Atomically clear bit "bit" in the bitmap at "var" and return its
 * previous value (0 or 1), via a compare-and-swap retry loop.
 */
static inline int
test_and_clear_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);             /* "bit" becomes the in-word mask */
        do {
                val = *var;
        } while (atomic_cmpset_long(var, val, val & ~bit) == 0);

        return !!(val & bit);
}

/* Non-atomic version of test_and_clear_bit(). */
static inline int
__test_and_clear_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);             /* "bit" becomes the in-word mask */

        val = *var;
        *var &= ~bit;

        return !!(val & bit);
}

/*
 * Atomically set bit "bit" in the bitmap at "var" and return its
 * previous value (0 or 1), via a compare-and-swap retry loop.
 */
static inline int
test_and_set_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);             /* "bit" becomes the in-word mask */
        do {
                val = *var;
        } while (atomic_cmpset_long(var, val, val | bit) == 0);

        return !!(val & bit);
}

/* Non-atomic version of test_and_set_bit(). */
static inline int
__test_and_set_bit(long bit, volatile unsigned long *var)
{
        long val;

        var += BIT_WORD(bit);
        bit %= BITS_PER_LONG;
        bit = (1UL << bit);             /* "bit" becomes the in-word mask */

        val = *var;
        *var |= bit;

        return !!(val & bit);
}

/*
 * Set "nr" consecutive bits starting at bit "start" (non-atomic).
 * NOTE(review): in the tail, "size" (= start + nr) may exceed
 * BITS_PER_LONG, making BITMAP_LAST_WORD_MASK shift by a negative
 * count -- undefined behavior, though x86 shift-count masking happens
 * to yield the intended low (size % BITS_PER_LONG)-bit mask.  Linux's
 * macro masks the count ((-(n)) & (BITS_PER_LONG - 1)) to avoid this.
 */
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
        unsigned long *p = map + BIT_WORD(start);
        const int size = start + nr;
        int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
        unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

        /* Full coverage of each word while at least bits_to_set remain. */
        while (nr - bits_to_set >= 0) {
                *p |= mask_to_set;
                nr -= bits_to_set;
                bits_to_set = BITS_PER_LONG;
                mask_to_set = ~0UL;
                p++;
        }
        /* Partial tail word. */
        if (nr) {
                mask_to_set &= BITMAP_LAST_WORD_MASK(size);
                *p |= mask_to_set;
        }
}

/*
 * Clear "nr" consecutive bits starting at bit "start" (non-atomic).
 * Same structure and same NOTE(review) about the tail mask as
 * bitmap_set() above.
 */
static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
        unsigned long *p = map + BIT_WORD(start);
        const int size = start + nr;
        int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
        unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

        /* Full coverage of each word while at least bits_to_clear remain. */
        while (nr - bits_to_clear >= 0) {
                *p &= ~mask_to_clear;
                nr -= bits_to_clear;
                bits_to_clear = BITS_PER_LONG;
                mask_to_clear = ~0UL;
                p++;
        }
        /* Partial tail word. */
        if (nr) {
                mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
                *p &= ~mask_to_clear;
        }
}

/* Operation selectors for __reg_op(). */
enum {
        REG_OP_ISFREE,          /* report whether region is all-clear */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};

/*
 * Apply "reg_op" to the region of (1 << order) bits starting at bit
 * "pos".  Returns 1 for REG_OP_ISFREE when the region is entirely
 * clear, 0 otherwise (including for ALLOC/RELEASE, which have no
 * meaningful return).  A single in-word mask is reused across all
 * nlongs_reg words, which matches multi-word regions only when "pos"
 * is region-aligned (offset == 0) -- as produced by the callers below.
 */
static inline int
__reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
        int nbits_reg;          /* number of bits in region */
        int index;              /* index of first long of region in bitmap */
        int offset;             /* bit offset of region within bitmap[index] */
        int nlongs_reg;         /* number of longs spanned by region */
        int nbitsinlong;        /* bits of region in each spanned long */
        unsigned long mask;     /* bitmask for the region in one long */
        int i;                  /* scans bitmap by longs */
        int ret = 0;            /* return value */

        nbits_reg = 1 << order;
        index = pos / BITS_PER_LONG;
        offset = pos - (index * BITS_PER_LONG);
        nlongs_reg = BITS_TO_LONGS(nbits_reg);
        nbitsinlong = min(nbits_reg, BITS_PER_LONG);

        /*
         * Build a mask of nbitsinlong bits in two steps to avoid
         * "1UL << BITS_PER_LONG" (undefined) when the region fills a
         * whole word.
         */
        mask = (1UL << (nbitsinlong - 1));
        mask += mask - 1;
        mask <<= offset;

        switch (reg_op) {
        case REG_OP_ISFREE:
                for (i = 0; i < nlongs_reg; i++) {
                        if (bitmap[index + i] & mask)
                                goto done;
                }
                ret = 1;        /* all bits in region were clear */
                break;

        case REG_OP_ALLOC:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] |= mask;
                break;

        case REG_OP_RELEASE:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] &= ~mask;
                break;
        }
done:
        return ret;
}

/*
 * Find a free, naturally-aligned region of (1 << order) bits within a
 * "bits"-bit bitmap, mark it allocated, and return its starting bit
 * position; -ENOMEM if no free region exists.
 */
static inline int
bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
{
        int pos;
        int end;

        /* Step through candidate regions in region-sized increments. */
        for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
                if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
                        continue;
                __reg_op(bitmap, pos, order, REG_OP_ALLOC);
                return pos;
        }
        return -ENOMEM;
}

/*
 * Allocate the specific (1 << order)-bit region starting at "pos";
 * -EBUSY if any bit in it is already set.
 */
static inline int
bitmap_allocate_region(unsigned long *bitmap, int pos, int order)
{
        if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
                return -EBUSY;
        __reg_op(bitmap, pos, order, REG_OP_ALLOC);
        return 0;
}

/* Release (clear) a previously allocated (1 << order)-bit region. */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}

/* Iterate "bit" over each set bit in the first "size" bits of "addr". */
#define for_each_set_bit(bit, addr, size) \
        for ((bit) = find_first_bit((addr), (size));            \
            (bit) < (size);                                     \
            (bit) = find_next_bit((addr), (size), (bit) + 1))

/*
 * Count the set bits in the first "nbits" bits by walking them with
 * for_each_set_bit() (iterative, not a word-at-a-time popcount).
 */
static inline unsigned
bitmap_weight(unsigned long *bitmap, unsigned nbits)
{
        unsigned bit;
        unsigned retval = 0;

        for_each_set_bit(bit, bitmap, nbits)
                retval++;
        return (retval);
}

/*
 * Return non-zero iff the first "bits" bits of "pa" and "pb" are equal.
 * Whole words are compared directly; the tail word only under its mask,
 * so bits past "bits" are ignored.
 */
static inline int
bitmap_equal(const unsigned long *pa,
    const unsigned long *pb, unsigned bits)
{
        unsigned x;
        unsigned y = bits / BITS_PER_LONG;

        for (x = 0; x != y; x++) {
                if (pa[x] != pb[x])
                        return (0);
        }

        y = bits % BITS_PER_LONG;
        if (y != 0) {
                if ((pa[x] ^ pb[x]) & BITMAP_LAST_WORD_MASK(y))
                        return (0);
        }
        return (1);
}

/*
 * Sign-extend the low (index + 1) bits of "value" to 64 bits, treating
 * bit "index" as the sign bit.  Relies on arithmetic right shift of a
 * signed value (implementation-defined in C, but arithmetic on all
 * supported compilers).
 */
static inline uint64_t
sign_extend64(uint64_t value, int index)
{
        uint8_t shift = 63 - index;

        return ((int64_t)(value << shift) >> shift);
}

#endif /* _LINUX_BITOPS_H_ */