/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */ 28 #ifndef _LINUX_BITOPS_H_ 29 #define _LINUX_BITOPS_H_ 30 31 #ifdef __LP64__ 32 #define BITS_PER_LONG 64 33 #else 34 #define BITS_PER_LONG 32 35 #endif 36 #define BIT_MASK(n) (~0UL >> (BITS_PER_LONG - (n))) 37 #define BITS_TO_LONGS(n) howmany((n), BITS_PER_LONG) 38 #define BIT_WORD(nr) ((nr) / BITS_PER_LONG) 39 40 static inline int 41 __ffs(int mask) 42 { 43 return (ffs(mask) - 1); 44 } 45 46 static inline int 47 __fls(int mask) 48 { 49 return (fls(mask) - 1); 50 } 51 52 static inline int 53 __ffsl(long mask) 54 { 55 return (ffsl(mask) - 1); 56 } 57 58 static inline int 59 __flsl(long mask) 60 { 61 return (flsl(mask) - 1); 62 } 63 64 65 #define ffz(mask) __ffs(~(mask)) 66 67 static inline int get_count_order(unsigned int count) 68 { 69 int order; 70 71 order = fls(count) - 1; 72 if (count & (count - 1)) 73 order++; 74 return order; 75 } 76 77 static inline unsigned long 78 find_first_bit(unsigned long *addr, unsigned long size) 79 { 80 long mask; 81 int bit; 82 83 for (bit = 0; size >= BITS_PER_LONG; 84 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 85 if (*addr == 0) 86 continue; 87 return (bit + __ffsl(*addr)); 88 } 89 if (size) { 90 mask = (*addr) & BIT_MASK(size); 91 if (mask) 92 bit += __ffsl(mask); 93 else 94 bit += size; 95 } 96 return (bit); 97 } 98 99 static inline unsigned long 100 find_first_zero_bit(unsigned long *addr, unsigned long size) 101 { 102 long mask; 103 int bit; 104 105 for (bit = 0; size >= BITS_PER_LONG; 106 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 107 if (~(*addr) == 0) 108 continue; 109 return (bit + __ffsl(~(*addr))); 110 } 111 if (size) { 112 mask = ~(*addr) & BIT_MASK(size); 113 if (mask) 114 bit += __ffsl(mask); 115 else 116 bit += size; 117 } 118 return (bit); 119 } 120 121 static inline unsigned long 122 find_last_bit(unsigned long *addr, unsigned long size) 123 { 124 long mask; 125 int offs; 126 int bit; 127 int pos; 128 129 pos = size / BITS_PER_LONG; 130 offs = size % BITS_PER_LONG; 131 bit = 
BITS_PER_LONG * pos; 132 addr += pos; 133 if (offs) { 134 mask = (*addr) & BIT_MASK(offs); 135 if (mask) 136 return (bit + __flsl(mask)); 137 } 138 while (--pos) { 139 addr--; 140 bit -= BITS_PER_LONG; 141 if (*addr) 142 return (bit + __flsl(mask)); 143 } 144 return (size); 145 } 146 147 static inline unsigned long 148 find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset) 149 { 150 long mask; 151 int offs; 152 int bit; 153 int pos; 154 155 if (offset >= size) 156 return (size); 157 pos = offset / BITS_PER_LONG; 158 offs = offset % BITS_PER_LONG; 159 bit = BITS_PER_LONG * pos; 160 addr += pos; 161 if (offs) { 162 mask = (*addr) & ~BIT_MASK(offs); 163 if (mask) 164 return (bit + __ffsl(mask)); 165 bit += BITS_PER_LONG; 166 addr++; 167 } 168 for (size -= bit; size >= BITS_PER_LONG; 169 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 170 if (*addr == 0) 171 continue; 172 return (bit + __ffsl(*addr)); 173 } 174 if (size) { 175 mask = (*addr) & BIT_MASK(size); 176 if (mask) 177 bit += __ffsl(mask); 178 else 179 bit += size; 180 } 181 return (bit); 182 } 183 184 static inline unsigned long 185 find_next_zero_bit(unsigned long *addr, unsigned long size, 186 unsigned long offset) 187 { 188 long mask; 189 int offs; 190 int bit; 191 int pos; 192 193 if (offset >= size) 194 return (size); 195 pos = offset / BITS_PER_LONG; 196 offs = offset % BITS_PER_LONG; 197 bit = BITS_PER_LONG * pos; 198 addr += pos; 199 if (offs) { 200 mask = ~(*addr) & ~BIT_MASK(offs); 201 if (mask) 202 return (bit + __ffsl(mask)); 203 bit += BITS_PER_LONG; 204 addr++; 205 } 206 for (size -= bit; size >= BITS_PER_LONG; 207 size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) { 208 if (~(*addr) == 0) 209 continue; 210 return (bit + __ffsl(~(*addr))); 211 } 212 if (size) { 213 mask = ~(*addr) & BIT_MASK(size); 214 if (mask) 215 bit += __ffsl(mask); 216 else 217 bit += size; 218 } 219 return (bit); 220 } 221 222 static inline void 223 bitmap_zero(unsigned long *addr, int size) 
224 { 225 int len; 226 227 len = BITS_TO_LONGS(size) * sizeof(long); 228 memset(addr, 0, len); 229 } 230 231 static inline void 232 bitmap_fill(unsigned long *addr, int size) 233 { 234 int tail; 235 int len; 236 237 len = (size / BITS_PER_LONG) * sizeof(long); 238 memset(addr, 0xff, len); 239 tail = size & (BITS_PER_LONG - 1); 240 if (tail) 241 addr[size / BITS_PER_LONG] = BIT_MASK(tail); 242 } 243 244 static inline int 245 bitmap_full(unsigned long *addr, int size) 246 { 247 long mask; 248 int tail; 249 int len; 250 int i; 251 252 len = size / BITS_PER_LONG; 253 for (i = 0; i < len; i++) 254 if (addr[i] != ~0UL) 255 return (0); 256 tail = size & (BITS_PER_LONG - 1); 257 if (tail) { 258 mask = BIT_MASK(tail); 259 if ((addr[i] & mask) != mask) 260 return (0); 261 } 262 return (1); 263 } 264 265 static inline int 266 bitmap_empty(unsigned long *addr, int size) 267 { 268 long mask; 269 int tail; 270 int len; 271 int i; 272 273 len = size / BITS_PER_LONG; 274 for (i = 0; i < len; i++) 275 if (addr[i] != 0) 276 return (0); 277 tail = size & (BITS_PER_LONG - 1); 278 if (tail) { 279 mask = BIT_MASK(tail); 280 if ((addr[i] & mask) != 0) 281 return (0); 282 } 283 return (1); 284 } 285 286 #define NBLONG (NBBY * sizeof(long)) 287 288 #define set_bit(i, a) \ 289 atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1 << (i) % NBLONG) 290 291 #define clear_bit(i, a) \ 292 atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1 << (i) % NBLONG) 293 294 #define test_bit(i, a) \ 295 !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) & \ 296 1 << ((i) % NBLONG)) 297 298 static inline long 299 test_and_clear_bit(long bit, long *var) 300 { 301 long val; 302 303 var += bit / (sizeof(long) * NBBY); 304 bit %= sizeof(long) * NBBY; 305 bit = 1 << bit; 306 do { 307 val = *(volatile long *)var; 308 } while (atomic_cmpset_long(var, val, val & ~bit) == 0); 309 310 return !!(val & bit); 311 } 312 313 static inline long 314 test_and_set_bit(long bit, volatile unsigned long *var) 315 
{ 316 long val; 317 318 var += bit / (sizeof(long) * NBBY); 319 bit %= sizeof(long) * NBBY; 320 bit = 1 << bit; 321 do { 322 val = *(volatile long *)var; 323 } while (atomic_cmpset_long(var, val, val | bit) == 0); 324 325 return !!(val & bit); 326 } 327 328 329 #define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG)) 330 #define BITMAP_LAST_WORD_MASK(nbits) \ 331 ( \ 332 ((nbits) % BITS_PER_LONG) ? \ 333 (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL \ 334 ) 335 336 337 static inline void 338 bitmap_set(unsigned long *map, int start, int nr) 339 { 340 unsigned long *p = map + BIT_WORD(start); 341 const int size = start + nr; 342 int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG); 343 unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start); 344 345 while (nr - bits_to_set >= 0) { 346 *p |= mask_to_set; 347 nr -= bits_to_set; 348 bits_to_set = BITS_PER_LONG; 349 mask_to_set = ~0UL; 350 p++; 351 } 352 if (nr) { 353 mask_to_set &= BITMAP_LAST_WORD_MASK(size); 354 *p |= mask_to_set; 355 } 356 } 357 358 static inline void 359 bitmap_clear(unsigned long *map, int start, int nr) 360 { 361 unsigned long *p = map + BIT_WORD(start); 362 const int size = start + nr; 363 int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG); 364 unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start); 365 366 while (nr - bits_to_clear >= 0) { 367 *p &= ~mask_to_clear; 368 nr -= bits_to_clear; 369 bits_to_clear = BITS_PER_LONG; 370 mask_to_clear = ~0UL; 371 p++; 372 } 373 if (nr) { 374 mask_to_clear &= BITMAP_LAST_WORD_MASK(size); 375 *p &= ~mask_to_clear; 376 } 377 } 378 379 enum { 380 REG_OP_ISFREE, /* true if region is all zero bits */ 381 REG_OP_ALLOC, /* set all bits in region */ 382 REG_OP_RELEASE, /* clear all bits in region */ 383 }; 384 385 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op) 386 { 387 int nbits_reg; /* number of bits in region */ 388 int index; /* index first long of region in bitmap */ 389 int offset; /* bit offset 
region in bitmap[index] */ 390 int nlongs_reg; /* num longs spanned by region in bitmap */ 391 int nbitsinlong; /* num bits of region in each spanned long */ 392 unsigned long mask; /* bitmask for one long of region */ 393 int i; /* scans bitmap by longs */ 394 int ret = 0; /* return value */ 395 396 /* 397 * Either nlongs_reg == 1 (for small orders that fit in one long) 398 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.) 399 */ 400 nbits_reg = 1 << order; 401 index = pos / BITS_PER_LONG; 402 offset = pos - (index * BITS_PER_LONG); 403 nlongs_reg = BITS_TO_LONGS(nbits_reg); 404 nbitsinlong = min(nbits_reg, BITS_PER_LONG); 405 406 /* 407 * Can't do "mask = (1UL << nbitsinlong) - 1", as that 408 * overflows if nbitsinlong == BITS_PER_LONG. 409 */ 410 mask = (1UL << (nbitsinlong - 1)); 411 mask += mask - 1; 412 mask <<= offset; 413 414 switch (reg_op) { 415 case REG_OP_ISFREE: 416 for (i = 0; i < nlongs_reg; i++) { 417 if (bitmap[index + i] & mask) 418 goto done; 419 } 420 ret = 1; /* all bits in region free (zero) */ 421 break; 422 423 case REG_OP_ALLOC: 424 for (i = 0; i < nlongs_reg; i++) 425 bitmap[index + i] |= mask; 426 break; 427 428 case REG_OP_RELEASE: 429 for (i = 0; i < nlongs_reg; i++) 430 bitmap[index + i] &= ~mask; 431 break; 432 } 433 done: 434 return ret; 435 } 436 437 /** 438 * bitmap_find_free_region - find a contiguous aligned mem region 439 * @bitmap: array of unsigned longs corresponding to the bitmap 440 * @bits: number of bits in the bitmap 441 * @order: region size (log base 2 of number of bits) to find 442 * 443 * Find a region of free (zero) bits in a @bitmap of @bits bits and 444 * allocate them (set them to one). Only consider regions of length 445 * a power (@order) of two, aligned to that power of two, which 446 * makes the search algorithm much faster. 447 * 448 * Return the bit offset in bitmap of the allocated region, 449 * or -errno on failure. 
450 */ 451 static inline int 452 bitmap_find_free_region(unsigned long *bitmap, int bits, int order) 453 { 454 int pos, end; /* scans bitmap by regions of size order */ 455 456 for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) { 457 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE)) 458 continue; 459 __reg_op(bitmap, pos, order, REG_OP_ALLOC); 460 return pos; 461 } 462 return -ENOMEM; 463 } 464 465 /** 466 * bitmap_release_region - release allocated bitmap region 467 * @bitmap: array of unsigned longs corresponding to the bitmap 468 * @pos: beginning of bit region to release 469 * @order: region size (log base 2 of number of bits) to release 470 * 471 * This is the complement to __bitmap_find_free_region() and releases 472 * the found region (by clearing it in the bitmap). 473 * 474 * No return value. 475 */ 476 static inline void 477 bitmap_release_region(unsigned long *bitmap, int pos, int order) 478 { 479 __reg_op(bitmap, pos, order, REG_OP_RELEASE); 480 } 481 482 483 #endif /* _LINUX_BITOPS_H_ */ 484