xref: /dragonfly/sys/dev/drm/include/linux/bitops.h (revision b29f78b5)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice unmodified, this list of conditions, and the following
12  *    disclaimer.
13  * 2. Redistributions in binary form must reproduce the above copyright
14  *    notice, this list of conditions and the following disclaimer in the
15  *    documentation and/or other materials provided with the distribution.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 #ifndef	_LINUX_BITOPS_H_
29 #define	_LINUX_BITOPS_H_
30 
31 #include <sys/types.h>
32 #include <sys/systm.h>
33 
/* Single-bit mask for bit "nr", as an unsigned long. */
#define BIT(nr)			(1UL << (nr))
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/*
 * Mask of the low "n" bits of a long.  NOTE(review): n == 0 would shift
 * by BITS_PER_LONG, which is undefined; all callers in this file pass
 * n in [1, BITS_PER_LONG].  This is NOT the same as Linux's BIT_MASK(),
 * which is defined as (1UL << ((nr) % BITS_PER_LONG)).
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold "n" bits (rounded up). */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long that contains bit "nr". */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
43 
static inline int
__ffs(int mask)
{
	/* 0-based index of the lowest set bit; -1 when mask == 0. */
	int first;

	first = ffs(mask);
	return (first - 1);
}
49 
static inline int
__fls(int mask)
{
	/* 0-based index of the highest set bit; -1 when mask == 0. */
	int last;

	last = fls(mask);
	return (last - 1);
}
55 
static inline int
__ffsl(long mask)
{
	/* Long variant of __ffs(): 0-based lowest set bit, -1 if none. */
	int first;

	first = ffsl(mask);
	return (first - 1);
}
61 
static inline int
__flsl(long mask)
{
	/* Long variant of __fls(): 0-based highest set bit, -1 if none. */
	int last;

	last = flsl(mask);
	return (last - 1);
}
67 
68 
/*
 * Find-first-zero: 0-based index of the lowest clear bit in "mask".
 * Goes through __ffs(), which takes an int, so only the low 32 bits
 * are examined.
 */
#define	ffz(mask)	__ffs(~(mask))
70 
static inline int get_count_order(unsigned int count)
{
        /*
         * Log2 of "count" rounded up to the next power of two: exact
         * powers of two yield fls(count) - 1; any other value (more
         * than one bit set) needs one more.
         */
        int log2 = fls(count) - 1;

        return (count & (count - 1)) ? log2 + 1 : log2;
}
80 
81 static inline unsigned long
82 find_first_bit(unsigned long *addr, unsigned long size)
83 {
84 	long mask;
85 	int bit;
86 
87 	for (bit = 0; size >= BITS_PER_LONG;
88 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
89 		if (*addr == 0)
90 			continue;
91 		return (bit + __ffsl(*addr));
92 	}
93 	if (size) {
94 		mask = (*addr) & BIT_MASK(size);
95 		if (mask)
96 			bit += __ffsl(mask);
97 		else
98 			bit += size;
99 	}
100 	return (bit);
101 }
102 
103 static inline unsigned long
104 find_first_zero_bit(unsigned long *addr, unsigned long size)
105 {
106 	long mask;
107 	int bit;
108 
109 	for (bit = 0; size >= BITS_PER_LONG;
110 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
111 		if (~(*addr) == 0)
112 			continue;
113 		return (bit + __ffsl(~(*addr)));
114 	}
115 	if (size) {
116 		mask = ~(*addr) & BIT_MASK(size);
117 		if (mask)
118 			bit += __ffsl(mask);
119 		else
120 			bit += size;
121 	}
122 	return (bit);
123 }
124 
125 static inline unsigned long
126 find_last_bit(unsigned long *addr, unsigned long size)
127 {
128 	long mask;
129 	int offs;
130 	int bit;
131 	int pos;
132 
133 	pos = size / BITS_PER_LONG;
134 	offs = size % BITS_PER_LONG;
135 	bit = BITS_PER_LONG * pos;
136 	addr += pos;
137 	if (offs) {
138 		mask = (*addr) & BIT_MASK(offs);
139 		if (mask)
140 			return (bit + __flsl(mask));
141 	}
142 	while (--pos) {
143 		addr--;
144 		bit -= BITS_PER_LONG;
145 		if (*addr)
146 			return (bit + __flsl(mask));
147 	}
148 	return (size);
149 }
150 
151 static inline unsigned long
152 find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
153 {
154 	long mask;
155 	int offs;
156 	int bit;
157 	int pos;
158 
159 	if (offset >= size)
160 		return (size);
161 	pos = offset / BITS_PER_LONG;
162 	offs = offset % BITS_PER_LONG;
163 	bit = BITS_PER_LONG * pos;
164 	addr += pos;
165 	if (offs) {
166 		mask = (*addr) & ~BIT_MASK(offs);
167 		if (mask)
168 			return (bit + __ffsl(mask));
169 		bit += BITS_PER_LONG;
170 		addr++;
171 	}
172 	for (size -= bit; size >= BITS_PER_LONG;
173 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
174 		if (*addr == 0)
175 			continue;
176 		return (bit + __ffsl(*addr));
177 	}
178 	if (size) {
179 		mask = (*addr) & BIT_MASK(size);
180 		if (mask)
181 			bit += __ffsl(mask);
182 		else
183 			bit += size;
184 	}
185 	return (bit);
186 }
187 
188 static inline unsigned long
189 find_next_zero_bit(unsigned long *addr, unsigned long size,
190     unsigned long offset)
191 {
192 	long mask;
193 	int offs;
194 	int bit;
195 	int pos;
196 
197 	if (offset >= size)
198 		return (size);
199 	pos = offset / BITS_PER_LONG;
200 	offs = offset % BITS_PER_LONG;
201 	bit = BITS_PER_LONG * pos;
202 	addr += pos;
203 	if (offs) {
204 		mask = ~(*addr) & ~BIT_MASK(offs);
205 		if (mask)
206 			return (bit + __ffsl(mask));
207 		bit += BITS_PER_LONG;
208 		addr++;
209 	}
210 	for (size -= bit; size >= BITS_PER_LONG;
211 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
212 		if (~(*addr) == 0)
213 			continue;
214 		return (bit + __ffsl(~(*addr)));
215 	}
216 	if (size) {
217 		mask = ~(*addr) & BIT_MASK(size);
218 		if (mask)
219 			bit += __ffsl(mask);
220 		else
221 			bit += size;
222 	}
223 	return (bit);
224 }
225 
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	/* Clear every long needed to hold "size" bits. */
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
234 
235 static inline void
236 bitmap_fill(unsigned long *addr, int size)
237 {
238 	int tail;
239 	int len;
240 
241 	len = (size / BITS_PER_LONG) * sizeof(long);
242 	memset(addr, 0xff, len);
243 	tail = size & (BITS_PER_LONG - 1);
244 	if (tail)
245 		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
246 }
247 
248 static inline int
249 bitmap_full(unsigned long *addr, int size)
250 {
251 	long mask;
252 	int tail;
253 	int len;
254 	int i;
255 
256 	len = size / BITS_PER_LONG;
257 	for (i = 0; i < len; i++)
258 		if (addr[i] != ~0UL)
259 			return (0);
260 	tail = size & (BITS_PER_LONG - 1);
261 	if (tail) {
262 		mask = BIT_MASK(tail);
263 		if ((addr[i] & mask) != mask)
264 			return (0);
265 	}
266 	return (1);
267 }
268 
269 static inline int
270 bitmap_empty(unsigned long *addr, int size)
271 {
272 	long mask;
273 	int tail;
274 	int len;
275 	int i;
276 
277 	len = size / BITS_PER_LONG;
278 	for (i = 0; i < len; i++)
279 		if (addr[i] != 0)
280 			return (0);
281 	tail = size & (BITS_PER_LONG - 1);
282 	if (tail) {
283 		mask = BIT_MASK(tail);
284 		if ((addr[i] & mask) != 0)
285 			return (0);
286 	}
287 	return (1);
288 }
289 
#define	NBLONG	(NBBY * sizeof(long))

/*
 * Atomic bit operations on an array of longs.
 *
 * Bug fix: the shifted constant must be unsigned long ("1UL").  A
 * plain "1" is an int, so shifting it by (i) % NBLONG is undefined
 * behavior for bit indices >= 31 on LP64 and can never reach bits
 * 32-63 of a 64-bit long.
 */
#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << (i) % NBLONG)

#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << (i) % NBLONG)

#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
    1UL << ((i) % NBLONG))
301 
302 static inline long
303 test_and_clear_bit(long bit, long *var)
304 {
305 	long val;
306 
307 	var += bit / (sizeof(long) * NBBY);
308 	bit %= sizeof(long) * NBBY;
309 	bit = 1 << bit;
310 	do {
311 		val = *(volatile long *)var;
312 	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);
313 
314 	return !!(val & bit);
315 }
316 
317 static inline long
318 test_and_set_bit(long bit, volatile unsigned long *var)
319 {
320 	long val;
321 
322 	var += bit / (sizeof(long) * NBBY);
323 	bit %= sizeof(long) * NBBY;
324 	bit = 1 << bit;
325 	do {
326 		val = *(volatile long *)var;
327 	} while (atomic_cmpset_long(var, val, val | bit) == 0);
328 
329 	return !!(val & bit);
330 }
331 
332 
/*
 * Mask of the bits in one long from bit "start" (mod BITS_PER_LONG)
 * up to the top of the word.
 */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
/*
 * Mask of the low "nbits" (mod BITS_PER_LONG) bits of one long; an
 * exact multiple of BITS_PER_LONG yields an all-ones mask.
 */
#define BITMAP_LAST_WORD_MASK(nbits)                                    \
(                                                                       \
        ((nbits) % BITS_PER_LONG) ?                                     \
                (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL               \
)
339 
340 
341 static inline void
342 bitmap_set(unsigned long *map, int start, int nr)
343 {
344 	unsigned long *p = map + BIT_WORD(start);
345 	const int size = start + nr;
346 	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
347 	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
348 
349 	while (nr - bits_to_set >= 0) {
350 		*p |= mask_to_set;
351 		nr -= bits_to_set;
352 		bits_to_set = BITS_PER_LONG;
353 		mask_to_set = ~0UL;
354 		p++;
355 	}
356 	if (nr) {
357 		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
358 		*p |= mask_to_set;
359 	}
360 }
361 
362 static inline void
363 bitmap_clear(unsigned long *map, int start, int nr)
364 {
365 	unsigned long *p = map + BIT_WORD(start);
366 	const int size = start + nr;
367 	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
368 	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
369 
370 	while (nr - bits_to_clear >= 0) {
371 		*p &= ~mask_to_clear;
372 		nr -= bits_to_clear;
373 		bits_to_clear = BITS_PER_LONG;
374 		mask_to_clear = ~0UL;
375 		p++;
376 	}
377 	if (nr) {
378 		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
379 		*p &= ~mask_to_clear;
380 	}
381 }
382 
/*
 * Operation selectors for __reg_op(), which manipulates an aligned
 * region of 1 << order bits.
 */
enum {
        REG_OP_ISFREE,          /* true if region is all zero bits */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};
388 
389 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
390 {
391         int nbits_reg;          /* number of bits in region */
392         int index;              /* index first long of region in bitmap */
393         int offset;             /* bit offset region in bitmap[index] */
394         int nlongs_reg;         /* num longs spanned by region in bitmap */
395         int nbitsinlong;        /* num bits of region in each spanned long */
396         unsigned long mask;     /* bitmask for one long of region */
397         int i;                  /* scans bitmap by longs */
398         int ret = 0;            /* return value */
399 
400         /*
401          * Either nlongs_reg == 1 (for small orders that fit in one long)
402          * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
403          */
404         nbits_reg = 1 << order;
405         index = pos / BITS_PER_LONG;
406         offset = pos - (index * BITS_PER_LONG);
407         nlongs_reg = BITS_TO_LONGS(nbits_reg);
408         nbitsinlong = min(nbits_reg,  BITS_PER_LONG);
409 
410         /*
411          * Can't do "mask = (1UL << nbitsinlong) - 1", as that
412          * overflows if nbitsinlong == BITS_PER_LONG.
413          */
414         mask = (1UL << (nbitsinlong - 1));
415         mask += mask - 1;
416         mask <<= offset;
417 
418         switch (reg_op) {
419         case REG_OP_ISFREE:
420                 for (i = 0; i < nlongs_reg; i++) {
421                         if (bitmap[index + i] & mask)
422                                 goto done;
423                 }
424                 ret = 1;        /* all bits in region free (zero) */
425                 break;
426 
427         case REG_OP_ALLOC:
428                 for (i = 0; i < nlongs_reg; i++)
429                         bitmap[index + i] |= mask;
430                 break;
431 
432         case REG_OP_RELEASE:
433                 for (i = 0; i < nlongs_reg; i++)
434                         bitmap[index + i] &= ~mask;
435                 break;
436         }
437 done:
438         return ret;
439 }
440 
441 /**
442  * bitmap_find_free_region - find a contiguous aligned mem region
443  *      @bitmap: array of unsigned longs corresponding to the bitmap
444  *      @bits: number of bits in the bitmap
445  *      @order: region size (log base 2 of number of bits) to find
446  *
447  * Find a region of free (zero) bits in a @bitmap of @bits bits and
448  * allocate them (set them to one).  Only consider regions of length
449  * a power (@order) of two, aligned to that power of two, which
450  * makes the search algorithm much faster.
451  *
452  * Return the bit offset in bitmap of the allocated region,
453  * or -errno on failure.
454  */
455 static inline int
456 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
457 {
458         int pos, end;           /* scans bitmap by regions of size order */
459 
460         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
461                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
462                         continue;
463                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
464                 return pos;
465         }
466         return -ENOMEM;
467 }
468 
469 /**
470  * bitmap_release_region - release allocated bitmap region
471  *      @bitmap: array of unsigned longs corresponding to the bitmap
472  *      @pos: beginning of bit region to release
473  *      @order: region size (log base 2 of number of bits) to release
474  *
475  * This is the complement to __bitmap_find_free_region() and releases
476  * the found region (by clearing it in the bitmap).
477  *
478  * No return value.
479  */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        /* Clear all 1 << order bits of the region starting at "pos". */
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
485 
486 #include <asm/bitops/non-atomic.h>
487 #include <asm/bitops/const_hweight.h>
488 
489 #endif	/* _LINUX_BITOPS_H_ */
490