xref: /dragonfly/sys/dev/drm/include/linux/bitops.h (revision d4ef6694)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * All rights reserved.
6  *
7  * Redistribution and use in source and binary forms, with or without
8  * modification, are permitted provided that the following conditions
9  * are met:
10  * 1. Redistributions of source code must retain the above copyright
11  *    notice unmodified, this list of conditions, and the following
12  *    disclaimer.
13  * 2. Redistributions in binary form must reproduce the above copyright
14  *    notice, this list of conditions and the following disclaimer in the
15  *    documentation and/or other materials provided with the distribution.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
18  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
19  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
20  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
21  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
22  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 #ifndef	_LINUX_BITOPS_H_
29 #define	_LINUX_BITOPS_H_
30 
31 #include <sys/types.h>
32 #include <sys/systm.h>
33 
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/*
 * BIT_MASK(n): mask of the n low-order bits, valid for 1 <= n <= BITS_PER_LONG.
 * NOTE(review): this differs from Linux's BIT_MASK(nr), which is a single-bit
 * mask.  n == 0 shifts by BITS_PER_LONG, which is undefined behavior; all
 * callers in this file guard with a non-zero check first.
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold n bits (howmany() rounds up). */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long word that contains bit nr. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
42 
/*
 * Index of the least significant set bit in mask, or -1 when mask == 0
 * (ffs() numbers bits from 1 and returns 0 for an empty mask).
 */
static inline int
__ffs(int mask)
{
	return ffs(mask) - 1;
}
48 
/*
 * Index of the most significant set bit in mask, or -1 when mask == 0
 * (fls() numbers bits from 1 and returns 0 for an empty mask).
 */
static inline int
__fls(int mask)
{
	return fls(mask) - 1;
}
54 
/* Long variant of __ffs(): least significant set bit index, -1 if none. */
static inline int
__ffsl(long mask)
{
	return ffsl(mask) - 1;
}
60 
/* Long variant of __fls(): most significant set bit index, -1 if none. */
static inline int
__flsl(long mask)
{
	return flsl(mask) - 1;
}
66 
67 
/* Index of the first zero bit in mask (first set bit of the complement). */
#define	ffz(mask)	__ffs(~(mask))
69 
/*
 * Rounded-up log2 of count: the order of the smallest power of two that
 * is >= count.  Mirrors the Linux helper of the same name; count == 0
 * yields -1 (fls(0) == 0), as in Linux.
 */
static inline int get_count_order(unsigned int count)
{
        int order = fls(count) - 1;

        /* Round up when count is not already a power of two. */
        if (count & (count - 1))
                order++;
        return order;
}
79 
80 static inline unsigned long
81 find_first_bit(unsigned long *addr, unsigned long size)
82 {
83 	long mask;
84 	int bit;
85 
86 	for (bit = 0; size >= BITS_PER_LONG;
87 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
88 		if (*addr == 0)
89 			continue;
90 		return (bit + __ffsl(*addr));
91 	}
92 	if (size) {
93 		mask = (*addr) & BIT_MASK(size);
94 		if (mask)
95 			bit += __ffsl(mask);
96 		else
97 			bit += size;
98 	}
99 	return (bit);
100 }
101 
102 static inline unsigned long
103 find_first_zero_bit(unsigned long *addr, unsigned long size)
104 {
105 	long mask;
106 	int bit;
107 
108 	for (bit = 0; size >= BITS_PER_LONG;
109 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
110 		if (~(*addr) == 0)
111 			continue;
112 		return (bit + __ffsl(~(*addr)));
113 	}
114 	if (size) {
115 		mask = ~(*addr) & BIT_MASK(size);
116 		if (mask)
117 			bit += __ffsl(mask);
118 		else
119 			bit += size;
120 	}
121 	return (bit);
122 }
123 
124 static inline unsigned long
125 find_last_bit(unsigned long *addr, unsigned long size)
126 {
127 	long mask;
128 	int offs;
129 	int bit;
130 	int pos;
131 
132 	pos = size / BITS_PER_LONG;
133 	offs = size % BITS_PER_LONG;
134 	bit = BITS_PER_LONG * pos;
135 	addr += pos;
136 	if (offs) {
137 		mask = (*addr) & BIT_MASK(offs);
138 		if (mask)
139 			return (bit + __flsl(mask));
140 	}
141 	while (--pos) {
142 		addr--;
143 		bit -= BITS_PER_LONG;
144 		if (*addr)
145 			return (bit + __flsl(mask));
146 	}
147 	return (size);
148 }
149 
/*
 * Index of the first set bit at or after 'offset' in the bitmap 'addr'
 * of 'size' bits.  Returns 'size' when no such bit exists.
 *
 * NOTE(review): when the word containing 'offset' is also the final,
 * partial word, bits at or above 'size' in that word are not masked off
 * here -- callers are expected to keep bits past 'size' clear.
 */
static inline unsigned long
find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		/* Partial first word: ignore bits below 'offset'. */
		mask = (*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Whole-word scan; 'size' becomes the remaining bit count. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Trailing partial word: only bits below the end of the bitmap. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
186 
/*
 * Index of the first clear (zero) bit at or after 'offset' in the
 * bitmap 'addr' of 'size' bits.  Returns 'size' when no such bit exists.
 *
 * NOTE(review): as with find_next_bit(), bits at or above 'size' in a
 * shared first/last word are not masked in the 'offs' branch; callers
 * must treat any result >= 'size' as "not found".
 */
static inline unsigned long
find_next_zero_bit(unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		/* Partial first word: zero bits at or above 'offset'. */
		mask = ~(*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Whole-word scan; 'size' becomes the remaining bit count. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Trailing partial word: only bits below the end of the bitmap. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
224 
/*
 * Clear the first 'size' bits of 'addr'.  The length is rounded up to
 * whole longs, so any bits between 'size' and the next long boundary
 * are cleared as well.
 */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
233 
234 static inline void
235 bitmap_fill(unsigned long *addr, int size)
236 {
237 	int tail;
238 	int len;
239 
240 	len = (size / BITS_PER_LONG) * sizeof(long);
241 	memset(addr, 0xff, len);
242 	tail = size & (BITS_PER_LONG - 1);
243 	if (tail)
244 		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
245 }
246 
247 static inline int
248 bitmap_full(unsigned long *addr, int size)
249 {
250 	long mask;
251 	int tail;
252 	int len;
253 	int i;
254 
255 	len = size / BITS_PER_LONG;
256 	for (i = 0; i < len; i++)
257 		if (addr[i] != ~0UL)
258 			return (0);
259 	tail = size & (BITS_PER_LONG - 1);
260 	if (tail) {
261 		mask = BIT_MASK(tail);
262 		if ((addr[i] & mask) != mask)
263 			return (0);
264 	}
265 	return (1);
266 }
267 
268 static inline int
269 bitmap_empty(unsigned long *addr, int size)
270 {
271 	long mask;
272 	int tail;
273 	int len;
274 	int i;
275 
276 	len = size / BITS_PER_LONG;
277 	for (i = 0; i < len; i++)
278 		if (addr[i] != 0)
279 			return (0);
280 	tail = size & (BITS_PER_LONG - 1);
281 	if (tail) {
282 		mask = BIT_MASK(tail);
283 		if ((addr[i] & mask) != 0)
284 			return (0);
285 	}
286 	return (1);
287 }
288 
#define	NBLONG	(NBBY * sizeof(long))

/*
 * Atomic single-bit operations on an array of longs.
 * The mask is built with 1UL, not the int constant 1: shifting a
 * 32-bit int by 32 or more (any bit index >= 32 on LP64) is undefined
 * behavior and yields the wrong mask.
 */
#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG))

#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1UL << ((i) % NBLONG))

#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
    1UL << ((i) % NBLONG))
300 
301 static inline long
302 test_and_clear_bit(long bit, long *var)
303 {
304 	long val;
305 
306 	var += bit / (sizeof(long) * NBBY);
307 	bit %= sizeof(long) * NBBY;
308 	bit = 1 << bit;
309 	do {
310 		val = *(volatile long *)var;
311 	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);
312 
313 	return !!(val & bit);
314 }
315 
316 static inline long
317 test_and_set_bit(long bit, volatile unsigned long *var)
318 {
319 	long val;
320 
321 	var += bit / (sizeof(long) * NBBY);
322 	bit %= sizeof(long) * NBBY;
323 	bit = 1 << bit;
324 	do {
325 		val = *(volatile long *)var;
326 	} while (atomic_cmpset_long(var, val, val | bit) == 0);
327 
328 	return !!(val & bit);
329 }
330 
331 
/* Mask of the bits at and above 'start' within start's word. */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
/*
 * Mask of the bits below 'nbits' within the last word; an exact
 * multiple of BITS_PER_LONG (including 0) yields a full-word mask.
 */
#define BITMAP_LAST_WORD_MASK(nbits)                                    \
(                                                                       \
        ((nbits) % BITS_PER_LONG) ?                                     \
                (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL               \
)
338 
339 
340 static inline void
341 bitmap_set(unsigned long *map, int start, int nr)
342 {
343 	unsigned long *p = map + BIT_WORD(start);
344 	const int size = start + nr;
345 	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
346 	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);
347 
348 	while (nr - bits_to_set >= 0) {
349 		*p |= mask_to_set;
350 		nr -= bits_to_set;
351 		bits_to_set = BITS_PER_LONG;
352 		mask_to_set = ~0UL;
353 		p++;
354 	}
355 	if (nr) {
356 		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
357 		*p |= mask_to_set;
358 	}
359 }
360 
361 static inline void
362 bitmap_clear(unsigned long *map, int start, int nr)
363 {
364 	unsigned long *p = map + BIT_WORD(start);
365 	const int size = start + nr;
366 	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
367 	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);
368 
369 	while (nr - bits_to_clear >= 0) {
370 		*p &= ~mask_to_clear;
371 		nr -= bits_to_clear;
372 		bits_to_clear = BITS_PER_LONG;
373 		mask_to_clear = ~0UL;
374 		p++;
375 	}
376 	if (nr) {
377 		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
378 		*p &= ~mask_to_clear;
379 	}
380 }
381 
/* Operation selectors for __reg_op(). */
enum {
        REG_OP_ISFREE,          /* true if region is all zero bits */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};
387 
388 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
389 {
390         int nbits_reg;          /* number of bits in region */
391         int index;              /* index first long of region in bitmap */
392         int offset;             /* bit offset region in bitmap[index] */
393         int nlongs_reg;         /* num longs spanned by region in bitmap */
394         int nbitsinlong;        /* num bits of region in each spanned long */
395         unsigned long mask;     /* bitmask for one long of region */
396         int i;                  /* scans bitmap by longs */
397         int ret = 0;            /* return value */
398 
399         /*
400          * Either nlongs_reg == 1 (for small orders that fit in one long)
401          * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
402          */
403         nbits_reg = 1 << order;
404         index = pos / BITS_PER_LONG;
405         offset = pos - (index * BITS_PER_LONG);
406         nlongs_reg = BITS_TO_LONGS(nbits_reg);
407         nbitsinlong = min(nbits_reg,  BITS_PER_LONG);
408 
409         /*
410          * Can't do "mask = (1UL << nbitsinlong) - 1", as that
411          * overflows if nbitsinlong == BITS_PER_LONG.
412          */
413         mask = (1UL << (nbitsinlong - 1));
414         mask += mask - 1;
415         mask <<= offset;
416 
417         switch (reg_op) {
418         case REG_OP_ISFREE:
419                 for (i = 0; i < nlongs_reg; i++) {
420                         if (bitmap[index + i] & mask)
421                                 goto done;
422                 }
423                 ret = 1;        /* all bits in region free (zero) */
424                 break;
425 
426         case REG_OP_ALLOC:
427                 for (i = 0; i < nlongs_reg; i++)
428                         bitmap[index + i] |= mask;
429                 break;
430 
431         case REG_OP_RELEASE:
432                 for (i = 0; i < nlongs_reg; i++)
433                         bitmap[index + i] &= ~mask;
434                 break;
435         }
436 done:
437         return ret;
438 }
439 
440 /**
441  * bitmap_find_free_region - find a contiguous aligned mem region
442  *      @bitmap: array of unsigned longs corresponding to the bitmap
443  *      @bits: number of bits in the bitmap
444  *      @order: region size (log base 2 of number of bits) to find
445  *
446  * Find a region of free (zero) bits in a @bitmap of @bits bits and
447  * allocate them (set them to one).  Only consider regions of length
448  * a power (@order) of two, aligned to that power of two, which
449  * makes the search algorithm much faster.
450  *
451  * Return the bit offset in bitmap of the allocated region,
452  * or -errno on failure.
453  */
454 static inline int
455 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
456 {
457         int pos, end;           /* scans bitmap by regions of size order */
458 
459         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
460                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
461                         continue;
462                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
463                 return pos;
464         }
465         return -ENOMEM;
466 }
467 
/**
 * bitmap_release_region - release allocated bitmap region
 *      @bitmap: array of unsigned longs corresponding to the bitmap
 *      @pos: beginning of bit region to release
 *      @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
 */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
484 
485 #include <asm/bitops/non-atomic.h>
486 
487 #endif	/* _LINUX_BITOPS_H_ */
488