xref: /dragonfly/sys/dev/drm/include/linux/bitops.h (revision 71990c18)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2015-2016 François Tigeot
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  */
29 #ifndef	_LINUX_BITOPS_H_
30 #define	_LINUX_BITOPS_H_
31 
32 #include <sys/types.h>
33 #include <sys/systm.h>
34 
/* BIT(nr): a single set bit at position nr, as an unsigned long. */
#define BIT(nr)			(1UL << (nr))
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/*
 * BIT_MASK(n): mask of the n lowest bits.
 * NOTE(review): this differs from Linux's BIT_MASK(nr), which is a single
 * bit within a word.  n must be in 1..BITS_PER_LONG; n == 0 shifts by
 * BITS_PER_LONG, which is undefined behavior — callers below guard n != 0.
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to back an n-bit bitmap (howmany = ceiling div). */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long holding bit nr. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
44 
/* Index (0-based) of the lowest set bit in mask; -1 if mask is 0. */
static inline int
__ffs(int mask)
{
	int first;

	first = ffs(mask);
	return (first - 1);
}
50 
/* Index (0-based) of the highest set bit in mask; -1 if mask is 0. */
static inline int
__fls(int mask)
{
	int last;

	last = fls(mask);
	return (last - 1);
}
56 
/* Index (0-based) of the lowest set bit in a long mask; -1 if mask is 0. */
static inline int
__ffsl(long mask)
{
	int first;

	first = ffsl(mask);
	return (first - 1);
}
62 
/* Index (0-based) of the highest set bit in a long mask; -1 if mask is 0. */
static inline int
__flsl(long mask)
{
	int last;

	last = flsl(mask);
	return (last - 1);
}
68 
69 
70 #define	ffz(mask)	__ffs(~(mask))
71 
/*
 * Order (log2, rounded up) of count: the smallest n with (1 << n) >= count.
 * Mirrors Linux's get_count_order(); returns -1 for count == 0.
 */
static inline int get_count_order(unsigned int count)
{
        int order;

        order = fls(count);
        /* Exact powers of two need no round-up. */
        if ((count & (count - 1)) == 0)
                order--;
        return order;
}
81 
/*
 * Return the index of the lowest set bit in the 'size'-bit bitmap at
 * 'addr', or 'size' if no bit is set.
 */
static inline unsigned long
find_first_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole longs first. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Partial tail word: ignore bits at and above 'size'. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* nothing found: report 'size' */
	}
	return (bit);
}
103 
/*
 * Return the index of the lowest CLEAR bit in the 'size'-bit bitmap at
 * 'addr', or 'size' if every bit is set.
 */
static inline unsigned long
find_first_zero_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole longs, skipping those that are all-ones. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Partial tail word: only the low 'size' bits are in range. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* nothing found: report 'size' */
	}
	return (bit);
}
125 
126 static inline unsigned long
127 find_last_bit(unsigned long *addr, unsigned long size)
128 {
129 	long mask;
130 	int offs;
131 	int bit;
132 	int pos;
133 
134 	pos = size / BITS_PER_LONG;
135 	offs = size % BITS_PER_LONG;
136 	bit = BITS_PER_LONG * pos;
137 	addr += pos;
138 	if (offs) {
139 		mask = (*addr) & BIT_MASK(offs);
140 		if (mask)
141 			return (bit + __flsl(mask));
142 	}
143 	while (--pos) {
144 		addr--;
145 		bit -= BITS_PER_LONG;
146 		if (*addr)
147 			return (bit + __flsl(mask));
148 	}
149 	return (size);
150 }
151 
152 static inline unsigned long
153 find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
154 {
155 	long mask;
156 	int offs;
157 	int bit;
158 	int pos;
159 
160 	if (offset >= size)
161 		return (size);
162 	pos = offset / BITS_PER_LONG;
163 	offs = offset % BITS_PER_LONG;
164 	bit = BITS_PER_LONG * pos;
165 	addr += pos;
166 	if (offs) {
167 		mask = (*addr) & ~BIT_MASK(offs);
168 		if (mask)
169 			return (bit + __ffsl(mask));
170 		bit += BITS_PER_LONG;
171 		addr++;
172 	}
173 	for (size -= bit; size >= BITS_PER_LONG;
174 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
175 		if (*addr == 0)
176 			continue;
177 		return (bit + __ffsl(*addr));
178 	}
179 	if (size) {
180 		mask = (*addr) & BIT_MASK(size);
181 		if (mask)
182 			bit += __ffsl(mask);
183 		else
184 			bit += size;
185 	}
186 	return (bit);
187 }
188 
189 static inline unsigned long
190 find_next_zero_bit(unsigned long *addr, unsigned long size,
191     unsigned long offset)
192 {
193 	long mask;
194 	int offs;
195 	int bit;
196 	int pos;
197 
198 	if (offset >= size)
199 		return (size);
200 	pos = offset / BITS_PER_LONG;
201 	offs = offset % BITS_PER_LONG;
202 	bit = BITS_PER_LONG * pos;
203 	addr += pos;
204 	if (offs) {
205 		mask = ~(*addr) & ~BIT_MASK(offs);
206 		if (mask)
207 			return (bit + __ffsl(mask));
208 		bit += BITS_PER_LONG;
209 		addr++;
210 	}
211 	for (size -= bit; size >= BITS_PER_LONG;
212 	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
213 		if (~(*addr) == 0)
214 			continue;
215 		return (bit + __ffsl(~(*addr)));
216 	}
217 	if (size) {
218 		mask = ~(*addr) & BIT_MASK(size);
219 		if (mask)
220 			bit += __ffsl(mask);
221 		else
222 			bit += size;
223 	}
224 	return (bit);
225 }
226 
/* Clear every long backing a bitmap of 'size' bits. */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
235 
236 static inline void
237 bitmap_fill(unsigned long *addr, int size)
238 {
239 	int tail;
240 	int len;
241 
242 	len = (size / BITS_PER_LONG) * sizeof(long);
243 	memset(addr, 0xff, len);
244 	tail = size & (BITS_PER_LONG - 1);
245 	if (tail)
246 		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
247 }
248 
249 static inline int
250 bitmap_full(unsigned long *addr, int size)
251 {
252 	long mask;
253 	int tail;
254 	int len;
255 	int i;
256 
257 	len = size / BITS_PER_LONG;
258 	for (i = 0; i < len; i++)
259 		if (addr[i] != ~0UL)
260 			return (0);
261 	tail = size & (BITS_PER_LONG - 1);
262 	if (tail) {
263 		mask = BIT_MASK(tail);
264 		if ((addr[i] & mask) != mask)
265 			return (0);
266 	}
267 	return (1);
268 }
269 
270 static inline int
271 bitmap_empty(unsigned long *addr, int size)
272 {
273 	long mask;
274 	int tail;
275 	int len;
276 	int i;
277 
278 	len = size / BITS_PER_LONG;
279 	for (i = 0; i < len; i++)
280 		if (addr[i] != 0)
281 			return (0);
282 	tail = size & (BITS_PER_LONG - 1);
283 	if (tail) {
284 		mask = BIT_MASK(tail);
285 		if ((addr[i] & mask) != 0)
286 			return (0);
287 	}
288 	return (1);
289 }
290 
/* Bits per long (NBBY = number of bits per byte). */
#define	NBLONG	(NBBY * sizeof(long))

/* Atomically set bit i in bitmap a. */
#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Atomically clear bit i in bitmap a. */
#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Test bit i in bitmap a with an acquire-ordered load; yields 0 or 1. */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
			    (1LU << ((i) % NBLONG)))
302 
/*
 * Atomically clear bit 'bit' in the bitmap at 'var' and return its
 * previous value (1 if it was set, 0 otherwise).
 */
static inline long
test_and_clear_bit(long bit, long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);	/* long holding the bit */
	bit %= sizeof(long) * NBBY;		/* position within that long */
	bit = 1L << bit;			/* 'bit' now holds the mask */
	/* CAS retry loop: clear the bit without losing concurrent updates. */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}
317 
318 static inline unsigned long
319 __test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
320 {
321 	const unsigned int units = (sizeof(*ptr) * NBBY);
322 	volatile unsigned long *const p = &ptr[bit / units];
323 	const unsigned long mask = (1UL << (bit % units));
324 	unsigned long v;
325 
326 	v = *p;
327 	*p &= ~mask;
328 
329 	return ((v & mask) != 0);
330 }
331 
/*
 * Atomically set bit 'bit' in the bitmap at 'var' and return its
 * previous value (1 if it was already set, 0 otherwise).
 */
static inline long
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);	/* long holding the bit */
	bit %= sizeof(long) * NBBY;		/* position within that long */
	bit = 1L << bit;			/* 'bit' now holds the mask */
	/* CAS retry loop: set the bit without losing concurrent updates. */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}
346 
347 
/* Mask covering bits of the first word of a range starting at 'start'. */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
/* Mask covering bits of the last word of a range ending at bit 'nbits'. */
#define BITMAP_LAST_WORD_MASK(nbits)                                    \
(                                                                       \
        ((nbits) % BITS_PER_LONG) ?                                     \
                (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL               \
)
354 
355 
/* Set 'nr' consecutive bits starting at bit 'start' in 'map' (non-atomic). */
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* first bit NOT to set */
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	/* Fill the (possibly partial) first word, then whole words. */
	while (nr - bits_to_set >= 0) {
		*p |= mask_to_set;
		nr -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	/* Trailing partial word, if any. */
	if (nr) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
376 
/* Clear 'nr' consecutive bits starting at bit 'start' in 'map' (non-atomic). */
static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* first bit NOT to clear */
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	/* Clear the (possibly partial) first word, then whole words. */
	while (nr - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		nr -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	/* Trailing partial word, if any. */
	if (nr) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
397 
/* Operation selectors for __reg_op() below. */
enum {
        REG_OP_ISFREE,          /* true if region is all zero bits */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};
403 
404 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
405 {
406         int nbits_reg;          /* number of bits in region */
407         int index;              /* index first long of region in bitmap */
408         int offset;             /* bit offset region in bitmap[index] */
409         int nlongs_reg;         /* num longs spanned by region in bitmap */
410         int nbitsinlong;        /* num bits of region in each spanned long */
411         unsigned long mask;     /* bitmask for one long of region */
412         int i;                  /* scans bitmap by longs */
413         int ret = 0;            /* return value */
414 
415         /*
416          * Either nlongs_reg == 1 (for small orders that fit in one long)
417          * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
418          */
419         nbits_reg = 1 << order;
420         index = pos / BITS_PER_LONG;
421         offset = pos - (index * BITS_PER_LONG);
422         nlongs_reg = BITS_TO_LONGS(nbits_reg);
423         nbitsinlong = min(nbits_reg,  BITS_PER_LONG);
424 
425         /*
426          * Can't do "mask = (1UL << nbitsinlong) - 1", as that
427          * overflows if nbitsinlong == BITS_PER_LONG.
428          */
429         mask = (1UL << (nbitsinlong - 1));
430         mask += mask - 1;
431         mask <<= offset;
432 
433         switch (reg_op) {
434         case REG_OP_ISFREE:
435                 for (i = 0; i < nlongs_reg; i++) {
436                         if (bitmap[index + i] & mask)
437                                 goto done;
438                 }
439                 ret = 1;        /* all bits in region free (zero) */
440                 break;
441 
442         case REG_OP_ALLOC:
443                 for (i = 0; i < nlongs_reg; i++)
444                         bitmap[index + i] |= mask;
445                 break;
446 
447         case REG_OP_RELEASE:
448                 for (i = 0; i < nlongs_reg; i++)
449                         bitmap[index + i] &= ~mask;
450                 break;
451         }
452 done:
453         return ret;
454 }
455 
456 /**
457  * bitmap_find_free_region - find a contiguous aligned mem region
458  *      @bitmap: array of unsigned longs corresponding to the bitmap
459  *      @bits: number of bits in the bitmap
460  *      @order: region size (log base 2 of number of bits) to find
461  *
462  * Find a region of free (zero) bits in a @bitmap of @bits bits and
463  * allocate them (set them to one).  Only consider regions of length
464  * a power (@order) of two, aligned to that power of two, which
465  * makes the search algorithm much faster.
466  *
467  * Return the bit offset in bitmap of the allocated region,
468  * or -errno on failure.
469  */
470 static inline int
471 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
472 {
473         int pos, end;           /* scans bitmap by regions of size order */
474 
475         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
476                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
477                         continue;
478                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
479                 return pos;
480         }
481         return -ENOMEM;
482 }
483 
484 /**
485  * bitmap_release_region - release allocated bitmap region
486  *      @bitmap: array of unsigned longs corresponding to the bitmap
487  *      @pos: beginning of bit region to release
488  *      @order: region size (log base 2 of number of bits) to release
489  *
490  * This is the complement to __bitmap_find_free_region() and releases
491  * the found region (by clearing it in the bitmap).
492  *
493  * No return value.
494  */
495 static inline void
496 bitmap_release_region(unsigned long *bitmap, int pos, int order)
497 {
498         __reg_op(bitmap, pos, order, REG_OP_RELEASE);
499 }
500 
/*
 * Returns a contiguous bitmask with bits h through l (inclusive) set.
 * Fully parenthesized: the previous definition left 'h', 'l', and the
 * whole expansion unparenthesized, so expression arguments (GENMASK(a+b, l))
 * and surrounding operators of higher precedence than '&' misparsed.
 */
#define GENMASK(h, l)	\
	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))
504 
505 #include <asm/bitops/non-atomic.h>
506 #include <asm/bitops/const_hweight.h>
507 
/* Iterate 'bit' over every set bit of 'addr' within [0, size). */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
512 
513 
/*
 * Sign-extend a 64-bit value whose sign bit sits at position 'index'
 * (0-based): bits above 'index' are replaced with copies of bit 'index'.
 */
static inline int64_t
sign_extend64(uint64_t value, int index)
{
	const uint8_t amount = 63 - index;
	const int64_t shifted = (int64_t)(value << amount);

	return (shifted >> amount);
}
520 
521 #endif	/* _LINUX_BITOPS_H_ */
522