xref: /dragonfly/sys/dev/drm/include/linux/bitops.h (revision 2cecdd68)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2015-2020 François Tigeot <ftigeot@wolfpond.org>
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  */
29 #ifndef	_LINUX_BITOPS_H_
30 #define	_LINUX_BITOPS_H_
31 
32 #include <asm/types.h>
33 
#define BIT(nr)			(1UL << (nr))
#define BIT_ULL(nb)		(1ULL << (nb))
/*
 * Mask covering the n lowest bits.
 * NOTE(review): this is NOT Linux's BIT_MASK() (which yields a single-bit
 * within-word mask); local callers always pass 0 < n <= BITS_PER_LONG,
 * since n == 0 would shift by BITS_PER_LONG, which is undefined behavior.
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to back an n-bit bitmap. */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long that contains bit nr. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
40 
41 #include <asm/bitops.h>
42 
43 #include <sys/types.h>
44 #include <sys/systm.h>
45 
/*
 * __ffs - 0-based index of the least significant set bit in mask.
 * Returns -1 when no bit is set (ffs() returns 0 in that case).
 */
static inline int
__ffs(int mask)
{
	int first = ffs(mask);

	return (first - 1);
}
51 
/*
 * __fls - 0-based index of the most significant set bit in mask.
 * Returns -1 when no bit is set (fls() returns 0 in that case).
 */
static inline int
__fls(int mask)
{
	int last = fls(mask);

	return (last - 1);
}
57 
/*
 * __ffsl - 0-based index of the least significant set bit in a long.
 * Returns -1 when no bit is set (ffsl() returns 0 in that case).
 */
static inline int
__ffsl(long mask)
{
	int first = ffsl(mask);

	return (first - 1);
}
63 
/*
 * __flsl - 0-based index of the most significant set bit in a long.
 * Returns -1 when no bit is set (flsl() returns 0 in that case).
 */
static inline int
__flsl(long mask)
{
	int last = flsl(mask);

	return (last - 1);
}
69 
/*
 * fls64 - find leftmost set bit in a 64-bit word
 *
 * Returns 0 if no bit is set or the bit
 * position counted from 1 to 64 otherwise.
 *
 * NOTE(review): delegates to flsl(), so this assumes long is 64 bits
 * wide (LP64); on an ILP32 target the upper half of x would be
 * truncated -- confirm this header is only built for 64-bit targets.
 */
static inline int
fls64(__u64 x)
{
	return flsl(x);
}
81 
/* ffz - 0-based index of the first zero bit in mask (-1 if all ones). */
#define	ffz(mask)	__ffs(~(mask))
83 
/*
 * get_count_order - smallest order such that (1 << order) >= count,
 * i.e. log2 of count rounded up to the next integer.
 */
static inline int get_count_order(unsigned int count)
{
	const int highest = fls(count) - 1;

	/* Exact powers of two keep their order; everything else rounds up. */
	return (count & (count - 1)) ? highest + 1 : highest;
}
93 
/*
 * find_first_bit - index of the lowest set bit in a bitmap
 * @addr: bitmap backed by an array of longs
 * @size: length of the bitmap in bits
 *
 * Returns the 0-based index of the first set bit, or @size when the
 * bitmap contains no set bit.
 */
static inline unsigned long
find_first_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole words first. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
115 
/*
 * find_first_zero_bit - index of the lowest clear bit in a bitmap
 * @addr: bitmap backed by an array of longs
 * @size: length of the bitmap in bits
 *
 * Returns the 0-based index of the first zero bit, or @size when every
 * bit is set.  Works by scanning the complemented words with __ffsl().
 */
static inline unsigned long
find_first_zero_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole words: a word of all ones has no zero bit. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
137 
138 static inline unsigned long
find_last_bit(unsigned long * addr,unsigned long size)139 find_last_bit(unsigned long *addr, unsigned long size)
140 {
141 	long mask;
142 	int offs;
143 	int bit;
144 	int pos;
145 
146 	pos = size / BITS_PER_LONG;
147 	offs = size % BITS_PER_LONG;
148 	bit = BITS_PER_LONG * pos;
149 	addr += pos;
150 	if (offs) {
151 		mask = (*addr) & BIT_MASK(offs);
152 		if (mask)
153 			return (bit + __flsl(mask));
154 	}
155 	while (--pos) {
156 		addr--;
157 		bit -= BITS_PER_LONG;
158 		if (*addr)
159 			return (bit + __flsl(mask));
160 	}
161 	return (size);
162 }
163 
/*
 * find_next_bit - find the next set bit at or after @offset
 * @addr: bitmap backed by an array of longs
 * @size: length of the bitmap in bits
 * @offset: bit index to start searching from
 *
 * Returns the index of the next set bit, or @size when none remains.
 *
 * NOTE(review): when the partial-first-word path is taken, bits of that
 * word at or above @size are not masked off; callers appear to rely on
 * bits past @size in the backing storage being zero -- confirm.
 */
static inline unsigned long
find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Partial first word: ignore bits below 'offs'. */
	if (offs) {
		mask = (*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan whole words. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
200 
/*
 * find_next_zero_bit - find the next clear bit at or after @offset
 * @addr: bitmap backed by an array of longs
 * @size: length of the bitmap in bits
 * @offset: bit index to start searching from
 *
 * Returns the index of the next zero bit, or @size when none remains.
 *
 * NOTE(review): as with find_next_bit(), the partial-first-word path
 * does not mask off bits at or above @size; confirm callers keep the
 * storage past @size in a known state.
 */
static inline unsigned long
find_next_zero_bit(unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Partial first word: search complemented bits at/above 'offs'. */
	if (offs) {
		mask = ~(*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan whole words; skip words that are all ones. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}
238 
/*
 * bitmap_zero - clear every bit of a size-bit bitmap.
 * Zeroes all backing longs, including any padding bits in the last word.
 */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
247 
248 static inline void
bitmap_fill(unsigned long * addr,int size)249 bitmap_fill(unsigned long *addr, int size)
250 {
251 	int tail;
252 	int len;
253 
254 	len = (size / BITS_PER_LONG) * sizeof(long);
255 	memset(addr, 0xff, len);
256 	tail = size & (BITS_PER_LONG - 1);
257 	if (tail)
258 		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
259 }
260 
261 static inline int
bitmap_full(unsigned long * addr,int size)262 bitmap_full(unsigned long *addr, int size)
263 {
264 	long mask;
265 	int tail;
266 	int len;
267 	int i;
268 
269 	len = size / BITS_PER_LONG;
270 	for (i = 0; i < len; i++)
271 		if (addr[i] != ~0UL)
272 			return (0);
273 	tail = size & (BITS_PER_LONG - 1);
274 	if (tail) {
275 		mask = BIT_MASK(tail);
276 		if ((addr[i] & mask) != mask)
277 			return (0);
278 	}
279 	return (1);
280 }
281 
282 static inline int
bitmap_empty(unsigned long * addr,int size)283 bitmap_empty(unsigned long *addr, int size)
284 {
285 	long mask;
286 	int tail;
287 	int len;
288 	int i;
289 
290 	len = size / BITS_PER_LONG;
291 	for (i = 0; i < len; i++)
292 		if (addr[i] != 0)
293 			return (0);
294 	tail = size & (BITS_PER_LONG - 1);
295 	if (tail) {
296 		mask = BIT_MASK(tail);
297 		if ((addr[i] & mask) != 0)
298 			return (0);
299 	}
300 	return (1);
301 }
302 
/* Number of bits in a long. */
#define	NBLONG	(NBBY * sizeof(long))

/* Atomically set bit i in bitmap a. */
#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Atomically clear bit i in bitmap a. */
#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Return 1 if bit i of bitmap a is set, 0 otherwise (acquire-ordered load). */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
			    (1LU << ((i) % NBLONG)))
314 
/*
 * test_and_clear_bit - atomically clear bit 'bit' in bitmap 'var' and
 * return its previous value (1 if it was set, 0 otherwise).
 *
 * The compare-and-swap loop retries until the word is updated without
 * interference from concurrent writers.
 *
 * NOTE(review): takes 'long *var' while test_and_set_bit() takes
 * 'volatile unsigned long *var' -- the signatures should probably match.
 */
static inline long
test_and_clear_bit(long bit, long *var)
{
	long val;

	/* Locate the word that holds the bit. */
	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;	/* 'bit' now holds the mask, not the index */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}
329 
330 static inline unsigned long
__test_and_clear_bit(unsigned int bit,volatile unsigned long * ptr)331 __test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
332 {
333 	const unsigned int units = (sizeof(*ptr) * NBBY);
334 	volatile unsigned long *const p = &ptr[bit / units];
335 	const unsigned long mask = (1UL << (bit % units));
336 	unsigned long v;
337 
338 	v = *p;
339 	*p &= ~mask;
340 
341 	return ((v & mask) != 0);
342 }
343 
/*
 * test_and_set_bit - atomically set bit 'bit' in bitmap 'var' and
 * return its previous value (1 if it was already set, 0 otherwise).
 *
 * The compare-and-swap loop retries until the word is updated without
 * interference from concurrent writers.
 */
static inline long
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	/* Locate the word that holds the bit. */
	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;	/* 'bit' now holds the mask, not the index */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}
358 
359 
/* Mask of the bits at and above position 'start' within its word. */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
/*
 * Mask of the valid low bits in the final word of an nbits-long bitmap;
 * ~0UL when nbits is an exact multiple of BITS_PER_LONG.
 */
#define BITMAP_LAST_WORD_MASK(nbits)                                    \
(                                                                       \
        ((nbits) % BITS_PER_LONG) ?                                     \
                (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL               \
)
366 
367 
/*
 * bitmap_set - set @nr consecutive bits starting at bit @start in @map.
 * Non-atomic; callers must provide their own serialization.
 */
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* one past the last bit to set */
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	/* First (possibly partial) word, then any fully-covered words. */
	while (nr - bits_to_set >= 0) {
		*p |= mask_to_set;
		nr -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	/* Remaining bits in the final partial word. */
	if (nr) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
388 
/*
 * bitmap_clear - clear @nr consecutive bits starting at bit @start in @map.
 * Non-atomic; callers must provide their own serialization.
 */
static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* one past the last bit to clear */
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	/* First (possibly partial) word, then any fully-covered words. */
	while (nr - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		nr -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	/* Remaining bits in the final partial word. */
	if (nr) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
409 
/* Operation selectors for __reg_op(). */
enum {
        REG_OP_ISFREE,          /* true if region is all zero bits */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};

/*
 * __reg_op - apply @reg_op to the naturally-aligned (1 << order)-bit
 * region of @bitmap that starts at bit @pos.
 *
 * Returns 1 for REG_OP_ISFREE when the whole region is zero; returns 0
 * otherwise (and for the ALLOC/RELEASE mutating operations).
 */
static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
        int nbits_reg;          /* number of bits in region */
        int index;              /* index first long of region in bitmap */
        int offset;             /* bit offset region in bitmap[index] */
        int nlongs_reg;         /* num longs spanned by region in bitmap */
        int nbitsinlong;        /* num bits of region in each spanned long */
        unsigned long mask;     /* bitmask for one long of region */
        int i;                  /* scans bitmap by longs */
        int ret = 0;            /* return value */

        /*
         * Either nlongs_reg == 1 (for small orders that fit in one long)
         * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
         */
        nbits_reg = 1 << order;
        index = pos / BITS_PER_LONG;
        offset = pos - (index * BITS_PER_LONG);
        nlongs_reg = BITS_TO_LONGS(nbits_reg);
        nbitsinlong = min(nbits_reg,  BITS_PER_LONG);

        /*
         * Can't do "mask = (1UL << nbitsinlong) - 1", as that
         * overflows if nbitsinlong == BITS_PER_LONG.
         */
        mask = (1UL << (nbitsinlong - 1));
        mask += mask - 1;
        mask <<= offset;

        switch (reg_op) {
        case REG_OP_ISFREE:
                for (i = 0; i < nlongs_reg; i++) {
                        if (bitmap[index + i] & mask)
                                goto done;
                }
                ret = 1;        /* all bits in region free (zero) */
                break;

        case REG_OP_ALLOC:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] |= mask;
                break;

        case REG_OP_RELEASE:
                for (i = 0; i < nlongs_reg; i++)
                        bitmap[index + i] &= ~mask;
                break;
        }
done:
        return ret;
}
467 
468 /**
469  * bitmap_find_free_region - find a contiguous aligned mem region
470  *      @bitmap: array of unsigned longs corresponding to the bitmap
471  *      @bits: number of bits in the bitmap
472  *      @order: region size (log base 2 of number of bits) to find
473  *
474  * Find a region of free (zero) bits in a @bitmap of @bits bits and
475  * allocate them (set them to one).  Only consider regions of length
476  * a power (@order) of two, aligned to that power of two, which
477  * makes the search algorithm much faster.
478  *
479  * Return the bit offset in bitmap of the allocated region,
480  * or -errno on failure.
481  */
482 static inline int
bitmap_find_free_region(unsigned long * bitmap,int bits,int order)483 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
484 {
485         int pos, end;           /* scans bitmap by regions of size order */
486 
487         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
488                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
489                         continue;
490                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
491                 return pos;
492         }
493         return -ENOMEM;
494 }
495 
/**
 * bitmap_release_region - release allocated bitmap region
 *      @bitmap: array of unsigned longs corresponding to the bitmap
 *      @pos: beginning of bit region to release
 *      @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
 */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
512 
/*
 * Returns a contiguous bitmask from bits h to l, inclusive (h >= l).
 * NOTE(review): h must be < BITS_PER_LONG (resp. BITS_PER_LONG_LONG for
 * GENMASK_ULL); larger values make the right shift undefined behavior.
 */
#define GENMASK(h, l)	\
	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))

/* 64-bit variant of GENMASK(). */
#define GENMASK_ULL(h, l) \
	(((~0ULL) >> (BITS_PER_LONG_LONG - 1 - (h))) & ((~0ULL) << (l)))
519 
520 #include <asm/bitops/non-atomic.h>
521 #include <asm/bitops/const_hweight.h>
522 
/* Iterate 'bit' over every set bit in the first 'size' bits of 'addr'. */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* Iterate 'bit' over every clear bit in the first 'size' bits of 'addr'. */
#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);						\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
532 
/*
 * sign_extend64 - sign-extend a value whose sign bit is at position
 * 'index' (0-based) into a full 64-bit signed integer.
 */
static inline int64_t
sign_extend64(uint64_t value, int index)
{
	const uint8_t left = 63 - index;

	/* Move the sign bit to bit 63, then arithmetic-shift it back down. */
	return ((int64_t)(value << left)) >> left;
}
539 
540 #endif	/* _LINUX_BITOPS_H_ */
541