xref: /dragonfly/sys/dev/drm/include/linux/bitops.h (revision a1626531)
1 /*-
2  * Copyright (c) 2010 Isilon Systems, Inc.
3  * Copyright (c) 2010 iX Systems, Inc.
4  * Copyright (c) 2010 Panasas, Inc.
5  * Copyright (c) 2015-2020 François Tigeot <ftigeot@wolfpond.org>
6  * All rights reserved.
7  *
8  * Redistribution and use in source and binary forms, with or without
9  * modification, are permitted provided that the following conditions
10  * are met:
11  * 1. Redistributions of source code must retain the above copyright
12  *    notice unmodified, this list of conditions, and the following
13  *    disclaimer.
14  * 2. Redistributions in binary form must reproduce the above copyright
15  *    notice, this list of conditions and the following disclaimer in the
16  *    documentation and/or other materials provided with the distribution.
17  *
18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
21  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
22  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
23  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
27  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  */
29 #ifndef	_LINUX_BITOPS_H_
30 #define	_LINUX_BITOPS_H_
31 
32 #include <asm/types.h>
33 
/* BIT(nr): an unsigned long with only bit 'nr' set. */
#define BIT(nr)			(1UL << (nr))
/* BIT_ULL(nb): 64-bit (unsigned long long) variant of BIT(). */
#define BIT_ULL(nb)		(1ULL << (nb))
/*
 * BIT_MASK(n): mask with the n lowest bits set.  NOTE: this differs from
 * Linux's BIT_MASK(), which yields a single-bit mask within a word.
 * n must be in [1, BITS_PER_LONG]: n == 0 would shift by BITS_PER_LONG,
 * which is undefined behavior (callers below guard with "if (size)").
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to hold an n-bit bitmap. */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
/* Index of the long word containing bit 'nr'. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
40 
41 #include <asm/bitops.h>
42 
43 #include <sys/types.h>
44 #include <sys/systm.h>
45 
/*
 * __ffs: 0-based index of the lowest set bit in 'mask'.
 * Yields -1 when mask == 0 (ffs() returns 0 in that case).
 */
static inline int
__ffs(int mask)
{
	int first = ffs(mask);

	return (first - 1);
}
51 
/*
 * __fls: 0-based index of the highest set bit in 'mask'.
 * Yields -1 when mask == 0 (fls() returns 0 in that case).
 */
static inline int
__fls(int mask)
{
	int last = fls(mask);

	return (last - 1);
}
57 
/*
 * __ffsl: 0-based index of the lowest set bit in the long 'mask'.
 * Yields -1 when mask == 0.
 */
static inline int
__ffsl(long mask)
{
	int first = ffsl(mask);

	return (first - 1);
}
63 
/*
 * __flsl: 0-based index of the highest set bit in the long 'mask'.
 * Yields -1 when mask == 0.
 */
static inline int
__flsl(long mask)
{
	int last = flsl(mask);

	return (last - 1);
}
69 
70 
/* ffz(mask): 0-based index of the lowest *zero* bit in 'mask'. */
#define	ffz(mask)	__ffs(~(mask))
72 
/*
 * get_count_order: smallest order such that (1 << order) >= count,
 * i.e. log2 of 'count' rounded up.
 */
static inline int get_count_order(unsigned int count)
{
	int order = fls(count) - 1;

	/* Round up when count is not an exact power of two. */
	if (count & (count - 1))
		order++;
	return order;
}
82 
/*
 * find_first_bit: index of the lowest set bit in the bitmap 'addr' of
 * 'size' bits, or 'size' if no bit is set.
 */
static inline unsigned long
find_first_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole words, skipping all-zero ones. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* nothing set: return 'size' */
	}
	return (bit);
}
104 
/*
 * find_first_zero_bit: index of the lowest clear bit in the bitmap
 * 'addr' of 'size' bits, or 'size' if every bit is set.
 */
static inline unsigned long
find_first_zero_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	/* Scan whole words, skipping all-ones words. */
	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* no zero bit in range: return 'size' */
	}
	return (bit);
}
126 
127 static inline unsigned long
128 find_last_bit(unsigned long *addr, unsigned long size)
129 {
130 	long mask;
131 	int offs;
132 	int bit;
133 	int pos;
134 
135 	pos = size / BITS_PER_LONG;
136 	offs = size % BITS_PER_LONG;
137 	bit = BITS_PER_LONG * pos;
138 	addr += pos;
139 	if (offs) {
140 		mask = (*addr) & BIT_MASK(offs);
141 		if (mask)
142 			return (bit + __flsl(mask));
143 	}
144 	while (--pos) {
145 		addr--;
146 		bit -= BITS_PER_LONG;
147 		if (*addr)
148 			return (bit + __flsl(mask));
149 	}
150 	return (size);
151 }
152 
/*
 * find_next_bit: index of the lowest set bit at position >= 'offset'
 * in the bitmap 'addr' of 'size' bits, or 'size' if there is none.
 */
static inline unsigned long
find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Unaligned start: ignore bits below 'offset' in the first word. */
	if (offs) {
		mask = (*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole words. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* nothing set: return 'size' */
	}
	return (bit);
}
189 
/*
 * find_next_zero_bit: index of the lowest clear bit at position
 * >= 'offset' in the bitmap 'addr' of 'size' bits, or 'size' if
 * there is none.
 */
static inline unsigned long
find_next_zero_bit(unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	/* Unaligned start: ignore bits below 'offset' in the first word. */
	if (offs) {
		mask = ~(*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		bit += BITS_PER_LONG;
		addr++;
	}
	/* Scan the remaining whole words, skipping all-ones words. */
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	/* Partial trailing word: only the low 'size' bits are valid. */
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;	/* no zero bit in range: return 'size' */
	}
	return (bit);
}
227 
/*
 * bitmap_zero: clear every bit of a 'size'-bit bitmap.  The whole of
 * the final partial word (if any) is cleared as well.
 */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	size_t nbytes = BITS_TO_LONGS(size) * sizeof(long);

	memset(addr, 0, nbytes);
}
236 
237 static inline void
238 bitmap_fill(unsigned long *addr, int size)
239 {
240 	int tail;
241 	int len;
242 
243 	len = (size / BITS_PER_LONG) * sizeof(long);
244 	memset(addr, 0xff, len);
245 	tail = size & (BITS_PER_LONG - 1);
246 	if (tail)
247 		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
248 }
249 
250 static inline int
251 bitmap_full(unsigned long *addr, int size)
252 {
253 	long mask;
254 	int tail;
255 	int len;
256 	int i;
257 
258 	len = size / BITS_PER_LONG;
259 	for (i = 0; i < len; i++)
260 		if (addr[i] != ~0UL)
261 			return (0);
262 	tail = size & (BITS_PER_LONG - 1);
263 	if (tail) {
264 		mask = BIT_MASK(tail);
265 		if ((addr[i] & mask) != mask)
266 			return (0);
267 	}
268 	return (1);
269 }
270 
271 static inline int
272 bitmap_empty(unsigned long *addr, int size)
273 {
274 	long mask;
275 	int tail;
276 	int len;
277 	int i;
278 
279 	len = size / BITS_PER_LONG;
280 	for (i = 0; i < len; i++)
281 		if (addr[i] != 0)
282 			return (0);
283 	tail = size & (BITS_PER_LONG - 1);
284 	if (tail) {
285 		mask = BIT_MASK(tail);
286 		if ((addr[i] & mask) != 0)
287 			return (0);
288 	}
289 	return (1);
290 }
291 
/* Number of bits in a long. */
#define	NBLONG	(NBBY * sizeof(long))

/* Atomically set bit 'i' in bitmap 'a'. */
#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Atomically clear bit 'i' in bitmap 'a'. */
#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

/* Test bit 'i' in bitmap 'a' with an acquire-ordered load; yields 0 or 1. */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
			    (1LU << ((i) % NBLONG)))
303 
/*
 * Atomically clear bit 'bit' in the bitmap at 'var' and return 1 if it
 * was previously set, 0 otherwise.  Implemented as a compare-and-swap
 * retry loop.
 */
static inline long
test_and_clear_bit(long bit, long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);	/* word containing the bit */
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;			/* 'bit' now holds the mask */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}
318 
/*
 * Non-atomic variant of test_and_clear_bit(): clear bit 'bit' in the
 * bitmap at 'ptr' and return whether it was previously set.  Callers
 * must provide their own serialization.
 */
static inline unsigned long
__test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int bits_per_word = sizeof(*ptr) * NBBY;
	volatile unsigned long *word = ptr + (bit / bits_per_word);
	const unsigned long mask = 1UL << (bit % bits_per_word);
	const unsigned long old = *word;

	*word = old & ~mask;

	return ((old & mask) != 0);
}
332 
/*
 * Atomically set bit 'bit' in the bitmap at 'var' and return 1 if it
 * was previously set, 0 otherwise.  Implemented as a compare-and-swap
 * retry loop.
 */
static inline long
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);	/* word containing the bit */
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;			/* 'bit' now holds the mask */
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}
347 
348 
/* Mask of the bits at and above position 'start' within its word. */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))
/*
 * Mask of the valid bits in the last word of an 'nbits'-bit bitmap;
 * all ones when nbits is a multiple of BITS_PER_LONG.
 */
#define BITMAP_LAST_WORD_MASK(nbits)                                    \
(                                                                       \
        ((nbits) % BITS_PER_LONG) ?                                     \
                (1UL<<((nbits) % BITS_PER_LONG))-1 : ~0UL               \
)
355 
356 
/*
 * bitmap_set: set 'nr' consecutive bits in 'map' starting at bit
 * 'start'.
 */
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* index one past the last bit */
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	/* Possibly-partial first word, then whole words. */
	while (nr - bits_to_set >= 0) {
		*p |= mask_to_set;
		nr -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	/* Partial final word. */
	if (nr) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
377 
/*
 * bitmap_clear: clear 'nr' consecutive bits in 'map' starting at bit
 * 'start'.
 */
static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;	/* index one past the last bit */
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	/* Possibly-partial first word, then whole words. */
	while (nr - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		nr -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	/* Partial final word. */
	if (nr) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
398 
/* Operation selectors for __reg_op() below. */
enum {
        REG_OP_ISFREE,          /* true if region is all zero bits */
        REG_OP_ALLOC,           /* set all bits in region */
        REG_OP_RELEASE,         /* clear all bits in region */
};
404 
405 static int __reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
406 {
407         int nbits_reg;          /* number of bits in region */
408         int index;              /* index first long of region in bitmap */
409         int offset;             /* bit offset region in bitmap[index] */
410         int nlongs_reg;         /* num longs spanned by region in bitmap */
411         int nbitsinlong;        /* num bits of region in each spanned long */
412         unsigned long mask;     /* bitmask for one long of region */
413         int i;                  /* scans bitmap by longs */
414         int ret = 0;            /* return value */
415 
416         /*
417          * Either nlongs_reg == 1 (for small orders that fit in one long)
418          * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
419          */
420         nbits_reg = 1 << order;
421         index = pos / BITS_PER_LONG;
422         offset = pos - (index * BITS_PER_LONG);
423         nlongs_reg = BITS_TO_LONGS(nbits_reg);
424         nbitsinlong = min(nbits_reg,  BITS_PER_LONG);
425 
426         /*
427          * Can't do "mask = (1UL << nbitsinlong) - 1", as that
428          * overflows if nbitsinlong == BITS_PER_LONG.
429          */
430         mask = (1UL << (nbitsinlong - 1));
431         mask += mask - 1;
432         mask <<= offset;
433 
434         switch (reg_op) {
435         case REG_OP_ISFREE:
436                 for (i = 0; i < nlongs_reg; i++) {
437                         if (bitmap[index + i] & mask)
438                                 goto done;
439                 }
440                 ret = 1;        /* all bits in region free (zero) */
441                 break;
442 
443         case REG_OP_ALLOC:
444                 for (i = 0; i < nlongs_reg; i++)
445                         bitmap[index + i] |= mask;
446                 break;
447 
448         case REG_OP_RELEASE:
449                 for (i = 0; i < nlongs_reg; i++)
450                         bitmap[index + i] &= ~mask;
451                 break;
452         }
453 done:
454         return ret;
455 }
456 
457 /**
458  * bitmap_find_free_region - find a contiguous aligned mem region
459  *      @bitmap: array of unsigned longs corresponding to the bitmap
460  *      @bits: number of bits in the bitmap
461  *      @order: region size (log base 2 of number of bits) to find
462  *
463  * Find a region of free (zero) bits in a @bitmap of @bits bits and
464  * allocate them (set them to one).  Only consider regions of length
465  * a power (@order) of two, aligned to that power of two, which
466  * makes the search algorithm much faster.
467  *
468  * Return the bit offset in bitmap of the allocated region,
469  * or -errno on failure.
470  */
471 static inline int
472 bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
473 {
474         int pos, end;           /* scans bitmap by regions of size order */
475 
476         for (pos = 0 ; (end = pos + (1 << order)) <= bits; pos = end) {
477                 if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
478                         continue;
479                 __reg_op(bitmap, pos, order, REG_OP_ALLOC);
480                 return pos;
481         }
482         return -ENOMEM;
483 }
484 
/**
 * bitmap_release_region - release allocated bitmap region
 *      @bitmap: array of unsigned longs corresponding to the bitmap
 *      @pos: beginning of bit region to release
 *      @order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
 */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
        __reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
501 
/* Returns a contiguous bitmask from bits h to l, inclusive (h >= l). */
#define GENMASK(h, l)	\
	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))

/* 64-bit (unsigned long long) variant of GENMASK(). */
#define GENMASK_ULL(h, l) \
	(((~0ULL) >> (BITS_PER_LONG_LONG - 1 - (h))) & ((~0ULL) << (l)))
508 
509 #include <asm/bitops/non-atomic.h>
510 #include <asm/bitops/const_hweight.h>
511 
/* Iterate 'bit' over every set bit in the first 'size' bits of 'addr'. */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* Iterate 'bit' over every clear bit in the first 'size' bits of 'addr'. */
#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);						\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
521 
/*
 * sign_extend64: sign-extend the low (index + 1) bits of 'value' into
 * a signed 64-bit integer; bit 'index' is treated as the sign bit.
 */
static inline int64_t
sign_extend64(uint64_t value, int index)
{
	const uint8_t width = 63 - index;

	/* Shift the sign bit up to bit 63, then arithmetic-shift back. */
	return ((int64_t)(value << width)) >> width;
}
528 
529 #endif	/* _LINUX_BITOPS_H_ */
530