/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2015-2017 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef	_LINUX_BITOPS_H_
#define	_LINUX_BITOPS_H_

#include <sys/types.h>
#include <sys/systm.h>

#include <asm/types.h>

#define BIT(nr)			(1UL << (nr))
#define	BITS_PER_LONG		64
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8

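/*
 * Note: BIT_MASK(n) here is a mask of the n low-order bits, e.g.
 * BIT_MASK(3) == 0x7.  This differs from Linux's BIT_MASK(nr), which
 * expands to a single bit; the find_*_bit() and bitmap_*() helpers
 * below rely on the low-bits form.
 */
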
static inline int
__ffs(int mask)
{
	return (ffs(mask) - 1);
}

static inline int
__fls(int mask)
{
	return (fls(mask) - 1);
}

static inline int
__ffsl(long mask)
{
	return (ffsl(mask) - 1);
}

static inline int
__flsl(long mask)
{
	return (flsl(mask) - 1);
}

#define	ffz(mask)	__ffs(~(mask))

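/*
 * The BSD ffs()/fls() family is 1-based and returns 0 for an all-zero
 * mask, while the Linux __ffs()/__fls() convention is a 0-based bit
 * index whose result is undefined for 0.  The wrappers above simply
 * subtract one: __ffs(0x18) == 3, __fls(0x18) == 4, and a zero mask
 * yields -1, so callers must check for it first.
 */
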
static inline int
get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}

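/*
 * get_count_order(count) is ceil(log2(count)):
 * get_count_order(8) == 3, get_count_order(9) == 4.  As with the
 * __ffs() family, the result for count == 0 is not meaningful.
 */
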
static inline unsigned long
find_first_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}

static inline unsigned long
find_first_zero_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int bit;

	for (bit = 0; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}

/*
 * Returns the index of the highest set bit below size, or size if no
 * bit is set.
 */
static inline unsigned long
find_last_bit(unsigned long *addr, unsigned long size)
{
	long mask;
	int offs;
	int bit;
	int pos;

	pos = size / BITS_PER_LONG;
	offs = size % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		mask = (*addr) & BIT_MASK(offs);
		if (mask)
			return (bit + __flsl(mask));
	}
	while (pos--) {
		addr--;
		bit -= BITS_PER_LONG;
		if (*addr)
			return (bit + __flsl(*addr));
	}
	return (size);
}

static inline unsigned long
find_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		mask = (*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		/* If the remainder of the bitmap fits in this word, stop. */
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (*addr == 0)
			continue;
		return (bit + __ffsl(*addr));
	}
	if (size) {
		mask = (*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}

static inline unsigned long
find_next_zero_bit(unsigned long *addr, unsigned long size,
    unsigned long offset)
{
	long mask;
	int offs;
	int bit;
	int pos;

	if (offset >= size)
		return (size);
	pos = offset / BITS_PER_LONG;
	offs = offset % BITS_PER_LONG;
	bit = BITS_PER_LONG * pos;
	addr += pos;
	if (offs) {
		mask = ~(*addr) & ~BIT_MASK(offs);
		if (mask)
			return (bit + __ffsl(mask));
		/* If the remainder of the bitmap fits in this word, stop. */
		if (size - bit <= BITS_PER_LONG)
			return (size);
		bit += BITS_PER_LONG;
		addr++;
	}
	for (size -= bit; size >= BITS_PER_LONG;
	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
		if (~(*addr) == 0)
			continue;
		return (bit + __ffsl(~(*addr)));
	}
	if (size) {
		mask = ~(*addr) & BIT_MASK(size);
		if (mask)
			bit += __ffsl(mask);
		else
			bit += size;
	}
	return (bit);
}

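/*
 * Illustrative (not from the original source): find_next_zero_bit()
 * is the usual way to scan for a free slot in an allocation bitmap.
 *
 *	unsigned long map[2] = { ~0UL, 0x1UL };
 *	unsigned long slot;
 *
 *	slot = find_next_zero_bit(map, 128, 0);
 *	if (slot < 128)
 *		set_bit(slot, map);	(slot == 65 here)
 */
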
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	int len;

	len = BITS_TO_LONGS(size) * sizeof(long);
	memset(addr, 0, len);
}

static inline void
bitmap_fill(unsigned long *addr, int size)
{
	int tail;
	int len;

	len = (size / BITS_PER_LONG) * sizeof(long);
	memset(addr, 0xff, len);
	tail = size & (BITS_PER_LONG - 1);
	if (tail)
		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
}

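/*
 * Unlike Linux's bitmap_fill(), which rounds up and fills whole words,
 * bitmap_fill() above leaves the bits past 'size' in the last word
 * cleared: bitmap_fill(map, 68) yields map[0] == ~0UL and map[1] == 0xf.
 */
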
static inline int
bitmap_full(unsigned long *addr, int size)
{
	long mask;
	int tail;
	int len;
	int i;

	len = size / BITS_PER_LONG;
	for (i = 0; i < len; i++)
		if (addr[i] != ~0UL)
			return (0);
	tail = size & (BITS_PER_LONG - 1);
	if (tail) {
		mask = BIT_MASK(tail);
		if ((addr[i] & mask) != mask)
			return (0);
	}
	return (1);
}

static inline int
bitmap_empty(unsigned long *addr, int size)
{
	long mask;
	int tail;
	int len;
	int i;

	len = size / BITS_PER_LONG;
	for (i = 0; i < len; i++)
		if (addr[i] != 0)
			return (0);
	tail = size & (BITS_PER_LONG - 1);
	if (tail) {
		mask = BIT_MASK(tail);
		if ((addr[i] & mask) != 0)
			return (0);
	}
	return (1);
}

#define	NBLONG	(NBBY * sizeof(long))

#define	set_bit(i, a)							\
    atomic_set_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

#define	clear_bit(i, a)							\
    atomic_clear_long(&((volatile long *)(a))[(i)/NBLONG], 1LU << ((i) % NBLONG))

#define	test_bit(i, a)							\
    !!(atomic_load_acq_long(&((volatile long *)(a))[(i)/NBLONG]) &	\
			    (1LU << ((i) % NBLONG)))

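/*
 * set_bit(), clear_bit() and test_bit() are atomic with respect to
 * other bit operations on the same word, e.g.:
 *
 *	static volatile unsigned long flags[BITS_TO_LONGS(128)];
 *
 *	set_bit(65, flags);
 *	if (test_bit(65, flags))
 *		clear_bit(65, flags);
 */
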
/* Atomically clear the bit and return its previous value. */
static inline long
test_and_clear_bit(long bit, long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~bit) == 0);

	return !!(val & bit);
}

/* Non-atomic variant: the caller must provide any locking needed. */
static inline unsigned long
__test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * NBBY);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	v = *p;
	*p &= ~mask;

	return ((v & mask) != 0);
}

/* Atomically set the bit and return its previous value. */
static inline long
test_and_set_bit(long bit, volatile unsigned long *var)
{
	long val;

	var += bit / (sizeof(long) * NBBY);
	bit %= sizeof(long) * NBBY;
	bit = 1L << bit;
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | bit) == 0);

	return !!(val & bit);
}

#define BITMAP_FIRST_WORD_MASK(start)	(~0UL << ((start) % BITS_PER_LONG))
#define BITMAP_LAST_WORD_MASK(nbits)					\
(									\
	((nbits) % BITS_PER_LONG) ?					\
		(1UL << ((nbits) % BITS_PER_LONG)) - 1 : ~0UL		\
)

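/*
 * BITMAP_FIRST_WORD_MASK(start) keeps the bits at and above 'start'
 * within its word, BITMAP_LAST_WORD_MASK(nbits) keeps the bits below
 * 'nbits': BITMAP_FIRST_WORD_MASK(68) == ~0xfUL and
 * BITMAP_LAST_WORD_MASK(68) == 0xfUL.
 */
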
static inline void
bitmap_set(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (nr - bits_to_set >= 0) {
		*p |= mask_to_set;
		nr -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (nr) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}

static inline void
bitmap_clear(unsigned long *map, int start, int nr)
{
	unsigned long *p = map + BIT_WORD(start);
	const int size = start + nr;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (nr - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		nr -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (nr) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}

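/*
 * A range may span word boundaries: bitmap_set(map, 60, 8) sets bits
 * 60-63 of map[0] and bits 0-3 of map[1]; bitmap_clear(map, 60, 8)
 * clears them again.
 */
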
enum {
	REG_OP_ISFREE,		/* true if region is all zero bits */
	REG_OP_ALLOC,		/* set all bits in region */
	REG_OP_RELEASE,		/* clear all bits in region */
};

static inline int
__reg_op(unsigned long *bitmap, int pos, int order, int reg_op)
{
	int nbits_reg;		/* number of bits in region */
	int index;		/* index first long of region in bitmap */
	int offset;		/* bit offset region in bitmap[index] */
	int nlongs_reg;		/* num longs spanned by region in bitmap */
	int nbitsinlong;	/* num bits of region in each spanned long */
	unsigned long mask;	/* bitmask for one long of region */
	int i;			/* scans bitmap by longs */
	int ret = 0;		/* return value */

	/*
	 * Either nlongs_reg == 1 (for small orders that fit in one long)
	 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
	 */
	nbits_reg = 1 << order;
	index = pos / BITS_PER_LONG;
	offset = pos - (index * BITS_PER_LONG);
	nlongs_reg = BITS_TO_LONGS(nbits_reg);
	nbitsinlong = min(nbits_reg, BITS_PER_LONG);

	/*
	 * Can't do "mask = (1UL << nbitsinlong) - 1", as that
	 * overflows if nbitsinlong == BITS_PER_LONG.
	 */
	mask = (1UL << (nbitsinlong - 1));
	mask += mask - 1;
	mask <<= offset;

	switch (reg_op) {
	case REG_OP_ISFREE:
		for (i = 0; i < nlongs_reg; i++) {
			if (bitmap[index + i] & mask)
				goto done;
		}
		ret = 1;	/* all bits in region free (zero) */
		break;

	case REG_OP_ALLOC:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] |= mask;
		break;

	case REG_OP_RELEASE:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] &= ~mask;
		break;
	}
done:
	return ret;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@bits: number of bits in the bitmap
 *	@order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one).  Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Return the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
static inline int
bitmap_find_free_region(unsigned long *bitmap, int bits, int order)
{
	int pos, end;		/* scans bitmap by regions of size order */

	for (pos = 0; (end = pos + (1 << order)) <= bits; pos = end) {
		if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
			continue;
		__reg_op(bitmap, pos, order, REG_OP_ALLOC);
		return pos;
	}
	return -ENOMEM;
}

/**
 * bitmap_release_region - release allocated bitmap region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@pos: beginning of bit region to release
 *	@order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
 */
static inline void
bitmap_release_region(unsigned long *bitmap, int pos, int order)
{
	__reg_op(bitmap, pos, order, REG_OP_RELEASE);
}

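/*
 * Illustrative usage: carve out an aligned block of 1 << order bits
 * and hand it back later.
 *
 *	unsigned long map[BITS_TO_LONGS(128)] = { 0 };
 *	int pos;
 *
 *	pos = bitmap_find_free_region(map, 128, 2);
 *	if (pos >= 0)
 *		bitmap_release_region(map, pos, 2);
 */
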
/* Returns a contiguous bitmask from bit h down to bit l, inclusive */
#define GENMASK(h, l)	\
	(((~0UL) >> (BITS_PER_LONG - (h) - 1)) & ((~0UL) << (l)))

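/* For example, GENMASK(7, 4) == 0xf0 and GENMASK(63, 0) == ~0UL. */
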
#include <asm/bitops/non-atomic.h>
#include <asm/bitops/const_hweight.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

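/*
 * Illustrative usage:
 *
 *	unsigned long map[2] = { 0x5UL, 0x1UL };
 *	int bit;
 *
 *	for_each_set_bit(bit, map, 128)
 *		kprintf("bit %d\n", bit);	(prints 0, 2 and 64)
 */
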
static inline int64_t
sign_extend64(uint64_t value, int index)
{
	uint8_t shift = 63 - index;
	return (int64_t)(value << shift) >> shift;
}

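/*
 * sign_extend64() treats bit 'index' as the sign bit:
 * sign_extend64(0x80, 7) == -128, sign_extend64(0x7f, 7) == 127.
 */
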
#endif	/* _LINUX_BITOPS_H_ */