/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef	_LINUX_ATOMIC_H_
#define	_LINUX_ATOMIC_H_

#include <asm/atomic.h>

typedef struct {
	volatile u_int counter;
} atomic_t;

typedef struct {
	volatile u_long counter;
} atomic64_t;

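/*
 * Illustrative sketch (not part of the original sources): a counter is
 * typically brought to a known state with atomic_set() before use.
 * 'refs' is a hypothetical variable:
 *
 *	atomic_t refs;
 *
 *	atomic_set(&refs, 1);
 */
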
#define	atomic_add(i, v)		atomic_add_return((i), (v))
#define	atomic_sub(i, v)		atomic_sub_return((i), (v))
#define	atomic_inc_return(v)		atomic_add_return(1, (v))
#define	atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define	atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define	atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define	atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define	atomic_dec_return(v)		atomic_sub_return(1, (v))

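/*
 * Illustrative sketch: the usual reference-count drop pattern built on
 * atomic_dec_and_test(). 'struct obj', its 'refs' field and
 * obj_destroy() are hypothetical:
 *
 *	static void
 *	obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refs))
 *			obj_destroy(o);
 *	}
 */
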
#define	atomic64_add(i, v)		atomic_add_return_long((i), (v))
#define	atomic64_sub(i, v)		atomic_sub_return_long((i), (v))

#define	atomic_xchg(p, v)		atomic_swap_int(&((p)->counter), v)
#define	atomic64_xchg(p, v)		atomic_swap_long(&((p)->counter), v)

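/*
 * Illustrative sketch: atomic_xchg() returns the previous value, which
 * makes it suitable for claim-once flags. 'pending' and do_work() are
 * hypothetical:
 *
 *	if (atomic_xchg(&pending, 0) != 0)
 *		do_work();
 */
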
#define	atomic_cmpset(p, o, n)		atomic_cmpset_32(&((p)->counter), o, n)

/*
 * Note: unlike Linux's atomic64_cmpxchg(), which returns the value that
 * was actually read, this returns 'o' on success and 0 on failure, so
 * callers must not rely on the old value when the compare fails.
 */
#define	atomic64_cmpxchg(p, o, n)					\
	(atomic_cmpset_long((volatile uint64_t *)(p), (o), (n)) ? (o) : (0))

/*
 * The *_return() helpers return the new counter value;
 * atomic_fetchadd_*() returns the old one, hence the adjustment.
 */
static inline int
atomic_add_return(int i, atomic_t *v)
{
	return i + atomic_fetchadd_int(&v->counter, i);
}

static inline int64_t
atomic_add_return_long(int64_t i, atomic64_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -i) - i;
}

static inline int64_t
atomic_sub_return_long(int64_t i, atomic64_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

/*
 * These use release stores (and acquire loads below), which is stronger
 * ordering than Linux's relaxed atomic_set()/atomic_read().
 */
static inline void
atomic_set(atomic_t *v, int i)
{
	atomic_store_rel_int(&v->counter, i);
}

static inline void
atomic64_set(atomic64_t *v, long i)
{
	atomic_store_rel_long(&v->counter, i);
}

static inline int
atomic_read(atomic_t *v)
{
	return atomic_load_acq_int(&v->counter);
}

static inline int64_t
atomic64_read(atomic64_t *v)
{
	return atomic_load_acq_long(&v->counter);
}

/* Unlike their Linux counterparts, these return the new counter value. */
static inline int
atomic_inc(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, 1) + 1;
}

static inline int
atomic_dec(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -1) - 1;
}

static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return atomic_cmpxchg_int(&v->counter, old, new);
}

/*
 * Atomically add 'add' to the counter unless it currently equals
 * 'unless'; returns non-zero if the addition was performed.
 */
static inline int
atomic_add_unless(atomic_t *v, int add, int unless)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == unless))
			break;
		old = atomic_cmpxchg_int(&v->counter, c, c + add);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != unless;
}

#define	atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

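/*
 * Illustrative sketch: atomic_inc_not_zero() is the usual "try to take
 * a reference" primitive; it fails once the count has already dropped
 * to zero. 'o' and its 'refs' field are hypothetical:
 *
 *	if (!atomic_inc_not_zero(&o->refs))
 *		return NULL;
 */
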
/* atomic_clear_mask: atomically clear the bits set in mask from *addr */
#define	atomic_clear_mask(mask, addr)		\
	/* atomic *addr &= ~mask; */		\
	__asm __volatile("lock andl %1, %0"	\
		: "+m" (*(addr))		\
		: "r" (~(mask))			\
		: "memory");

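/*
 * Illustrative sketch: clearing a flag bit in a plain 32-bit word.
 * 'flags' and MY_FLAG are hypothetical:
 *
 *	volatile u_int flags;
 *
 *	atomic_clear_mask(MY_FLAG, &flags);
 */
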
/*
 * On x86, locked read-modify-write instructions are already full memory
 * barriers, so only a compiler fence is needed on either side of them.
 */
#define	smp_mb__before_atomic()	cpu_ccfence()
#define	smp_mb__after_atomic()	cpu_ccfence()

static inline void
atomic_andnot(int i, atomic_t *v)
{
	/* v->counter = v->counter & ~i; */
	atomic_clear_int(&v->counter, i);
}

#endif	/* _LINUX_ATOMIC_H_ */