/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2015 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef	_ASM_ATOMIC_H_
#define	_ASM_ATOMIC_H_

#include <sys/types.h>
#include <machine/atomic.h>
#include <linux/compiler.h>

typedef struct {
	volatile u_int counter;
} atomic_t;

typedef struct {
	volatile u_long counter;
} atomic64_t;

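/*
 * Linux arithmetic primitives, expressed with the *_return helpers below.
 * atomic_add()/atomic_sub() return void in Linux, so mapping them onto the
 * value-returning forms is harmless: callers discard the result.  The
 * *_and_test variants are true when the new value is zero, and
 * atomic_add_negative() is true when the new value is negative.
 */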
#define	atomic_add(i, v)		atomic_add_return((i), (v))
#define	atomic_sub(i, v)		atomic_sub_return((i), (v))
#define	atomic_inc_return(v)		atomic_add_return(1, (v))
#define	atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define	atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define	atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define	atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define	atomic_dec_return(v)		atomic_sub_return(1, (v))

#define	atomic64_add(i, v)		atomic_add_return_long((i), (v))
#define	atomic64_sub(i, v)		atomic_sub_return_long((i), (v))

#define	atomic_xchg(p, v)		atomic_swap_int(&((p)->counter), v)
#define	atomic64_xchg(p, v)		atomic_swap_long(&((p)->counter), v)

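/*
 * atomic_cmpset() follows the BSD convention: non-zero on a successful
 * swap, zero on failure.  This differs from Linux's atomic_cmpxchg()
 * below, which returns the value previously held in the counter.
 */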
#define	atomic_cmpset(p, o, n)		atomic_cmpset_32(&((p)->counter), o, n)

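/*
 * atomic_fetchadd_*() returns the value the counter held *before* the
 * addition, so the Linux *_return semantics (return the new value) are
 * recovered by applying the same delta to the result.  E.g. with the
 * counter at 3, atomic_add_return(2, v) stores 5 and returns 2 + 3 == 5.
 */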
static inline int
atomic_add_return(int i, atomic_t *v)
{
	return i + atomic_fetchadd_int(&v->counter, i);
}

static inline int64_t
atomic_add_return_long(int64_t i, atomic64_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -i) - i;
}

static inline int64_t
atomic_sub_return_long(int64_t i, atomic64_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

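/*
 * Linux only requires relaxed single-copy atomicity for atomic_set() and
 * atomic_read(); the release/acquire variants used below are stronger
 * than strictly necessary, trading a little ordering overhead for safety.
 */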
static inline void
atomic_set(atomic_t *v, int i)
{
	atomic_store_rel_int(&v->counter, i);
}

static inline void
atomic64_set(atomic64_t *v, long i)
{
	atomic_store_rel_long(&v->counter, i);
}

static inline int
atomic_read(atomic_t *v)
{
	return atomic_load_acq_int(&v->counter);
}

static inline int64_t
atomic64_read(atomic64_t *v)
{
	return atomic_load_acq_long(&v->counter);
}

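/*
 * Unlike their Linux counterparts, which return void, these return the
 * new counter value; callers that ignore the result are unaffected.
 */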
static inline int
atomic_inc(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, 1) + 1;
}

static inline int
atomic_dec(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -1) - 1;
}

static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return atomic_cmpxchg_int(&v->counter, old, new);
}

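/*
 * Atomically add 'add' to the counter unless it currently holds 'unless'.
 * The cmpxchg loop retries while other CPUs race the update; the return
 * value is non-zero if the addition was performed.
 */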
static inline int
atomic_add_unless(atomic_t *v, int add, int unless)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == unless))
			break;
		old = atomic_cmpxchg_int(&v->counter, c, c + add);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != unless;
}

#define	atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
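/*
 * Typical use is taking a reference only while the object is still live,
 * e.g. (hypothetical caller and field name):
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	/- object already being torn down -/
 */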
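/*
 * The mask helpers below are x86-only: they rely on the 'lock' prefix to
 * make the read-modify-write atomic, and addr must point to a 32-bit
 * quantity.  The "+m" constraint marks the operand as both read and
 * written so the compiler cannot cache *addr across the asm.
 */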
/* atomic_clear_mask: atomically clear the bits set in mask from *addr */
#define atomic_clear_mask(mask, addr)		\
	/* atomic *addr &= ~mask; */		\
	__asm __volatile("lock andl %1, %0"	\
		: "+m" (*addr)			\
		: "r" (~mask)			\
		: "memory")

/* atomic_set_mask: atomically set the bits in mask in *addr */
#define atomic_set_mask(mask, addr)		\
	/* atomic *addr |= mask; */		\
	__asm __volatile("lock orl %1, %0"	\
		: "+m" (*addr)			\
		: "r" (mask)			\
		: "memory")
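/*
 * cpu_ccfence() is a compiler barrier only.  On x86 the lock-prefixed
 * instructions used by the atomics above already act as full memory
 * barriers, so no hardware fence is needed here.
 */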
#define	smp_mb__before_atomic()	cpu_ccfence()
#define	smp_mb__after_atomic()	cpu_ccfence()

#endif	/* _ASM_ATOMIC_H_ */