1 #ifndef _ASM_GENERIC_ATOMIC_LONG_H
2 #define _ASM_GENERIC_ATOMIC_LONG_H
3 /*
4  * Copyright (C) 2005 Silicon Graphics, Inc.
5  *	Christoph Lameter
6  *
7  * Allows to provide arch independent atomic definitions without the need to
8  * edit all arch specific atomic.h files.
9  */
10 
11 #include <asm/types.h>
12 
13 /*
 * Support for atomic_long_t
15  *
16  * Casts for parameters are avoided for existing atomic functions in order to
17  * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
18  * macros of a platform may have.
19  */
20 
21 #if BITS_PER_LONG == 64
22 
23 typedef atomic64_t atomic_long_t;
24 
25 #define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)
26 
/* Atomically read the value of *@l via the atomic64 backend. */
static inline long atomic_long_read(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_read(v);
}
33 
/* Atomically set *@l to @i via the atomic64 backend. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_set(v, i);
}
40 
/* Atomically increment *@l by one via the atomic64 backend. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_inc(v);
}
47 
/* Atomically decrement *@l by one via the atomic64 backend. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_dec(v);
}
54 
/* Atomically add @i to *@l via the atomic64 backend. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_add(i, v);
}
61 
/* Atomically subtract @i from *@l via the atomic64 backend. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_sub(i, v);
}
68 
69 #ifndef __UBOOT__
/*
 * Atomically subtract @i from *@l and forward the backend's
 * sub-and-test result (true when the new value is zero, per the
 * Linux atomic API).
 */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_sub_and_test(i, v);
}
76 
/*
 * Atomically decrement *@l and forward the backend's dec-and-test
 * result (true when the new value is zero, per the Linux atomic API).
 */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_dec_and_test(v);
}
83 
/*
 * Atomically increment *@l and forward the backend's inc-and-test
 * result (true when the new value is zero, per the Linux atomic API).
 */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_inc_and_test(v);
}
90 
/*
 * Atomically add @i to *@l and forward the backend's add-negative
 * result (true when the new value is negative, per the Linux atomic API).
 */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_add_negative(i, v);
}
97 
/* Atomically add @i to *@l and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_add_return(i, v);
}
104 
/* Atomically subtract @i from *@l and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_sub_return(i, v);
}
111 
/* Atomically increment *@l and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_inc_return(v);
}
118 
/* Atomically decrement *@l and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_dec_return(v);
}
125 
/*
 * Atomically add @a to *@l unless the current value is @u;
 * forwards atomic64_add_unless()'s result (non-zero if the add
 * was performed, per the Linux atomic API).
 */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_add_unless(v, a, u);
}
132 
/* Remaining ops map directly onto the atomic64 backend macros. */
#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic64_xchg((atomic64_t *)(v), (new)))
139 #endif  /*  __UBOOT__ */
140 
141 #else  /*  BITS_PER_LONG == 64  */
142 
143 typedef atomic_t atomic_long_t;
144 
145 #define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)
/* Atomically read the value of *@l via the 32-bit atomic backend. */
static inline long atomic_long_read(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_read(v);
}
152 
/* Atomically set *@l to @i via the 32-bit atomic backend. */
static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic_t *v = (atomic_t *)l;

	atomic_set(v, i);
}
159 
/* Atomically increment *@l by one via the 32-bit atomic backend. */
static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_inc(v);
}
166 
/* Atomically decrement *@l by one via the 32-bit atomic backend. */
static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_dec(v);
}
173 
/* Atomically add @i to *@l via the 32-bit atomic backend. */
static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_add(i, v);
}
180 
/* Atomically subtract @i from *@l via the 32-bit atomic backend. */
static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_sub(i, v);
}
187 
188 #ifndef __UBOOT__
/*
 * Atomically subtract @i from *@l and forward the backend's
 * sub-and-test result (true when the new value is zero, per the
 * Linux atomic API).
 */
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_sub_and_test(i, v);
}
195 
/*
 * Atomically decrement *@l and forward the backend's dec-and-test
 * result (true when the new value is zero, per the Linux atomic API).
 */
static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_dec_and_test(v);
}
202 
/*
 * Atomically increment *@l and forward the backend's inc-and-test
 * result (true when the new value is zero, per the Linux atomic API).
 */
static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_inc_and_test(v);
}
209 
/*
 * Atomically add @i to *@l and forward the backend's add-negative
 * result (true when the new value is negative, per the Linux atomic API).
 */
static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_add_negative(i, v);
}
216 
/* Atomically add @i to *@l and return the new value. */
static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_add_return(i, v);
}
223 
/* Atomically subtract @i from *@l and return the new value. */
static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_sub_return(i, v);
}
230 
/* Atomically increment *@l and return the new value. */
static inline long atomic_long_inc_return(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_inc_return(v);
}
237 
/* Atomically decrement *@l and return the new value. */
static inline long atomic_long_dec_return(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_dec_return(v);
}
244 
/*
 * Atomically add @a to *@l unless the current value is @u;
 * forwards atomic_add_unless()'s result (non-zero if the add
 * was performed, per the Linux atomic API).
 */
static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_add_unless(v, a, u);
}
251 
/* Remaining ops map directly onto the 32-bit atomic backend macros. */
#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic_xchg((atomic_t *)(v), (new)))
258 #endif  /*  __UBOOT__ */
259 
260 #endif  /*  BITS_PER_LONG == 64  */
261 
262 #endif  /*  _ASM_GENERIC_ATOMIC_LONG_H  */
263