/* atomic.c -- implement atomic routines for Go.

   Copyright 2011 The Go Authors. All rights reserved.
   Use of this source code is governed by a BSD-style
   license that can be found in the LICENSE file.  */

#include <stdint.h>

#include "runtime.h"

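/* The __asm__ names below are the mangled symbols that the Go
   frontend generates for the corresponding Go functions; the "_1"
   sequences encode '/' in a package path, so "sync_1atomic.SwapInt32"
   is sync/atomic.SwapInt32 and panicUnaligned resolves to
   runtime/internal/atomic.panicUnaligned.  no_split_stack suppresses
   the split-stack prologue for these small leaf functions so that
   calling them never tries to grow the stack.  */
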
extern void panicUnaligned(void)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.panicUnaligned")
  __attribute__ ((noreturn));

int32_t SwapInt32 (int32_t *, int32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.SwapInt32")
  __attribute__ ((no_split_stack));

int32_t
SwapInt32 (int32_t *addr, int32_t new)
{
  return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}

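/* The 64-bit operations require the address to be 8-byte aligned.
   On 32-bit targets 64-bit fields are not automatically aligned that
   way, and sync/atomic makes it the caller's responsibility to
   arrange for such alignment, so a misaligned address panics instead
   of silently performing a non-atomic access.  */
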
int64_t SwapInt64 (int64_t *, int64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.SwapInt64")
  __attribute__ ((no_split_stack));

int64_t
SwapInt64 (int64_t *addr, int64_t new)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}

uint32_t SwapUint32 (uint32_t *, uint32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.SwapUint32")
  __attribute__ ((no_split_stack));

uint32_t
SwapUint32 (uint32_t *addr, uint32_t new)
{
  return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}

uint64_t SwapUint64 (uint64_t *, uint64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.SwapUint64")
  __attribute__ ((no_split_stack));

uint64_t
SwapUint64 (uint64_t *addr, uint64_t new)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}

uintptr_t SwapUintptr (uintptr_t *, uintptr_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.SwapUintptr")
  __attribute__ ((no_split_stack));

uintptr_t
SwapUintptr (uintptr_t *addr, uintptr_t new)
{
  return __atomic_exchange_n (addr, new, __ATOMIC_SEQ_CST);
}

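/* The compare-and-swap routines use the strong form of
   __atomic_compare_exchange_n (weak == false), so they fail only
   when *val really differs from old.  The success memory order is
   sequentially consistent and the failure order is relaxed; the
   value written back into the local copy of old on failure is
   discarded, since the Go API only reports whether the swap
   happened.  */
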
_Bool CompareAndSwapInt32 (int32_t *, int32_t, int32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.CompareAndSwapInt32")
  __attribute__ ((no_split_stack));

_Bool
CompareAndSwapInt32 (int32_t *val, int32_t old, int32_t new)
{
  return __atomic_compare_exchange_n (val, &old, new, false, __ATOMIC_SEQ_CST,
				      __ATOMIC_RELAXED);
}

_Bool CompareAndSwapInt64 (int64_t *, int64_t, int64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.CompareAndSwapInt64")
  __attribute__ ((no_split_stack));

_Bool
CompareAndSwapInt64 (int64_t *val, int64_t old, int64_t new)
{
  if (((uintptr_t) val & 7) != 0)
    panicUnaligned ();
  return __atomic_compare_exchange_n (val, &old, new, false, __ATOMIC_SEQ_CST,
				      __ATOMIC_RELAXED);
}

_Bool CompareAndSwapUint32 (uint32_t *, uint32_t, uint32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.CompareAndSwapUint32")
  __attribute__ ((no_split_stack));

_Bool
CompareAndSwapUint32 (uint32_t *val, uint32_t old, uint32_t new)
{
  return __atomic_compare_exchange_n (val, &old, new, false, __ATOMIC_SEQ_CST,
				      __ATOMIC_RELAXED);
}

_Bool CompareAndSwapUint64 (uint64_t *, uint64_t, uint64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.CompareAndSwapUint64")
  __attribute__ ((no_split_stack));

_Bool
CompareAndSwapUint64 (uint64_t *val, uint64_t old, uint64_t new)
{
  if (((uintptr_t) val & 7) != 0)
    panicUnaligned ();
  return __atomic_compare_exchange_n (val, &old, new, false, __ATOMIC_SEQ_CST,
				      __ATOMIC_RELAXED);
}

_Bool CompareAndSwapUintptr (uintptr_t *, uintptr_t, uintptr_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.CompareAndSwapUintptr")
  __attribute__ ((no_split_stack));

_Bool
CompareAndSwapUintptr (uintptr_t *val, uintptr_t old, uintptr_t new)
{
  return __atomic_compare_exchange_n (val, &old, new, false, __ATOMIC_SEQ_CST,
				      __ATOMIC_RELAXED);
}

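/* __atomic_add_fetch returns the value after the addition, which is
   what sync/atomic's Add functions are defined to return.  Unsigned
   wrap-around gives the documented way to subtract, e.g.
   AddUint32(&x, ^uint32(0)) decrements x.  */
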
int32_t AddInt32 (int32_t *, int32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.AddInt32")
  __attribute__ ((no_split_stack));

int32_t
AddInt32 (int32_t *val, int32_t delta)
{
  return __atomic_add_fetch (val, delta, __ATOMIC_SEQ_CST);
}

uint32_t AddUint32 (uint32_t *, uint32_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.AddUint32")
  __attribute__ ((no_split_stack));

uint32_t
AddUint32 (uint32_t *val, uint32_t delta)
{
  return __atomic_add_fetch (val, delta, __ATOMIC_SEQ_CST);
}

int64_t AddInt64 (int64_t *, int64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.AddInt64")
  __attribute__ ((no_split_stack));

int64_t
AddInt64 (int64_t *val, int64_t delta)
{
  if (((uintptr_t) val & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (val, delta, __ATOMIC_SEQ_CST);
}

uint64_t AddUint64 (uint64_t *, uint64_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.AddUint64")
  __attribute__ ((no_split_stack));

uint64_t
AddUint64 (uint64_t *val, uint64_t delta)
{
  if (((uintptr_t) val & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (val, delta, __ATOMIC_SEQ_CST);
}

uintptr_t AddUintptr (uintptr_t *, uintptr_t)
  __asm__ (GOSYM_PREFIX "sync_1atomic.AddUintptr")
  __attribute__ ((no_split_stack));

uintptr_t
AddUintptr (uintptr_t *val, uintptr_t delta)
{
  return __atomic_add_fetch (val, delta, __ATOMIC_SEQ_CST);
}

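/* Loads use __ATOMIC_SEQ_CST so that they take part in the single
   total order of sequentially consistent operations established by
   the other routines in this file.  */
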
int32_t LoadInt32 (int32_t *addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadInt32")
  __attribute__ ((no_split_stack));

int32_t
LoadInt32 (int32_t *addr)
{
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

int64_t LoadInt64 (int64_t *addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadInt64")
  __attribute__ ((no_split_stack));

int64_t
LoadInt64 (int64_t *addr)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

uint32_t LoadUint32 (uint32_t *addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadUint32")
  __attribute__ ((no_split_stack));

uint32_t
LoadUint32 (uint32_t *addr)
{
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

uint64_t LoadUint64 (uint64_t *addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadUint64")
  __attribute__ ((no_split_stack));

uint64_t
LoadUint64 (uint64_t *addr)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

uintptr_t LoadUintptr (uintptr_t *addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadUintptr")
  __attribute__ ((no_split_stack));

uintptr_t
LoadUintptr (uintptr_t *addr)
{
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

void *LoadPointer (void **addr)
  __asm__ (GOSYM_PREFIX "sync_1atomic.LoadPointer")
  __attribute__ ((no_split_stack));

void *
LoadPointer (void **addr)
{
  return __atomic_load_n (addr, __ATOMIC_SEQ_CST);
}

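/* Stores likewise use __ATOMIC_SEQ_CST so that they are ordered
   against the loads and read-modify-write operations above.  */
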
void StoreInt32 (int32_t *addr, int32_t val)
  __asm__ (GOSYM_PREFIX "sync_1atomic.StoreInt32")
  __attribute__ ((no_split_stack));

void
StoreInt32 (int32_t *addr, int32_t val)
{
  __atomic_store_n (addr, val, __ATOMIC_SEQ_CST);
}

void StoreInt64 (int64_t *addr, int64_t val)
  __asm__ (GOSYM_PREFIX "sync_1atomic.StoreInt64")
  __attribute__ ((no_split_stack));

void
StoreInt64 (int64_t *addr, int64_t val)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (addr, val, __ATOMIC_SEQ_CST);
}

void StoreUint32 (uint32_t *addr, uint32_t val)
  __asm__ (GOSYM_PREFIX "sync_1atomic.StoreUint32")
  __attribute__ ((no_split_stack));

void
StoreUint32 (uint32_t *addr, uint32_t val)
{
  __atomic_store_n (addr, val, __ATOMIC_SEQ_CST);
}

void StoreUint64 (uint64_t *addr, uint64_t val)
  __asm__ (GOSYM_PREFIX "sync_1atomic.StoreUint64")
  __attribute__ ((no_split_stack));

void
StoreUint64 (uint64_t *addr, uint64_t val)
{
  if (((uintptr_t) addr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (addr, val, __ATOMIC_SEQ_CST);
}

void StoreUintptr (uintptr_t *addr, uintptr_t val)
  __asm__ (GOSYM_PREFIX "sync_1atomic.StoreUintptr")
  __attribute__ ((no_split_stack));

void
StoreUintptr (uintptr_t *addr, uintptr_t val)
{
  __atomic_store_n (addr, val, __ATOMIC_SEQ_CST);
}