1 // Copyright 2016 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 #include <stdint.h>
6
7 #include "runtime.h"
8
/* panicUnaligned is implemented on the Go side of the runtime; it aborts
   with an unaligned-atomic fault and does not return.  The 64-bit
   operations below call it when handed an address that is not 8-byte
   aligned, which can happen on 32-bit targets.  The __asm__ name binds
   the C symbol to the mangled Go symbol for
   runtime/internal/atomic.panicUnaligned.  */
extern void panicUnaligned(void)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.panicUnaligned")
  __attribute__ ((noreturn));
12
/* Load: sequentially consistent atomic load of a uint32.
   The __asm__ name exports this as runtime/internal/atomic.Load;
   no_split_stack makes it safe to call without growing the goroutine
   stack.  (The same linkage pattern applies to every function in this
   file.)  */
uint32_t Load (uint32_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load")
  __attribute__ ((no_split_stack));

uint32_t
Load (uint32_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
22
/* Loadp: sequentially consistent atomic load of a pointer value.
   ptr is really a void** (the address of a pointer slot); the cast
   inside makes that explicit.  Exported as
   runtime/internal/atomic.Loadp.  */
void *Loadp (void *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loadp")
  __attribute__ ((no_split_stack));

void *
Loadp (void *ptr)
{
  return __atomic_load_n ((void **) ptr, __ATOMIC_SEQ_CST);
}
32
/* Load8: sequentially consistent atomic load of a uint8.
   Exported as runtime/internal/atomic.Load8.  */
uint8_t Load8 (uint8_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load8")
  __attribute__ ((no_split_stack));

uint8_t
Load8 (uint8_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
42
/* Load64: sequentially consistent atomic load of a uint64.
   Panics if ptr is not 8-byte aligned; Go only guarantees 4-byte
   alignment of 64-bit fields on 32-bit targets, so callers must
   arrange alignment themselves.  Exported as
   runtime/internal/atomic.Load64.  */
uint64_t Load64 (uint64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load64")
  __attribute__ ((no_split_stack));

uint64_t
Load64 (uint64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
54
/* LoadAcq: atomic load of a uint32 with acquire ordering only
   (weaker than the seq-cst Load).  Exported as
   runtime/internal/atomic.LoadAcq.  */
uint32_t LoadAcq (uint32_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcq")
  __attribute__ ((no_split_stack));

uint32_t
LoadAcq (uint32_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}
64
/* LoadAcq64: atomic load of a uint64 with acquire ordering.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.LoadAcq64.  */
uint64_t LoadAcq64 (uint64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcq64")
  __attribute__ ((no_split_stack));

uint64_t
LoadAcq64 (uint64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}
76
/* LoadAcquintptr: atomic load of a uintptr with acquire ordering.
   Exported as runtime/internal/atomic.LoadAcquintptr.  */
uintptr_t LoadAcquintptr (uintptr_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcquintptr")
  __attribute__ ((no_split_stack));

uintptr_t
LoadAcquintptr (uintptr_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}
86
/* Loaduintptr: sequentially consistent atomic load of a uintptr.
   Exported as runtime/internal/atomic.Loaduintptr.  */
uintptr_t Loaduintptr (uintptr_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loaduintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Loaduintptr (uintptr_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
96
/* Loaduint: sequentially consistent atomic load of a Go uint.
   uintgo is the C spelling of Go's uint, defined in runtime.h.
   Exported as runtime/internal/atomic.Loaduint.  */
uintgo Loaduint (uintgo *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loaduint")
  __attribute__ ((no_split_stack));

uintgo
Loaduint (uintgo *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
106
/* Loadint32: sequentially consistent atomic load of an int32.
   Exported as runtime/internal/atomic.Loadint32.  */
int32_t Loadint32 (int32_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loadint32")
  __attribute__ ((no_split_stack));

int32_t
Loadint32 (int32_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
116
/* Loadint64: sequentially consistent atomic load of an int64.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.Loadint64.  */
int64_t Loadint64 (int64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loadint64")
  __attribute__ ((no_split_stack));

int64_t
Loadint64 (int64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}
128
/* Xadd: atomically add delta to *ptr and return the NEW value
   (__atomic_add_fetch returns the post-add result).  delta is signed
   but the addition is performed in uint32, so negative deltas wrap as
   Go expects.  Exported as runtime/internal/atomic.Xadd.  */
uint32_t Xadd (uint32_t *ptr, int32_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadd")
  __attribute__ ((no_split_stack));

uint32_t
Xadd (uint32_t *ptr, int32_t delta)
{
  return __atomic_add_fetch (ptr, (uint32_t) delta, __ATOMIC_SEQ_CST);
}
138
/* Xaddint32: atomically add delta to *ptr and return the new value.
   Exported as runtime/internal/atomic.Xaddint32.  */
int32_t Xaddint32 (int32_t *ptr, int32_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xaddint32")
  __attribute__ ((no_split_stack));

int32_t
Xaddint32 (int32_t *ptr, int32_t delta)
{
  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
}
148
/* Xadd64: atomically add delta to *ptr and return the new value.
   Panics if ptr is not 8-byte aligned (see Load64).  The add is done
   in uint64 so negative deltas wrap.  Exported as
   runtime/internal/atomic.Xadd64.  */
uint64_t Xadd64 (uint64_t *ptr, int64_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadd64")
  __attribute__ ((no_split_stack));

uint64_t
Xadd64 (uint64_t *ptr, int64_t delta)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (ptr, (uint64_t) delta, __ATOMIC_SEQ_CST);
}
160
/* Xadduintptr: atomically add delta to *ptr and return the new value.
   Exported as runtime/internal/atomic.Xadduintptr.  */
uintptr_t Xadduintptr (uintptr_t *ptr, uintptr_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadduintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Xadduintptr (uintptr_t *ptr, uintptr_t delta)
{
  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
}
170
/* Xaddint64: atomically add delta to *ptr and return the new value.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.Xaddint64.  */
int64_t Xaddint64 (int64_t *ptr, int64_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xaddint64")
  __attribute__ ((no_split_stack));

int64_t
Xaddint64 (int64_t *ptr, int64_t delta)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
}
182
/* Xchg: atomically store new into *ptr and return the previous value.
   "new" is a valid identifier in C (this file is never compiled as
   C++).  Exported as runtime/internal/atomic.Xchg.  */
uint32_t Xchg (uint32_t *ptr, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchg")
  __attribute__ ((no_split_stack));

uint32_t
Xchg (uint32_t *ptr, uint32_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}
192
/* Xchgint32: atomically store new into *ptr, returning the old value.
   Exported as runtime/internal/atomic.Xchgint32.  */
int32_t Xchgint32 (int32_t *ptr, int32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchgint32")
  __attribute__ ((no_split_stack));

int32_t
Xchgint32 (int32_t *ptr, int32_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}
202
/* Xchg64: atomically store new into *ptr, returning the old value.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.Xchg64.  */
uint64_t Xchg64 (uint64_t *ptr, uint64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchg64")
  __attribute__ ((no_split_stack));

uint64_t
Xchg64 (uint64_t *ptr, uint64_t new)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}
214
/* Xchgint64: atomically store new into *ptr, returning the old value.
   NOTE(review): unlike Xchg64/Xaddint64, this variant performs no
   8-byte alignment check — presumably callers guarantee alignment;
   confirm against the Go-side declarations.  Exported as
   runtime/internal/atomic.Xchgint64.  */
int64_t Xchgint64 (int64_t *ptr, int64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchgint64")
  __attribute__ ((no_split_stack));

int64_t
Xchgint64 (int64_t *ptr, int64_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}
224
/* Xchguintptr: atomically store new into *ptr, returning the old value.
   Exported as runtime/internal/atomic.Xchguintptr.  */
uintptr_t Xchguintptr (uintptr_t *ptr, uintptr_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchguintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Xchguintptr (uintptr_t *ptr, uintptr_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}
234
/* And8: atomically AND val into *ptr (*ptr &= val); result discarded.
   Exported as runtime/internal/atomic.And8.  */
void And8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.And8")
  __attribute__ ((no_split_stack));

void
And8 (uint8_t *ptr, uint8_t val)
{
  __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
}
244
/* Or8: atomically OR val into *ptr (*ptr |= val); result discarded.
   Exported as runtime/internal/atomic.Or8.  */
void Or8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Or8")
  __attribute__ ((no_split_stack));

void
Or8 (uint8_t *ptr, uint8_t val)
{
  __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
}
254
/* And: atomically AND val into *ptr (*ptr &= val); result discarded.
   Exported as runtime/internal/atomic.And.  */
void And (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.And")
  __attribute__ ((no_split_stack));

void
And (uint32_t *ptr, uint32_t val)
{
  __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
}
264
/* Or: atomically OR val into *ptr (*ptr |= val); result discarded.
   Exported as runtime/internal/atomic.Or.  */
void Or (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Or")
  __attribute__ ((no_split_stack));

void
Or (uint32_t *ptr, uint32_t val)
{
  __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
}
274
/* Cas: compare-and-swap — if *ptr == old, store new and return true;
   otherwise return false.  Uses seq-cst ordering on success, relaxed
   on failure.  old is passed by value, so the actual value written
   back into &old on failure is simply discarded, matching Go's CAS
   contract.  Exported as runtime/internal/atomic.Cas.  */
_Bool Cas (uint32_t *ptr, uint32_t old, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Cas")
  __attribute__ ((no_split_stack));

_Bool
Cas (uint32_t *ptr, uint32_t old, uint32_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
284
/* Cas64: 64-bit compare-and-swap (see Cas).  Panics if ptr is not
   8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.Cas64.  */
_Bool Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Cas64")
  __attribute__ ((no_split_stack));

_Bool
Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
296
/* CasRel: compare-and-swap with release ordering on success (relaxed
   on failure) — weaker than the seq-cst Cas.  Exported as
   runtime/internal/atomic.CasRel.  */
_Bool CasRel (uint32_t *ptr, uint32_t old, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.CasRel")
  __attribute__ ((no_split_stack));

_Bool
CasRel (uint32_t *ptr, uint32_t old, uint32_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
}
306
/* Casint32: compare-and-swap on an int32 (see Cas).  Exported as
   runtime/internal/atomic.Casint32.  */
_Bool Casint32 (int32_t *ptr, int32_t old, int32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casint32")
  __attribute__ ((no_split_stack));

_Bool
Casint32 (int32_t *ptr, int32_t old, int32_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
316
/* Casint64: compare-and-swap on an int64 (see Cas).
   NOTE(review): no 8-byte alignment check here, unlike Cas64 —
   presumably callers guarantee alignment; verify against the Go-side
   declarations.  Exported as runtime/internal/atomic.Casint64.  */
_Bool Casint64 (int64_t *ptr, int64_t old, int64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casint64")
  __attribute__ ((no_split_stack));

_Bool
Casint64 (int64_t *ptr, int64_t old, int64_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
326
/* Casp1: compare-and-swap on a pointer slot (see Cas).  The "1"
   suffix mirrors the Go-side name; this variant performs no GC write
   barrier itself — NOTE(review): any required write barrier must be
   handled by the Go caller; confirm against the runtime's usage.
   Exported as runtime/internal/atomic.Casp1.  */
_Bool Casp1 (void **ptr, void *old, void *new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casp1")
  __attribute__ ((no_split_stack));

_Bool
Casp1 (void **ptr, void *old, void *new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
336
/* Casuintptr: compare-and-swap on a uintptr (see Cas).  Exported as
   runtime/internal/atomic.Casuintptr.  */
_Bool Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casuintptr")
  __attribute__ ((no_split_stack));

_Bool
Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}
346
/* Store: sequentially consistent atomic store of a uint32.
   Exported as runtime/internal/atomic.Store.  */
void Store (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store")
  __attribute__ ((no_split_stack));

void
Store (uint32_t *ptr, uint32_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
356
/* Store8: sequentially consistent atomic store of a uint8.
   Exported as runtime/internal/atomic.Store8.  */
void Store8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store8")
  __attribute__ ((no_split_stack));

void
Store8 (uint8_t *ptr, uint8_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
366
/* Storeint32: sequentially consistent atomic store of an int32.
   Exported as runtime/internal/atomic.Storeint32.  */
void Storeint32 (int32_t *ptr, int32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Storeint32")
  __attribute__ ((no_split_stack));

void
Storeint32 (int32_t *ptr, int32_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
376
/* Store64: sequentially consistent atomic store of a uint64.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.Store64.  */
void Store64 (uint64_t *ptr, uint64_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store64")
  __attribute__ ((no_split_stack));

void
Store64 (uint64_t *ptr, uint64_t val)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
388
/* StoreRel: atomic store of a uint32 with release ordering only
   (weaker than the seq-cst Store).  Exported as
   runtime/internal/atomic.StoreRel.  */
void StoreRel (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreRel")
  __attribute__ ((no_split_stack));

void
StoreRel (uint32_t *ptr, uint32_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}
398
/* StoreRel64: atomic store of a uint64 with release ordering.
   Panics if ptr is not 8-byte aligned (see Load64).  Exported as
   runtime/internal/atomic.StoreRel64.  */
void StoreRel64 (uint64_t *ptr, uint64_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreRel64")
  __attribute__ ((no_split_stack));

void
StoreRel64 (uint64_t *ptr, uint64_t val)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}
410
/* Storeint64: sequentially consistent atomic store of an int64.
   NOTE(review): no 8-byte alignment check here, unlike Store64 —
   presumably callers guarantee alignment; verify against the Go-side
   declarations.  Exported as runtime/internal/atomic.Storeint64.  */
void Storeint64 (int64_t *ptr, int64_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Storeint64")
  __attribute__ ((no_split_stack));

void
Storeint64 (int64_t *ptr, int64_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
420
/* StoreReluintptr: atomic store of a uintptr with release ordering.
   Exported as runtime/internal/atomic.StoreReluintptr.  */
void StoreReluintptr (uintptr_t *ptr, uintptr_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreReluintptr")
  __attribute__ ((no_split_stack));

void
StoreReluintptr (uintptr_t *ptr, uintptr_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}
430
/* Storeuintptr: sequentially consistent atomic store of a uintptr.
   Exported as runtime/internal/atomic.Storeuintptr.  */
void Storeuintptr (uintptr_t *ptr, uintptr_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Storeuintptr")
  __attribute__ ((no_split_stack));

void
Storeuintptr (uintptr_t *ptr, uintptr_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}
440
/* StorepNoWB: sequentially consistent atomic store of a pointer value
   into the slot at ptr (ptr is really a void**).  "NoWB" = no GC
   write barrier is performed here — NOTE(review): callers must only
   use this where the runtime's write-barrier rules permit; confirm
   against the Go-side documentation.  Exported as
   runtime/internal/atomic.StorepNoWB.  */
void StorepNoWB (void *ptr, void *val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StorepNoWB")
  __attribute__ ((no_split_stack));

void
StorepNoWB (void *ptr, void *val)
{
  __atomic_store_n ((void**) ptr, val, __ATOMIC_SEQ_CST);
}
450