// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

#include <stdint.h>

#include "runtime.h"

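/* Each function below implements one entry point of Go's
   runtime/internal/atomic package on top of the GCC __atomic
   builtins.  The __asm__ directive binds it to the mangled Go
   symbol ("_1" encodes the '/' in the package path), and
   no_split_stack keeps it safe to call from contexts where the
   stack must not grow.  */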
extern void panicUnaligned(void)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.panicUnaligned")
  __attribute__ ((noreturn));

uint32_t Load (uint32_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load")
  __attribute__ ((no_split_stack));

uint32_t
Load (uint32_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

void *Loadp (void *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loadp")
  __attribute__ ((no_split_stack));

void *
Loadp (void *ptr)
{
  return __atomic_load_n ((void **) ptr, __ATOMIC_SEQ_CST);
}

uint8_t Load8 (uint8_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load8")
  __attribute__ ((no_split_stack));

uint8_t
Load8 (uint8_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

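/* Go requires 64-bit atomic operations to panic on addresses that
   are not 8-byte aligned (which can otherwise tear on 32-bit
   targets), so every 64-bit operation checks alignment first.  */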
uint64_t Load64 (uint64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Load64")
  __attribute__ ((no_split_stack));

uint64_t
Load64 (uint64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

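/* The Acq/Rel variants provide only acquire or release ordering,
   for runtime paths that do not need sequential consistency.  */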
uint32_t LoadAcq (uint32_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcq")
  __attribute__ ((no_split_stack));

uint32_t
LoadAcq (uint32_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}

uint64_t LoadAcq64 (uint64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcq64")
  __attribute__ ((no_split_stack));

uint64_t
LoadAcq64 (uint64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}

uintptr_t LoadAcquintptr (uintptr_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.LoadAcquintptr")
  __attribute__ ((no_split_stack));

uintptr_t
LoadAcquintptr (uintptr_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
}

uintptr_t Loaduintptr (uintptr_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loaduintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Loaduintptr (uintptr_t *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

uintgo Loaduint (uintgo *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loaduint")
  __attribute__ ((no_split_stack));

uintgo
Loaduint (uintgo *ptr)
{
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

int64_t Loadint64 (int64_t *ptr)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Loadint64")
  __attribute__ ((no_split_stack));

int64_t
Loadint64 (int64_t *ptr)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_load_n (ptr, __ATOMIC_SEQ_CST);
}

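/* The Xadd family returns the new value, hence __atomic_add_fetch
   rather than __atomic_fetch_add.  */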
uint32_t Xadd (uint32_t *ptr, int32_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadd")
  __attribute__ ((no_split_stack));

uint32_t
Xadd (uint32_t *ptr, int32_t delta)
{
  return __atomic_add_fetch (ptr, (uint32_t) delta, __ATOMIC_SEQ_CST);
}

uint64_t Xadd64 (uint64_t *ptr, int64_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadd64")
  __attribute__ ((no_split_stack));

uint64_t
Xadd64 (uint64_t *ptr, int64_t delta)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (ptr, (uint64_t) delta, __ATOMIC_SEQ_CST);
}

uintptr_t Xadduintptr (uintptr_t *ptr, uintptr_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xadduintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Xadduintptr (uintptr_t *ptr, uintptr_t delta)
{
  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
}

int64_t Xaddint64 (int64_t *ptr, int64_t delta)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xaddint64")
  __attribute__ ((no_split_stack));

int64_t
Xaddint64 (int64_t *ptr, int64_t delta)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
}

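/* The Xchg family returns the value previously stored at *ptr.  */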
uint32_t Xchg (uint32_t *ptr, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchg")
  __attribute__ ((no_split_stack));

uint32_t
Xchg (uint32_t *ptr, uint32_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}

uint64_t Xchg64 (uint64_t *ptr, uint64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchg64")
  __attribute__ ((no_split_stack));

uint64_t
Xchg64 (uint64_t *ptr, uint64_t new)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}

uintptr_t Xchguintptr (uintptr_t *ptr, uintptr_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Xchguintptr")
  __attribute__ ((no_split_stack));

uintptr_t
Xchguintptr (uintptr_t *ptr, uintptr_t new)
{
  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
}

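/* Atomic bitwise AND/OR; the computed result is discarded.  */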
void And8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.And8")
  __attribute__ ((no_split_stack));

void
And8 (uint8_t *ptr, uint8_t val)
{
  __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
}

void Or8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Or8")
  __attribute__ ((no_split_stack));

void
Or8 (uint8_t *ptr, uint8_t val)
{
  __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
}

void And (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.And")
  __attribute__ ((no_split_stack));

void
And (uint32_t *ptr, uint32_t val)
{
  __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
}

void Or (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Or")
  __attribute__ ((no_split_stack));

void
Or (uint32_t *ptr, uint32_t val)
{
  __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
}

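/* Strong compare-and-swap: store new at *ptr if *ptr == old and
   report whether the store happened.  Relaxed ordering suffices on
   failure because the loaded value is discarded.  */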
_Bool Cas (uint32_t *ptr, uint32_t old, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Cas")
  __attribute__ ((no_split_stack));

_Bool
Cas (uint32_t *ptr, uint32_t old, uint32_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}

_Bool Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Cas64")
  __attribute__ ((no_split_stack));

_Bool
Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}

_Bool CasRel (uint32_t *ptr, uint32_t old, uint32_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.CasRel")
  __attribute__ ((no_split_stack));

_Bool
CasRel (uint32_t *ptr, uint32_t old, uint32_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_RELEASE, __ATOMIC_RELAXED);
}

_Bool Casp1 (void **ptr, void *old, void *new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casp1")
  __attribute__ ((no_split_stack));

_Bool
Casp1 (void **ptr, void *old, void *new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}

_Bool Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Casuintptr")
  __attribute__ ((no_split_stack));

_Bool
Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
{
  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
}

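/* Atomic stores.  As with the loads, the 64-bit forms check
   alignment and the Rel forms use release ordering only.  */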
void Store (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store")
  __attribute__ ((no_split_stack));

void
Store (uint32_t *ptr, uint32_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}

void Store8 (uint8_t *ptr, uint8_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store8")
  __attribute__ ((no_split_stack));

void
Store8 (uint8_t *ptr, uint8_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}

void Store64 (uint64_t *ptr, uint64_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Store64")
  __attribute__ ((no_split_stack));

void
Store64 (uint64_t *ptr, uint64_t val)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}

void StoreRel (uint32_t *ptr, uint32_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreRel")
  __attribute__ ((no_split_stack));

void
StoreRel (uint32_t *ptr, uint32_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}

void StoreRel64 (uint64_t *ptr, uint64_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreRel64")
  __attribute__ ((no_split_stack));

void
StoreRel64 (uint64_t *ptr, uint64_t val)
{
  if (((uintptr_t) ptr & 7) != 0)
    panicUnaligned ();
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}

void StoreReluintptr (uintptr_t *ptr, uintptr_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StoreReluintptr")
  __attribute__ ((no_split_stack));

void
StoreReluintptr (uintptr_t *ptr, uintptr_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_RELEASE);
}

void Storeuintptr (uintptr_t *ptr, uintptr_t val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.Storeuintptr")
  __attribute__ ((no_split_stack));

void
Storeuintptr (uintptr_t *ptr, uintptr_t val)
{
  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
}

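/* StorepNoWB stores a pointer without a GC write barrier; callers
   must ensure no barrier is needed.  */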
void StorepNoWB (void *ptr, void *val)
  __asm__ (GOSYM_PREFIX "runtime_1internal_1atomic.StorepNoWB")
  __attribute__ ((no_split_stack));

void
StorepNoWB (void *ptr, void *val)
{
  __atomic_store_n ((void**) ptr, val, __ATOMIC_SEQ_CST);
}