set	prototyped

cat{
	#include <ast_common.h>
	/* convert a pointer to an integral scalar (for CAS on pointer values) */
	#define asointegralof(x)	(((char*)(x))-((char*)0))
}end
7
8if	aso note{ gcc 4.1+ 64 bit memory atomic operations model }end link{
9		#include "FEATURE/common"
10		int main()
11		{
12			uint64_t i = 0;
13			return __sync_fetch_and_add(&i,7);
14		}
15	}end && {
16		#define _ASO_INTRINSIC		1
17
18		#define asocas8(p,o,n)		__sync_val_compare_and_swap(p,o,n)
19		#define asoadd8(p,n)		__sync_fetch_and_add(p,n)
20		#define asosub8(p,n)		__sync_fetch_and_sub(p,n)
21		#define asoinc8(p)		__sync_fetch_and_add(p,1)
22		#define asodec8(p)		__sync_fetch_and_sub(p,1)
23		#define asocas16(p,o,n)		__sync_val_compare_and_swap(p,o,n)
24		#define asoadd16(p,n)		__sync_fetch_and_add(p,n)
25		#define asosub16(p,n)		__sync_fetch_and_sub(p,n)
26		#define asoinc16(p)		__sync_fetch_and_add(p,1)
27		#define asodec16(p)		__sync_fetch_and_sub(p,1)
28		#define asocas32(p,o,n)		__sync_val_compare_and_swap(p,o,n)
29		#define asoadd32(p,n)		__sync_fetch_and_add(p,n)
30		#define asosub32(p,n)		__sync_fetch_and_sub(p,n)
31		#define asoinc32(p)		__sync_fetch_and_add(p,1)
32		#define asodec32(p)		__sync_fetch_and_sub(p,1)
33		#define asocas64(p,o,n)		__sync_val_compare_and_swap(p,o,n)
34		#define asoadd64(p,n)		__sync_fetch_and_add(p,n)
35		#define asosub64(p,n)		__sync_fetch_and_sub(p,n)
36		#define asoinc64(p)		__sync_fetch_and_add(p,1)
37		#define asodec64(p)		__sync_fetch_and_sub(p,1)
38		#if _ast_sizeof_pointer == 8
39		#define asocasptr(p,o,n)	((void*)__sync_val_compare_and_swap(p,asointegralof(o),asointegralof(n)))
40		#else
41		#define asocasptr(p,o,n)	((void*)__sync_val_compare_and_swap(p,asointegralof(o),asointegralof(n)))
42		#endif
43	}
elif	aso note{ gcc 4.1+ 32 bit memory atomic operations model }end link{
		#include "FEATURE/common"
		int main()
		{
			uint32_t i = 0;
			return __sync_fetch_and_add(&i,7);
		}
	}end && {
		#define _ASO_INTRINSIC		1

		/* only the 32 bit probe passed: no 64 bit macros are defined, so
		 * asocas64 et al. fall through to the library declarations below */
		#define asocas8(p,o,n)		__sync_val_compare_and_swap(p,o,n)
		#define asoadd8(p,n)		__sync_fetch_and_add(p,n)
		#define asosub8(p,n)		__sync_fetch_and_sub(p,n)
		#define asoinc8(p)		__sync_fetch_and_add(p,1)
		#define asodec8(p)		__sync_fetch_and_sub(p,1)
		#define asocas16(p,o,n)		__sync_val_compare_and_swap(p,o,n)
		#define asoadd16(p,n)		__sync_fetch_and_add(p,n)
		#define asosub16(p,n)		__sync_fetch_and_sub(p,n)
		#define asoinc16(p)		__sync_fetch_and_add(p,1)
		#define asodec16(p)		__sync_fetch_and_sub(p,1)
		#define asocas32(p,o,n)		__sync_val_compare_and_swap(p,o,n)
		#define asoadd32(p,n)		__sync_fetch_and_add(p,n)
		#define asosub32(p,n)		__sync_fetch_and_sub(p,n)
		#define asoinc32(p)		__sync_fetch_and_add(p,1)
		#define asodec32(p)		__sync_fetch_and_sub(p,1)
		#define asocasptr(p,o,n)	((void*)__sync_val_compare_and_swap(p,asointegralof(o),asointegralof(n)))
	}
71elif	aso note{ <atomic.h> atomic_cas_64 }end link{
72		#include "FEATURE/common"
73		#include <atomic.h>
74		int main()
75		{
76			uint64_t i = 0;
77			uint32_t j = 1;
78			return atomic_cas_64(&i, 0, 1) != 0 || (atomic_add_32_nv(&j, 1) - 1) != 1;
79		}
80	}end && {
81		#include <atomic.h>
82
83		#define _ASO_INTRINSIC		1
84
85		#define asocas8(p,o,n)		atomic_cas_8(p,o,n)
86		#define asoadd8(p,n)		(atomic_add_8_nv(p,n)-(n))
87		#define asosub8(p,n)		(atomic_add_8_nv(p,(-(n))+(n))
88		#define asoinc8(p)		(atomic_add_8_nv(p,1)-1)
89		#define asodec8(p)		(atomic_add_8_nv(p,-1)+1)
90		#define asocas16(p,o,n)		atomic_cas_16(p,o,n)
91		#define asoadd16(p,n)		(atomic_add_16_nv(p,n)-(n))
92		#define asosub16(p,n)		(atomic_add_16_nv(p,-(n))+(n))
93		#define asoinc16(p)		(atomic_add_16_nv(p,1)-1)
94		#define asodec16(p)		(atomic_add_16_nv(p,-1)+1)
95		#define asocas32(p,o,n)		atomic_cas_32(p,o,n)
96		#define asoadd32(p,n)		(atomic_add_32_nv(p,n)-(n))
97		#define asosub32(p,n)		(atomic_add_32_nv(p,-(n))+(n))
98		#define asoinc32(p)		(atomic_add_32_nv(p,1)-1)
99		#define asodec32(p)		(atomic_add_32_nv(p,-1)+1)
100		#define asocas64(p,o,n)		atomic_cas_64(p,o,n)
101		#define asoadd64(p,n)		(atomic_add_64_nv(p,n)-(n))
102		#define asosub64(p,n)		(atomic_add_64_nv(p,-(n))+(n))
103		#define asoinc64(p)		(atomic_add_64_nv(p,1)-1)
104		#define asodec64(p)		(atomic_add_64_nv(p,-1)+1)
105		#if _ast_sizeof_pointer == 8
106		#define asocasptr(p,o,n)	((void*)atomic_cas_64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
107		#else
108		#define asocasptr(p,o,n)	((void*)atomic_cas_32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
109		#endif
110	}
111elif	aso note{ <atomic.h> atomic_cas_32 }end link{
112		#include "FEATURE/common"
113		#include <atomic.h>
114		int main()
115		{
116			uint32_t i = 0;
117			return atomic_cas_32(&i, 0, 1) != 0 || (atomic_add_32_nv(&i, 1) - 1) != 1;
118		}
119	}end && {
120		#include <atomic.h>
121
122		#define _ASO_INTRINSIC		1
123
124		#define asocas8(p,o,n)		atomic_cas_8(p,o,n)
125		#define asoadd8(p,n)		(atomic_add_8_nv(p,n)-(n))
126		#define asosub8(p,n)		(atomic_add_8_nv(p,(-(n))+(n))
127		#define asoinc8(p)		(atomic_add_8_nv(p,1)-1)
128		#define asodec8(p)		(atomic_add_8_nv(p,-1)+1)
129		#define asocas16(p,o,n)		atomic_cas_16(p,o,n)
130		#define asoadd16(p,n)		(atomic_add_16_nv(p,n)-(n))
131		#define asosub16(p,n)		(atomic_add_16_nv(p,-(n))+(n))
132		#define asoinc16(p)		(atomic_add_16_nv(p,1)-1)
133		#define asodec16(p)		(atomic_add_16_nv(p,-1)+1)
134		#define asocas32(p,o,n)		atomic_cas_32(p,o,n)
135		#define asoadd32(p,n)		(atomic_add_32_nv(p,n)-(n))
136		#define asosub32(p,n)		(atomic_add_32_nv(p,-(n))+(n))
137		#define asoinc32(p)		(atomic_add_32_nv(p,1)-1)
138		#define asodec32(p)		(atomic_add_32_nv(p,-1)+1)
139		#define asocasptr(p,o,n)	((void*)atomic_cas_32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
140	}
141elif	aso -latomic note{ <atomic.h> atomic_cas_64 with -latomic }end link{
142		#include "FEATURE/common"
143		#include <atomic.h>
144		int main()
145		{
146			uint64_t i = 0;
147			uint32_t j = 1;
148			return atomic_cas_64(&i, 0, 1) != 0 || (atomic_add_32_nv(&j, 1) - 1) != 1;
149		}
150	}end && {
151		#include <atomic.h>
152
153		#define _ASO_INTRINSIC		1
154		#define _REQ_atomic		1
155
156		#define asocas8(p,o,n)		atomic_cas_8(p,o,n)
157		#define asoadd8(p,n)		(atomic_add_8_nv(p,n)-(n))
158		#define asosub8(p,n)		(atomic_add_8_nv(p,(-(n))+(n))
159		#define asoinc8(p)		(atomic_add_8_nv(p,1)-1)
160		#define asodec8(p)		(atomic_add_8_nv(p,-1)+1)
161		#define asocas16(p,o,n)		atomic_cas_16(p,o,n)
162		#define asoadd16(p,n)		(atomic_add_16_nv(p,n)-(n))
163		#define asosub16(p,n)		(atomic_add_16_nv(p,-(n))+(n))
164		#define asoinc16(p)		(atomic_add_16_nv(p,1)-1)
165		#define asodec16(p)		(atomic_add_16_nv(p,-1)+1)
166		#define asocas32(p,o,n)		atomic_cas_32(p,o,n)
167		#define asoadd32(p,n)		(atomic_add_32_nv(p,n)-(n))
168		#define asosub32(p,n)		(atomic_add_32_nv(p,-(n))+(n))
169		#define asoinc32(p)		(atomic_add_32_nv(p,1)-1)
170		#define asodec32(p)		(atomic_add_32_nv(p,-1)+1)
171		#define asocas64(p,o,n)		atomic_cas_64(p,o,n)
172		#define asoadd64(p,n)		(atomic_add_64_nv(p,n)-(n))
173		#define asosub64(p,n)		(atomic_add_64_nv(p,-(n))+(n))
174		#define asoinc64(p)		(atomic_add_64_nv(p,1)-1)
175		#define asodec64(p)		(atomic_add_64_nv(p,-1)+1)
176		#if _ast_sizeof_pointer == 8
177		#define asocasptr(p,o,n)	((void*)atomic_cas_64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
178		#else
179		#define asocasptr(p,o,n)	((void*)atomic_cas_32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
180		#endif
181	}
182elif	aso -latomic note{ <atomic.h> atomic_cas_32 with -latomic }end link{
183		#include "FEATURE/common"
184		#include <atomic.h>
185		int main()
186		{
187			uint32_t i = 0;
188			return atomic_cas_32(&i, 0, 1) != 0 || (atomic_add_32_nv(&i, 1) - 1) != 1;
189		}
190	}end && {
191		#include <atomic.h>
192
193		#define _ASO_INTRINSIC		1
194		#define _REQ_atomic		1
195
196		#define asocas8(p,o,n)		atomic_cas_8(p,o,n)
197		#define asoadd8(p,n)		(atomic_add_8_nv(p,n)-(n))
198		#define asosub8(p,n)		(atomic_add_8_nv(p,(-(n))+(n))
199		#define asoinc8(p)		(atomic_add_8_nv(p,1)-1)
200		#define asodec8(p)		(atomic_add_8_nv(p,-1)+1)
201		#define asocas16(p,o,n)		atomic_cas_16(p,o,n)
202		#define asoadd16(p,n)		(atomic_add_16_nv(p,n)-(n))
203		#define asosub16(p,n)		(atomic_add_16_nv(p,-(n))+(n))
204		#define asoinc16(p)		(atomic_add_16_nv(p,1)-1)
205		#define asodec16(p)		(atomic_add_16_nv(p,-1)+1)
206		#define asocas32(p,o,n)		atomic_cas_32(p,o,n)
207		#define asoadd32(p,n)		(atomic_add_32_nv(p,n)-(n))
208		#define asosub32(p,n)		(atomic_add_32_nv(p,-(n))+(n))
209		#define asoinc32(p)		(atomic_add_32_nv(p,1)-1)
210		#define asodec32(p)		(atomic_add_32_nv(p,-1)+1)
211		#define asocasptr(p,o,n)	((void*)atomic_cas_32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
212	}
elif	aso note{ <atomic.h> cas64 }end link{
		#include "FEATURE/common"
		#include <atomic.h>
		int main()
		{
			uint64_t i = 0;
			uint32_t j = 1;
			/* the probe passes only where atomic_add_32(&j,1) returns the
			 * OLD value (1), so the direct mappings below keep aso*()
			 * old-value semantics */
			return cas64(&i, 0, 1) != 0 || atomic_add_32(&j, 1) != 1;
		}
	}end && {
		#include <atomic.h>

		#define _ASO_INTRINSIC		1

		#define asocas8(p,o,n)		cas8(p,o,n)
		#define asoadd8(p,n)		atomic_add_8(p,n)
		#define asosub8(p,n)		atomic_sub_8(p,n)
		#define asoinc8(p)		atomic_add_8(p,1)
		#define asodec8(p)		atomic_add_8(p,-1)
		#define asocas16(p,o,n)		cas16(p,o,n)
		#define asoadd16(p,n)		atomic_add_16(p,n)
		#define asosub16(p,n)		atomic_sub_16(p,n)
		#define asoinc16(p)		atomic_add_16(p,1)
		#define asodec16(p)		atomic_add_16(p,-1)
		#define asocas32(p,o,n)		cas32(p,o,n)
		#define asoadd32(p,n)		atomic_add_32(p,n)
		#define asosub32(p,n)		atomic_sub_32(p,n)
		#define asoinc32(p)		atomic_add_32(p,1)
		#define asodec32(p)		atomic_add_32(p,-1)
		#define asocas64(p,o,n)		cas64(p,o,n)
		#define asoadd64(p,n)		atomic_add_64(p,n)
		#define asosub64(p,n)		atomic_sub_64(p,n)
		#define asoinc64(p)		atomic_add_64(p,1)
		#define asodec64(p)		atomic_add_64(p,-1)
		#if _ast_sizeof_pointer == 8
		#define asocasptr(p,o,n)	((void*)cas64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
		#else
		#define asocasptr(p,o,n)	((void*)cas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
		#endif
	}
elif	aso note{ <atomic.h> just cas64 }end link{
		#include "FEATURE/common"
		#include <atomic.h>
		int main()
		{
			uint64_t i = 0;
			uint32_t j = 1;
			uint16_t k = 1;
			uint8_t l = 1;
			return cas64(&i, 0, 1) != 0 || cas32(&j, 0, 1) != 0 || cas16(&k, 0, 1) != 0 || cas8(&l, 0, 1) != 0;
		}
	}end && {
		#include <atomic.h>

		#define _ASO_INTRINSIC		1

		/* only CAS was probed: add/sub/inc/dec fall back to the library
		 * declarations at the bottom of this header */
		#define asocas8(p,o,n)		cas8(p,o,n)
		#define asocas16(p,o,n)		cas16(p,o,n)
		#define asocas32(p,o,n)		cas32(p,o,n)
		#define asocas64(p,o,n)		cas64(p,o,n)
		#if _ast_sizeof_pointer == 8
		#define asocasptr(p,o,n)	((void*)cas64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
		#else
		#define asocasptr(p,o,n)	((void*)cas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
		#endif
	}
elif	aso note{ <atomic.h> cas32 }end link{
		#include "FEATURE/common"
		#include <atomic.h>
		int main()
		{
			uint32_t i = 0;
			/* the probe passes only where atomic_add_32() returns the OLD
			 * value, so the direct mappings below keep aso*() semantics */
			return cas32(&i, 0, 1) != 0 || (atomic_add_32(&i, 1)) != 1;
		}
	}end && {
		#include <atomic.h>

		#define _ASO_INTRINSIC		1

		#define asocas8(p,o,n)		cas8(p,o,n)
		#define asoadd8(p,n)		atomic_add_8(p,n)
		#define asosub8(p,n)		atomic_sub_8(p,n)
		#define asoinc8(p)		atomic_add_8(p,1)
		#define asodec8(p)		atomic_add_8(p,-1)
		#define asocas16(p,o,n)		cas16(p,o,n)
		#define asoadd16(p,n)		atomic_add_16(p,n)
		#define asosub16(p,n)		atomic_sub_16(p,n)
		#define asoinc16(p)		atomic_add_16(p,1)
		#define asodec16(p)		atomic_add_16(p,-1)
		#define asocas32(p,o,n)		cas32(p,o,n)
		#define asoadd32(p,n)		atomic_add_32(p,n)
		#define asosub32(p,n)		atomic_sub_32(p,n)
		#define asoinc32(p)		atomic_add_32(p,1)
		#define asodec32(p)		atomic_add_32(p,-1)
		#define asocasptr(p,o,n)	((void*)cas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
	}
309elif	aso note{ <atomic.h> just cas32 }end link{
310		#include "FEATURE/common"
311		#include <atomic.h>
312		int main()
313		{
314			uint32_t j = 1;
315			uint16_t k = 1;
316			uint8_t l = 1;
317			return cas32(&j, 0, 1) != 0 || cas16(&k, 0, 1) != 0 || cas8(&l, 0, 1) != 0;
318		}
319	}end && {
320		#include <atomic.h>
321
322		#define _ASO_INTRINSIC		1
323
324		#define asocas8(p,o,n)		cas8(p,o,n)
325		#define asocas16(p,o,n)		cas16(p,o,n)
326		#define asocas32(p,o,n)		cas32(p,o,n)
327		#define asocas64(p,o,n)		cas64(p,o,n)
328		#define asocasptr(p,o,n)	((void*)cas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
329	}
elif	aso note{ winix Interlocked }end link{
		#include <windows.h>
		int main()
		{
			LONG		i = 0;
			/* InterlockedExchangeAdd() returns the OLD value */
			return InterlockedCompareExchange(&i, 1, 0) != 0 ||
			       InterlockedExchangeAdd(&i,1) != 1 ||
			       InterlockedExchangeAdd(&i,-1) != 2;
		}
	}end && {
		#include <ast_windows.h>

		#define _ASO_INTRINSIC		1
		#define _ASO_Interlocked	1

		/* NOTE: InterlockedCompareExchange*(dest,exchange,comparand) takes
		 * (n,o) -- the reverse of asocas*(p,o,n) argument order */
		#define asocas32(p,o,n)		InterlockedCompareExchange((LONG volatile*)(p),n,o)
		#define asoadd32(p,n)		InterlockedExchangeAdd((LONG volatile*)(p),n)
		#define asosub32(p,n)		InterlockedExchangeAdd((LONG volatile*)(p),-(n))
		#define asoinc32(p)		InterlockedExchangeAdd((LONG volatile*)(p),1)
		#define asodec32(p)		InterlockedExchangeAdd((LONG volatile*)(p),-1)

		#if _X64

		#define asocas64(p,o,n)		InterlockedCompareExchange64((LONGLONG volatile*)(p),n,o)
		#define asoadd64(p,n)		InterlockedExchangeAdd64((LONGLONG volatile*)(p),n)
		#define asosub64(p,n)		InterlockedExchangeAdd64((LONGLONG volatile*)(p),-(n))
		#define asoinc64(p)		InterlockedExchangeAdd64((LONGLONG volatile*)(p),1)
		#define asodec64(p)		InterlockedExchangeAdd64((LONGLONG volatile*)(p),-1)
		#define asocasptr(p,o,n)	((void*)InterlockedCompareExchange64((LONGLONG volatile*)(p),asointegralof(n),asointegralof(o)))

		#else

		/* 32 bit build: the 64 bit Interlocked entry points are reached
		 * through function pointers resolved by the aso library */
		typedef LONGLONG (*_aso_InterlockedCompareExchange64_f)(LONGLONG volatile*, LONGLONG, LONGLONG);
		typedef LONGLONG (*_aso_InterlockedExchangeAdd64_f)(LONGLONG volatile*, LONGLONG);

		#if _BLD_aso && defined(__EXPORT__)
		#define extern		extern __EXPORT__
		#endif
		#if !_BLD_aso && defined(__IMPORT__)
		#define extern		extern __IMPORT__
		#endif

		extern _aso_InterlockedCompareExchange64_f	_aso_InterlockedCompareExchange64;
		extern _aso_InterlockedExchangeAdd64_f		_aso_InterlockedExchangeAdd64;

		#undef extern

		#define asocas64(p,o,n)		_aso_InterlockedCompareExchange64((LONGLONG volatile*)(p),n,o)
		#define asoadd64(p,n)		_aso_InterlockedExchangeAdd64((LONGLONG volatile*)(p),n)
		#define asosub64(p,n)		_aso_InterlockedExchangeAdd64((LONGLONG volatile*)(p),-(n))
		#define asoinc64(p)		_aso_InterlockedExchangeAdd64((LONGLONG volatile*)(p),1)
		#define asodec64(p)		_aso_InterlockedExchangeAdd64((LONGLONG volatile*)(p),-1)

		#if _ast_sizeof_pointer == 8
		#define asocasptr(p,o,n)	((void*)_aso_InterlockedCompareExchange64((LONGLONG volatile*)(p),asointegralof(n),asointegralof(o)))
		#else
		#define asocasptr(p,o,n)	((void*)InterlockedCompareExchange((LONG volatile*)(p),asointegralof(n),asointegralof(o)))
		#endif

		#endif
	}
elif	aso note{ aix fetch and add }end link{
		#include <sys/atomic_op.h>
		int main()
		{
			int i = 0;
			return fetch_and_add((atomic_p)&i,1);
		}
	}end && {
		#include <sys/atomic_op.h>

		#define _ASO_INTRINSIC		1

		/* compare_and_swap() returns a boolean (TRUE on success); the
		 * ?: converts that to aso old-value semantics -- on success the
		 * old value was (o), otherwise reread *(p) (racy, but the best
		 * available without a value-returning CAS) */
		#define asocas32(p,o,n)		(compare_and_swap((atomic_p)(p),(int*)&(o),(int)(n)) ? (o) : *(p))
		#define asoadd32(p,n)		fetch_and_add((atomic_p)(p),n)
		#define asosub32(p,n)		fetch_and_add((atomic_p)(p),-(n))
		#define asoinc32(p)		fetch_and_add((atomic_p)(p),1)
		#define asodec32(p)		fetch_and_add((atomic_p)(p),-1)
		#if _ast_sizeof_long == 8
		#define asocas64(p,o,n)		(compare_and_swap((atomic_p)(p),(long*)&(o),(long)(n)) ? (o) : *(p))
		#endif
		#if _ast_sizeof_pointer == 8
		#define asocasptr(p,o,n)	(compare_and_swaplp((atomic_l)(p),(long*)&o,(long)n) ? (o) : *(void**)(p))
		#else
		#define asocasptr(p,o,n)	(compare_and_swap((atomic_p)(p),(int*)&o,(int)(n)) ? (o) : *(void**)(p))
		#endif
	}
elif	aso note{ mips compare and swap }end link{
		int main()
		{
			int i = 1;
			return __compare_and_swap(&i, 0, 1) != 1;
		}
	}end && {
		#define _ASO_INTRINSIC		1

		/* __compare_and_swap() returns nonzero on success; the ?: converts
		 * that to aso old-value semantics (reread *(p) on failure) */
		#define asocas32(p,o,n)		(__compare_and_swap((p),(o),(n)) ? (o) : *(p))
		#define asocasptr(p,o,n)	(__compare_and_swap((long*)(p),asointegralof(o),asointegralof(n)) ? (o) : *(void**)(p))
	}
elif	aso note{ i386|i386-64 asm compare and swap }end link{
		#include "FEATURE/common"

		/* i386 lock cmpxchg: %eax carries the comparand in and the old
		 * value out, which is exactly aso old-value CAS semantics */
		static uint32_t
		cas32(uint32_t volatile* p, uint32_t o, uint32_t n)
		{
			uint32_t	r;

			__asm__ __volatile__ (
				"lock ; cmpxchg %3,%4"
				: "=a"(r), "=m"(*p)
				: "0"(o), "q"(n), "m"(*p)
				: "memory", "cc"
				);
			return r;
		}

		#if _ast_sizeof_pointer == 8

		static uint64_t
		cas64(uint64_t volatile* p, uint64_t o, uint64_t n)
		{
			uint64_t	r;

			__asm__ __volatile__ (
				"lock ; cmpxchg %3,%4"
				: "=a"(r), "=m"(*p)
				: "0"(o), "q"(n), "m"(*p)
				: "memory", "cc"
				);
			return r;
		}

		#else

		/* 32 bit: stub so the probe links; cas64 is not used at runtime */
		#define cas64(p,o,n)	(*(p))

		#endif

		int main()
		{
			uint32_t	i = 0;
			uint64_t	j = 0;
			return cas32(&i, 0, 1) || cas64(&j, 0, 1);
		}
	}end && {
		/* _ASO_INTRINSIC == 2: the CAS is a library function built from
		 * this asm, so the extern declarations below are still emitted */
		#define _ASO_INTRINSIC		2
		#define _ASO_i386		1

		#define asocas32		_aso_cas32
		#if _ast_sizeof_pointer == 8
		#define asocas64		_aso_cas64
		#define asocasptr(p,o,n)	((void*)asocas64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
		#else
		#define asocasptr(p,o,n)	((void*)asocas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
		#endif
	}
elif	aso note{ ia64 asm compare and swap }end link{
		#include "FEATURE/common"

		/* ia64 cmpxchg{4,8}.acq with the comparand staged in ar.ccv;
		 * returns the old value, matching aso CAS semantics */
		static uint32_t
		cas32(uint32_t volatile* p, uint32_t o, uint32_t n)
		{
			uint32_t	r;

			__asm__ __volatile__ (
				"zxt4 %3=%3 ;; mov ar.ccv=%3 ;; cmpxchg4.acq %0=%1,%2,ar.ccv"
			        : "=r"(r), "+S"(*p)
				: "r"(n), "r"(o) : "memory"
				);
			return r;
		}

		static uint64_t
		cas64(uint64_t volatile* p, uint64_t o, uint64_t n)
		{
			uint64_t	r;

			__asm__ __volatile__ (
				"mov ar.ccv=%3 ;; cmpxchg8.acq %0=%1,%2,ar.ccv"
			        : "=r"(r), "+S"(*p)
				: "r"(n), "r"(o) : "memory"
				);
			return r;
		}

		int main()
		{
			uint32_t	i = 0;
			uint64_t	j = 0;
			return cas32(&i, 0, 1) || cas64(&j, 0, 1);
		}
	}end && {
		/* _ASO_INTRINSIC == 2: the CAS is a library function built from
		 * this asm, so the extern declarations below are still emitted */
		#define _ASO_INTRINSIC		2
		#define _ASO_ia64		1

		#define asocas32		_aso_cas32
		#define asocas64		_aso_cas64
		#define asocasptr(p,o,n)	((void*)asocas64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
	}
elif	aso note{ ppc asm compare and swap }end link{
		#include "FEATURE/common"

		/* ppc lwarx/stwcx. reservation loop; r is the xor of the loaded
		 * value and (o): zero means the swap was attempted/succeeded, so
		 * return (o) (the old value); nonzero means mismatch, reread *p */
		static uint32_t
		cas32(uint32_t volatile* p, uint32_t o, uint32_t n)
		{
			int	r;

			__asm__ __volatile__ (
				"0:	lwarx %0,0,%1 ;"
				"	xor. %0,%3,%0;"
				"	bne 1f;"
				"	stwcx. %2,0,%1;"
				"	bne- 0b;"
				"1:"
				: "=&r"(r)
				: "r"(p), "r"(n), "r"(o)
				: "cr0", "memory"
				);
			__asm__ __volatile__ ("isync" : : : "memory");
			return r ? *p : o;
		}

		static uint64_t
		cas64(uint64_t volatile* p, uint64_t o, uint64_t n)
		{
			long	r;

			__asm__ __volatile__ (
				"0:	ldarx %0,0,%1 ;"
				"	xor. %0,%3,%0;"
				"	bne 1f;"
				"	stdcx. %2,0,%1;"
				"	bne- 0b;"
				"1:"
				: "=&r"(r)
				: "r"(p), "r"(n), "r"(o)
				: "cr0", "memory"
				);
			__asm__ __volatile__ ("isync" : : : "memory");
			return r ? *p : o;
		}

		int main()
		{
			uint32_t	i = 0;
			uint64_t	j = 0;
			return cas32(&i, 0, 1) || cas64(&j, 0, 1);
		}
	}end && {
		/* _ASO_INTRINSIC == 2: the CAS is a library function built from
		 * this asm, so the extern declarations below are still emitted */
		#define _ASO_INTRINSIC		2
		#define _ASO_ppc		1

		#define asocas32		_aso_cas32
		#if _ast_sizeof_pointer == 8
		#define asocas64		_aso_cas64
		#define asocasptr(p,o,n)	((void*)asocas64((uint64_t*)(p),asointegralof(o),asointegralof(n)))
		#else
		#define asocasptr(p,o,n)	((void*)asocas32((uint32_t*)(p),asointegralof(o),asointegralof(n)))
		#endif
	}
else	aso note{ no intrinsic aso operations -- time to upgrade }end cat{
		/* no native atomic support detected; every aso operation is
		 * supplied by the library declarations below */
		#define _ASO_INTRINSIC		0
	}end
endif
594
cat{
	/*
	 * library fallbacks: declare an extern function for each operation
	 * not covered by an intrinsic macro above
	 */

	#if _BLD_aso && defined(__EXPORT__)
	#define extern	extern __EXPORT__
	#endif
	#if !_BLD_aso && defined(__IMPORT__)
	#define extern	extern __IMPORT__
	#endif

	#ifndef asocas8
	extern uint8_t			asocas8(uint8_t volatile*, int, int);
	#endif
	#ifndef asoget8
	extern uint8_t			asoget8(uint8_t volatile*);
	#endif
	#ifndef asoadd8
	extern uint8_t			asoadd8(uint8_t volatile*, int);
	#endif
	#ifndef asosub8
	extern uint8_t			asosub8(uint8_t volatile*, int);
	#endif
	#ifndef asoinc8
	extern uint8_t			asoinc8(uint8_t volatile*);
	#endif
	#ifndef asodec8
	extern uint8_t			asodec8(uint8_t volatile*);
	#endif
	#ifndef asomin8
	extern uint8_t			asomin8(uint8_t volatile*, int);
	#endif
	#ifndef asomax8
	extern uint8_t			asomax8(uint8_t volatile*, int);
	#endif

	#ifndef asocas16
	extern uint16_t			asocas16(uint16_t volatile*, int, int);
	#endif
	#ifndef asoget16
	extern uint16_t			asoget16(uint16_t volatile*);
	#endif
	#ifndef asoadd16
	extern uint16_t			asoadd16(uint16_t volatile*, int);
	#endif
	#ifndef asosub16
	extern uint16_t			asosub16(uint16_t volatile*, int);
	#endif
	#ifndef asoinc16
	extern uint16_t			asoinc16(uint16_t volatile*);
	#endif
	#ifndef asodec16
	extern uint16_t			asodec16(uint16_t volatile*);
	#endif
	#ifndef asomin16
	extern uint16_t			asomin16(uint16_t volatile*, int);
	#endif
	#ifndef asomax16
	extern uint16_t			asomax16(uint16_t volatile*, int);
	#endif

	/* when _ASO_INTRINSIC > 1 asocas32 is a macro for _aso_cas32(), so
	 * this declaration expands to the prototype of the renamed function */
	#if !defined(asocas32) || _ASO_INTRINSIC > 1
	extern uint32_t			asocas32(uint32_t volatile*, uint32_t, uint32_t);
	#endif
	#ifndef asoget32
	extern uint32_t			asoget32(uint32_t volatile*);
	#endif
	#ifndef asoadd32
	extern uint32_t			asoadd32(uint32_t volatile*, uint32_t);
	#endif
	#ifndef asosub32
	extern uint32_t			asosub32(uint32_t volatile*, uint32_t);
	#endif
	#ifndef asoinc32
	extern uint32_t			asoinc32(uint32_t volatile*);
	#endif
	#ifndef asodec32
	extern uint32_t			asodec32(uint32_t volatile*);
	#endif
	#ifndef asomin32
	extern uint32_t			asomin32(uint32_t volatile*, uint32_t);
	#endif
	#ifndef asomax32
	extern uint32_t			asomax32(uint32_t volatile*, uint32_t);
	#endif

	/* 64 bit operations only when the platform has a 64 bit integral type */
	#ifdef _ast_int8_t

	#if !defined(asocas64) || _ASO_INTRINSIC > 1
	extern uint64_t			asocas64(uint64_t volatile*, uint64_t, uint64_t);
	#endif
	#ifndef asoget64
	extern uint64_t			asoget64(uint64_t volatile*);
	#endif
	#ifndef asoadd64
	extern uint64_t			asoadd64(uint64_t volatile*, uint64_t);
	#endif
	#ifndef asosub64
	extern uint64_t			asosub64(uint64_t volatile*, uint64_t);
	#endif
	#ifndef asoinc64
	extern uint64_t			asoinc64(uint64_t volatile*);
	#endif
	#ifndef asodec64
	extern uint64_t			asodec64(uint64_t volatile*);
	#endif
	#ifndef asomin64
	extern uint64_t			asomin64(uint64_t volatile*, uint64_t);
	#endif
	#ifndef asomax64
	extern uint64_t			asomax64(uint64_t volatile*, uint64_t);
	#endif

	#endif

	#ifndef asocasptr
	extern void*			asocasptr(void volatile*, void*, void*);
	#endif
	#ifndef asogetptr
	extern void*			asogetptr(void volatile*);
	#endif

	#undef	extern

}end
717