1 /*
2  * kmp_os.h -- KPTS runtime header file.
3  */
4 
5 //===----------------------------------------------------------------------===//
6 //
7 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
8 // See https://llvm.org/LICENSE.txt for license information.
9 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #ifndef KMP_OS_H
14 #define KMP_OS_H
15 
16 #include "kmp_config.h"
17 #include <atomic>
18 #include <stdarg.h>
19 #include <stdlib.h>
20 #include <string.h>
21 
22 #define KMP_FTN_PLAIN 1
23 #define KMP_FTN_APPEND 2
24 #define KMP_FTN_UPPER 3
25 /*
26 #define KMP_FTN_PREPEND 4
27 #define KMP_FTN_UAPPEND 5
28 */
29 
30 #define KMP_PTR_SKIP (sizeof(void *))
31 
32 /* -------------------------- Compiler variations ------------------------ */
33 
34 #define KMP_OFF 0
35 #define KMP_ON 1
36 
37 #define KMP_MEM_CONS_VOLATILE 0
38 #define KMP_MEM_CONS_FENCE 1
39 
40 #ifndef KMP_MEM_CONS_MODEL
41 #define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
42 #endif
43 
44 #ifndef __has_cpp_attribute
45 #define __has_cpp_attribute(x) 0
46 #endif
47 
48 #ifndef __has_attribute
49 #define __has_attribute(x) 0
50 #endif
51 
52 /* ------------------------- Compiler recognition ---------------------- */
53 #define KMP_COMPILER_ICC 0
54 #define KMP_COMPILER_GCC 0
55 #define KMP_COMPILER_CLANG 0
56 #define KMP_COMPILER_MSVC 0
57 #define KMP_COMPILER_ICX 0
58 
59 #if __INTEL_CLANG_COMPILER
60 #undef KMP_COMPILER_ICX
61 #define KMP_COMPILER_ICX 1
62 #elif defined(__INTEL_COMPILER)
63 #undef KMP_COMPILER_ICC
64 #define KMP_COMPILER_ICC 1
65 #elif defined(__clang__)
66 #undef KMP_COMPILER_CLANG
67 #define KMP_COMPILER_CLANG 1
68 #elif defined(__GNUC__)
69 #undef KMP_COMPILER_GCC
70 #define KMP_COMPILER_GCC 1
71 #elif defined(_MSC_VER)
72 #undef KMP_COMPILER_MSVC
73 #define KMP_COMPILER_MSVC 1
74 #else
75 #error Unknown compiler
76 #endif
77 
78 #if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD || KMP_OS_AIX) &&        \
79     !KMP_OS_WASI
80 #define KMP_AFFINITY_SUPPORTED 1
81 #if KMP_OS_WINDOWS && KMP_ARCH_X86_64
82 #define KMP_GROUP_AFFINITY 1
83 #else
84 #define KMP_GROUP_AFFINITY 0
85 #endif
86 #else
87 #define KMP_AFFINITY_SUPPORTED 0
88 #define KMP_GROUP_AFFINITY 0
89 #endif
90 
91 #if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
92 #define KMP_HAVE_SCHED_GETCPU 1
93 #else
94 #define KMP_HAVE_SCHED_GETCPU 0
95 #endif
96 
97 /* Check for quad-precision extension. */
98 #define KMP_HAVE_QUAD 0
99 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
100 #if KMP_COMPILER_ICC || KMP_COMPILER_ICX
101 /* _Quad is already defined for icc */
102 #undef KMP_HAVE_QUAD
103 #define KMP_HAVE_QUAD 1
104 #elif KMP_COMPILER_CLANG
105 /* Clang doesn't support a software-implemented
106    128-bit extended precision type yet */
107 typedef long double _Quad;
108 #elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks the __multc3/__divtc3 builtins needed for quad
   precision until NetBSD 10.0, which ships with GCC 10.5 */
111 #if (!KMP_OS_NETBSD || __GNUC__ >= 10)
112 typedef __float128 _Quad;
113 #undef KMP_HAVE_QUAD
114 #define KMP_HAVE_QUAD 1
115 #endif
116 #elif KMP_COMPILER_MSVC
117 typedef long double _Quad;
118 #endif
119 #else
120 #if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
121 typedef long double _Quad;
122 #undef KMP_HAVE_QUAD
123 #define KMP_HAVE_QUAD 1
124 #endif
125 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
126 
127 #define KMP_USE_X87CONTROL 0
128 #if KMP_OS_WINDOWS
129 #define KMP_END_OF_LINE "\r\n"
130 typedef char kmp_int8;
131 typedef unsigned char kmp_uint8;
132 typedef short kmp_int16;
133 typedef unsigned short kmp_uint16;
134 typedef int kmp_int32;
135 typedef unsigned int kmp_uint32;
136 #define KMP_INT32_SPEC "d"
137 #define KMP_UINT32_SPEC "u"
138 #ifndef KMP_STRUCT64
139 typedef __int64 kmp_int64;
140 typedef unsigned __int64 kmp_uint64;
141 #define KMP_INT64_SPEC "I64d"
142 #define KMP_UINT64_SPEC "I64u"
143 #else
144 struct kmp_struct64 {
145   kmp_int32 a, b;
146 };
147 typedef struct kmp_struct64 kmp_int64;
148 typedef struct kmp_struct64 kmp_uint64;
149 /* Not sure what to use for KMP_[U]INT64_SPEC here */
150 #endif
151 #if KMP_ARCH_X86 && KMP_MSVC_COMPAT
152 #undef KMP_USE_X87CONTROL
153 #define KMP_USE_X87CONTROL 1
154 #endif
155 #if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
156 #define KMP_INTPTR 1
157 typedef __int64 kmp_intptr_t;
158 typedef unsigned __int64 kmp_uintptr_t;
159 #define KMP_INTPTR_SPEC "I64d"
160 #define KMP_UINTPTR_SPEC "I64u"
161 #endif
162 #endif /* KMP_OS_WINDOWS */
163 
164 #if KMP_OS_UNIX
165 #define KMP_END_OF_LINE "\n"
166 typedef char kmp_int8;
167 typedef unsigned char kmp_uint8;
168 typedef short kmp_int16;
169 typedef unsigned short kmp_uint16;
170 typedef int kmp_int32;
171 typedef unsigned int kmp_uint32;
172 typedef long long kmp_int64;
173 typedef unsigned long long kmp_uint64;
174 #define KMP_INT32_SPEC "d"
175 #define KMP_UINT32_SPEC "u"
176 #define KMP_INT64_SPEC "lld"
177 #define KMP_UINT64_SPEC "llu"
178 #endif /* KMP_OS_UNIX */
179 
180 #if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS || KMP_ARCH_WASM ||          \
181     KMP_ARCH_PPC
182 #define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
183 #elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                 \
184     KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 ||             \
185     KMP_ARCH_VE || KMP_ARCH_S390X
186 #define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
187 #else
188 #error "Can't determine size_t printf format specifier."
189 #endif
190 
191 #if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_WASM || KMP_ARCH_PPC
192 #define KMP_SIZE_T_MAX (0xFFFFFFFF)
193 #else
194 #define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
195 #endif
196 
197 typedef size_t kmp_size_t;
198 typedef float kmp_real32;
199 typedef double kmp_real64;
200 
201 #ifndef KMP_INTPTR
202 #define KMP_INTPTR 1
203 typedef long kmp_intptr_t;
204 typedef unsigned long kmp_uintptr_t;
205 #define KMP_INTPTR_SPEC "ld"
206 #define KMP_UINTPTR_SPEC "lu"
207 #endif
208 
209 #ifdef BUILD_I8
210 typedef kmp_int64 kmp_int;
211 typedef kmp_uint64 kmp_uint;
212 #else
213 typedef kmp_int32 kmp_int;
214 typedef kmp_uint32 kmp_uint;
215 #endif /* BUILD_I8 */
216 #define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
217 #define KMP_INT_MIN ((kmp_int32)0x80000000)
218 
219 // stdarg handling
220 #if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_WASM) &&  \
221     (KMP_OS_FREEBSD || KMP_OS_LINUX || KMP_OS_WASI)
222 typedef va_list *kmp_va_list;
223 #define kmp_va_deref(ap) (*(ap))
224 #define kmp_va_addr_of(ap) (&(ap))
225 #else
226 typedef va_list kmp_va_list;
227 #define kmp_va_deref(ap) (ap)
228 #define kmp_va_addr_of(ap) (ap)
229 #endif
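
// A minimal sketch of the intended calling convention (helper names are
// hypothetical): kmp_va_addr_of is applied where the va_list is created and
// kmp_va_deref where it is consumed, so the same code works whether
// kmp_va_list is va_list or va_list *:
//   void __example_sink(kmp_va_list ap) {
//     int first = va_arg(kmp_va_deref(ap), int);
//     (void)first;
//   }
//   void __example_source(int last, ...) {
//     va_list ap;
//     va_start(ap, last);
//     __example_sink(kmp_va_addr_of(ap));
//     va_end(ap);
//   }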
230 
231 #ifdef __cplusplus
// Macros to cast away qualifiers and to reinterpret types
233 #define CCAST(type, var) const_cast<type>(var)
234 #define RCAST(type, var) reinterpret_cast<type>(var)
235 //-------------------------------------------------------------------------
// Traits template providing the debug-print specifier (d, u, lld, llu) and
// the signed/unsigned flavors of a type
238 template <typename T> struct traits_t {};
239 // int
240 template <> struct traits_t<signed int> {
241   typedef signed int signed_t;
242   typedef unsigned int unsigned_t;
243   typedef double floating_t;
244   static char const *spec;
245   static const signed_t max_value = 0x7fffffff;
246   static const signed_t min_value = 0x80000000;
247   static const int type_size = sizeof(signed_t);
248 };
249 // unsigned int
250 template <> struct traits_t<unsigned int> {
251   typedef signed int signed_t;
252   typedef unsigned int unsigned_t;
253   typedef double floating_t;
254   static char const *spec;
255   static const unsigned_t max_value = 0xffffffff;
256   static const unsigned_t min_value = 0x00000000;
257   static const int type_size = sizeof(unsigned_t);
258 };
259 // long
260 template <> struct traits_t<signed long> {
261   typedef signed long signed_t;
262   typedef unsigned long unsigned_t;
263   typedef long double floating_t;
264   static char const *spec;
265   static const int type_size = sizeof(signed_t);
266 };
267 // long long
268 template <> struct traits_t<signed long long> {
269   typedef signed long long signed_t;
270   typedef unsigned long long unsigned_t;
271   typedef long double floating_t;
272   static char const *spec;
273   static const signed_t max_value = 0x7fffffffffffffffLL;
274   static const signed_t min_value = 0x8000000000000000LL;
275   static const int type_size = sizeof(signed_t);
276 };
277 // unsigned long long
278 template <> struct traits_t<unsigned long long> {
279   typedef signed long long signed_t;
280   typedef unsigned long long unsigned_t;
281   typedef long double floating_t;
282   static char const *spec;
283   static const unsigned_t max_value = 0xffffffffffffffffLL;
284   static const unsigned_t min_value = 0x0000000000000000LL;
285   static const int type_size = sizeof(unsigned_t);
286 };
287 //-------------------------------------------------------------------------
288 #else
289 #define CCAST(type, var) (type)(var)
290 #define RCAST(type, var) (type)(var)
291 #endif // __cplusplus
292 
293 #define KMP_EXPORT extern /* export declaration in guide libraries */
294 
295 #if __GNUC__ >= 4 && !defined(__MINGW32__)
296 #define __forceinline __inline
297 #endif
298 
299 /* Check if the OS/arch can support user-level mwait */
300 // All mwait code tests for UMWAIT first, so it should only fall back to ring3
301 // MWAIT for KNL.
302 #define KMP_HAVE_MWAIT                                                         \
303   ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
304    !KMP_MIC2)
305 #define KMP_HAVE_UMWAIT                                                        \
306   ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
307    !KMP_MIC)
308 
309 #if KMP_OS_WINDOWS
// Don't include everything related to NT status codes; we'll do that explicitly
311 #define WIN32_NO_STATUS
312 #include <windows.h>
313 
314 static inline int KMP_GET_PAGE_SIZE(void) {
315   SYSTEM_INFO si;
316   GetSystemInfo(&si);
317   return si.dwPageSize;
318 }
319 #else
320 #define KMP_GET_PAGE_SIZE() getpagesize()
321 #endif
322 
#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)(_addr) & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
325 #define ALIGN_TO_PAGE(x)                                                       \
326   (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
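
// Example, assuming a 4096-byte page: PAGE_ALIGNED((void *)0x2000) is true,
// while ALIGN_TO_PAGE((void *)0x2345) rounds down to (void *)0x2000.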
327 
328 /* ---------- Support for cache alignment, padding, etc. ----------------*/
329 
330 #ifdef __cplusplus
331 extern "C" {
332 #endif // __cplusplus
333 
334 #define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
335 
336 /* Define the default size of the cache line */
337 #ifndef CACHE_LINE
338 #define CACHE_LINE 128 /* cache line size in bytes */
339 #else
340 #if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
341 // 2006-02-13: This produces too many warnings on OS X*. Disable for now
342 #warning CACHE_LINE is too small.
343 #endif
344 #endif /* CACHE_LINE */
345 
346 #define KMP_CACHE_PREFETCH(ADDR) /* nothing */
347 
348 // Define attribute that indicates that the fall through from the previous
349 // case label is intentional and should not be diagnosed by a compiler
350 //   Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
352 #if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
353 #define KMP_FALLTHROUGH() [[fallthrough]]
// icc cannot properly tell this attribute is absent, so force it off
355 #elif KMP_COMPILER_ICC
356 #define KMP_FALLTHROUGH() ((void)0)
357 #elif __has_cpp_attribute(clang::fallthrough)
358 #define KMP_FALLTHROUGH() [[clang::fallthrough]]
359 #elif __has_attribute(fallthrough) || __GNUC__ >= 7
360 #define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
361 #else
362 #define KMP_FALLTHROUGH() ((void)0)
363 #endif
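
// Usage sketch (hypothetical helpers):
//   switch (state) {
//   case 0:
//     prepare();
//     KMP_FALLTHROUGH();
//   case 1:
//     finish();
//     break;
//   }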
364 
365 #if KMP_HAVE_ATTRIBUTE_WAITPKG
366 #define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
367 #else
368 #define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
369 #endif
370 
371 #if KMP_HAVE_ATTRIBUTE_RTM
372 #define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
373 #else
374 #define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
375 #endif
376 
377 // Define attribute that indicates a function does not return
378 #if __cplusplus >= 201103L
379 #define KMP_NORETURN [[noreturn]]
380 #elif KMP_OS_WINDOWS
381 #define KMP_NORETURN __declspec(noreturn)
382 #else
383 #define KMP_NORETURN __attribute__((noreturn))
384 #endif
385 
386 #if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
387 #define KMP_ALIGN(bytes) __declspec(align(bytes))
388 #define KMP_THREAD_LOCAL __declspec(thread)
389 #define KMP_ALIAS /* Nothing */
390 #else
391 #define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
392 #define KMP_THREAD_LOCAL __thread
393 #define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
394 #endif
395 
396 #if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
397 #define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
398 #else
399 #define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
400 #endif
401 
402 #if KMP_HAVE_WEAK_ATTRIBUTE
403 #define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
404 #else
405 #define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
406 #endif
407 
408 // Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
409 #ifndef KMP_STR
410 #define KMP_STR(x) _KMP_STR(x)
411 #define _KMP_STR(x) #x
412 #endif
413 
414 #ifdef KMP_USE_VERSION_SYMBOLS
415 // If using versioned symbols, KMP_EXPAND_NAME prepends
416 // __kmp_api_ to the real API name
417 #define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
418 #define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
419 #define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
420   _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
421 #define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)            \
422   __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias     \
423       __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                    \
424   __asm__(                                                                      \
425       ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
426           api_name) "@" ver_str "\n\t");                                        \
427   __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                 \
428       api_name) "@@" default_ver "\n\t")
429 
430 #define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str)         \
431   _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
432 #define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str,          \
433                                  default_ver)                                    \
434   __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias    \
435       __attribute__((alias(KMP_STR(__kmp_api_##apic_name))));                    \
436   __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR(                 \
437       apic_name) "@@" default_ver "\n\t");                                       \
438   __asm__(                                                                       \
439       ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
440           api_name) "@" ver_str "\n\t")
441 
442 #else // KMP_USE_VERSION_SYMBOLS
443 #define KMP_EXPAND_NAME(api_name) api_name
444 #define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
445 #define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num,                  \
446                                 ver_str) /* Nothing */
447 #endif // KMP_USE_VERSION_SYMBOLS
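
// Illustrative expansion for a hypothetical symbol "foo":
//   KMP_VERSION_SYMBOL(foo, 10, "VER_1.0")
// declares __kmp_api_foo_10_alias as an alias of __kmp_api_foo and emits
//   .symver __kmp_api_foo_10_alias, foo@VER_1.0   // compat version
//   .symver __kmp_api_foo, foo@@VERSION           // default version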
448 
449 /* Temporary note: if performance testing of this passes, we can remove
450    all references to KMP_DO_ALIGN and replace with KMP_ALIGN.  */
451 #define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
452 #define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
453 #define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
454 
455 /* General purpose fence types for memory operations */
456 enum kmp_mem_fence_type {
457   kmp_no_fence, /* No memory fence */
458   kmp_acquire_fence, /* Acquire (read) memory fence */
459   kmp_release_fence, /* Release (write) memory fence */
460   kmp_full_fence /* Full (read+write) memory fence */
461 };
462 
463 // Synchronization primitives
464 
#if KMP_ASM_INTRINS && KMP_OS_WINDOWS &&                                       \
    !((KMP_ARCH_AARCH64 || KMP_ARCH_ARM) &&                                    \
      (KMP_COMPILER_CLANG || KMP_COMPILER_GCC))
466 
467 #if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
468 #pragma intrinsic(InterlockedExchangeAdd)
469 #pragma intrinsic(InterlockedCompareExchange)
470 #pragma intrinsic(InterlockedExchange)
471 #if !(KMP_COMPILER_ICX && KMP_32_BIT_ARCH)
472 #pragma intrinsic(InterlockedExchange64)
473 #endif
474 #endif
475 
476 // Using InterlockedIncrement / InterlockedDecrement causes a library loading
477 // ordering problem, so we use InterlockedExchangeAdd instead.
478 #define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
479 #define KMP_TEST_THEN_INC_ACQ32(p)                                             \
480   InterlockedExchangeAdd((volatile long *)(p), 1)
481 #define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
482 #define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
483   InterlockedExchangeAdd((volatile long *)(p), 4)
484 #define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
485 #define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
486   InterlockedExchangeAdd((volatile long *)(p), -1)
487 #define KMP_TEST_THEN_ADD32(p, v)                                              \
488   InterlockedExchangeAdd((volatile long *)(p), (v))
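
// Note: InterlockedExchangeAdd returns the value held *before* the addition,
// e.g. (hypothetical counter):
//   long old = KMP_TEST_THEN_INC32(&counter); // counter is bumped; old holds
//                                             // the pre-increment value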
489 
490 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
491   InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))
492 
493 #define KMP_XCHG_FIXED32(p, v)                                                 \
494   InterlockedExchange((volatile long *)(p), (long)(v))
495 #define KMP_XCHG_FIXED64(p, v)                                                 \
496   InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))
497 
498 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
499   kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
500   return *(kmp_real32 *)&tmp;
501 }
502 
503 #define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
504 #define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
505 #define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
506 #define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
507 #define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
508 #define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
509 
510 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
511 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
512 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
513 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
514 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
515 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
516 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
517 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
518 
519 #if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
520 #define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
521 #define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
522 #define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
523 // #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
524 // #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
525 // #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
526 // #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
527 #define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))
528 
529 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
530   __kmp_compare_and_store_acq8((p), (cv), (sv))
531 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
532   __kmp_compare_and_store_rel8((p), (cv), (sv))
533 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
534   __kmp_compare_and_store_acq16((p), (cv), (sv))
535 /*
536 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
537   __kmp_compare_and_store_rel16((p), (cv), (sv))
538 */
539 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
540   __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
541                                 (kmp_int32)(sv))
542 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
543   __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
544                                 (kmp_int32)(sv))
545 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
546   __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
547                                 (kmp_int64)(sv))
548 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
549   __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
550                                 (kmp_int64)(sv))
551 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
552   __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))
553 
554 //  KMP_COMPARE_AND_STORE expects this order:       pointer, compare, exchange
555 // _InterlockedCompareExchange expects this order:  pointer, exchange, compare
556 // KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
557 // write is successful if the return value of _InterlockedCompareExchange is the
558 // same as the compare value.
559 inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
560                                              kmp_int8 sv) {
561   return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
562 }
563 
564 inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
565                                              kmp_int8 sv) {
566   return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
567 }
568 
569 inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
570                                                kmp_int16 cv, kmp_int16 sv) {
571   return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
572 }
573 
574 inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
575                                                kmp_int16 cv, kmp_int16 sv) {
576   return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
577 }
578 
579 inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
580                                                kmp_int32 cv, kmp_int32 sv) {
581   return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
582 }
583 
584 inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
585                                                kmp_int32 cv, kmp_int32 sv) {
586   return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
587 }
588 
589 inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
590                                                kmp_int64 cv, kmp_int64 sv) {
591   return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
592 }
593 
594 inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
595                                                kmp_int64 cv, kmp_int64 sv) {
596   return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
597 }
598 
599 inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
600                                              void *sv) {
601   return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
602 }
603 
// The _RET versions return the original value instead of a bool
605 
#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  _InterlockedCompareExchange8((p), (sv), (cv))
608 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
609   _InterlockedCompareExchange16((p), (sv), (cv))
610 
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv),    \
                                (kmp_int64)(cv))

616 #define KMP_XCHG_FIXED8(p, v)                                                  \
617   _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
618 #define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
619 #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
620 
621 inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
  return *(kmp_real64 *)&tmp;
624 }
625 
626 #else // !KMP_ARCH_AARCH64
627 
628 // Routines that we still need to implement in assembly.
629 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
630 
631 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
632                                          kmp_int8 sv);
633 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
634                                            kmp_int16 sv);
635 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
636                                            kmp_int32 sv);
637 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
638                                            kmp_int64 sv);
639 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
640                                              kmp_int8 sv);
641 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
642                                                kmp_int16 cv, kmp_int16 sv);
643 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
644                                                kmp_int32 cv, kmp_int32 sv);
645 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
646                                                kmp_int64 cv, kmp_int64 sv);
647 
648 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
649 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
650 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
651 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
652 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
653 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
654 
655 //#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
656 //#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
657 #define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
658 #define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
659 //#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
660 //#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
661 #define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
662 #define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
663 //#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
664 //#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
665 #define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
666 #define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
667 //#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
668 #define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

672 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
673   __kmp_compare_and_store8((p), (cv), (sv))
674 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
675   __kmp_compare_and_store8((p), (cv), (sv))
676 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
677   __kmp_compare_and_store16((p), (cv), (sv))
678 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
679   __kmp_compare_and_store16((p), (cv), (sv))
680 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
681   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
682                             (kmp_int32)(sv))
683 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
684   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
685                             (kmp_int32)(sv))
686 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
687   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
688                             (kmp_int64)(sv))
689 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
690   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
691                             (kmp_int64)(sv))
692 
693 #if KMP_ARCH_X86
694 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
695   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
696                             (kmp_int32)(sv))
697 #else /* 64 bit pointers */
698 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
699   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
700                             (kmp_int64)(sv))
701 #endif /* KMP_ARCH_X86 */
702 
703 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
704   __kmp_compare_and_store_ret8((p), (cv), (sv))
705 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
706   __kmp_compare_and_store_ret16((p), (cv), (sv))
707 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
708   __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
709                                 (kmp_int64)(sv))
710 
711 #define KMP_XCHG_FIXED8(p, v)                                                  \
712   __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
713 #define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
714 //#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
715 //#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
716 //#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
717 #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
718 #endif
719 
720 #elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
721 
/* Cast p to the correct type so that the proper intrinsic is used */
723 #define KMP_TEST_THEN_INC32(p)                                                 \
724   __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
725 #define KMP_TEST_THEN_INC_ACQ32(p)                                             \
726   __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
727 #if KMP_ARCH_MIPS
728 #define KMP_TEST_THEN_INC64(p)                                                 \
729   __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
730 #define KMP_TEST_THEN_INC_ACQ64(p)                                             \
731   __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
732 #else
733 #define KMP_TEST_THEN_INC64(p)                                                 \
734   __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
735 #define KMP_TEST_THEN_INC_ACQ64(p)                                             \
736   __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
737 #endif
738 #define KMP_TEST_THEN_ADD4_32(p)                                               \
739   __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
740 #define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
741   __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
742 #if KMP_ARCH_MIPS
743 #define KMP_TEST_THEN_ADD4_64(p)                                               \
744   __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
745 #define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
746   __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
747 #define KMP_TEST_THEN_DEC64(p)                                                 \
748   __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
749 #define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
750   __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
751 #else
752 #define KMP_TEST_THEN_ADD4_64(p)                                               \
753   __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
754 #define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
755   __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
756 #define KMP_TEST_THEN_DEC64(p)                                                 \
757   __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
758 #define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
759   __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
760 #endif
761 #define KMP_TEST_THEN_DEC32(p)                                                 \
762   __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
763 #define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
764   __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
765 #define KMP_TEST_THEN_ADD8(p, v)                                               \
766   __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
767 #define KMP_TEST_THEN_ADD32(p, v)                                              \
768   __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
769 #if KMP_ARCH_MIPS
770 #define KMP_TEST_THEN_ADD64(p, v)                                              \
771   __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
772                      __ATOMIC_SEQ_CST)
773 #else
774 #define KMP_TEST_THEN_ADD64(p, v)                                              \
775   __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
776 #endif
777 
778 #define KMP_TEST_THEN_OR8(p, v)                                                \
779   __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
780 #define KMP_TEST_THEN_AND8(p, v)                                               \
781   __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
782 #define KMP_TEST_THEN_OR32(p, v)                                               \
783   __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
784 #define KMP_TEST_THEN_AND32(p, v)                                              \
785   __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
786 #if KMP_ARCH_MIPS
787 #define KMP_TEST_THEN_OR64(p, v)                                               \
788   __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
789                     __ATOMIC_SEQ_CST)
790 #define KMP_TEST_THEN_AND64(p, v)                                              \
791   __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
792                      __ATOMIC_SEQ_CST)
793 #else
794 #define KMP_TEST_THEN_OR64(p, v)                                               \
795   __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
796 #define KMP_TEST_THEN_AND64(p, v)                                              \
797   __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
798 #endif
799 
800 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
801   __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
802                                (kmp_uint8)(sv))
803 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
804   __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
805                                (kmp_uint8)(sv))
806 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
807   __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
808                                (kmp_uint16)(sv))
809 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
810   __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
811                                (kmp_uint16)(sv))
812 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
813   __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
814                                (kmp_uint32)(sv))
815 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
816   __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
817                                (kmp_uint32)(sv))
818 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
819   __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
820                                (void *)(sv))
821 
822 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
823   __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
824                               (kmp_uint8)(sv))
825 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
826   __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
827                               (kmp_uint16)(sv))
828 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
829   __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
830                               (kmp_uint32)(sv))
831 #if KMP_ARCH_MIPS
832 static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
833                                                    kmp_uint64 cv,
834                                                    kmp_uint64 sv) {
835   return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
836                                    __ATOMIC_SEQ_CST);
837 }
static inline kmp_uint64 mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                        kmp_uint64 cv,
                                                        kmp_uint64 sv) {
841   __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
842                             __ATOMIC_SEQ_CST);
843   return cv;
844 }
845 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
846   mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
847                                   (kmp_uint64)(cv), (kmp_uint64)(sv))
848 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
849   mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
850                                   (kmp_uint64)(cv), (kmp_uint64)(sv))
851 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
852   mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
853                                  (kmp_uint64)(sv))
854 #else
855 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
856   __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
857                                (kmp_uint64)(sv))
858 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
859   __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
860                                (kmp_uint64)(sv))
861 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
862   __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
863                               (kmp_uint64)(sv))
864 #endif
865 
866 #if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
867 #define KMP_XCHG_FIXED8(p, v)                                                  \
868   __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v),               \
869                       __ATOMIC_SEQ_CST)
870 #else
871 #define KMP_XCHG_FIXED8(p, v)                                                  \
872   __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
873 #endif
874 #define KMP_XCHG_FIXED16(p, v)                                                 \
875   __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
876 #define KMP_XCHG_FIXED32(p, v)                                                 \
877   __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
878 #define KMP_XCHG_FIXED64(p, v)                                                 \
879   __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
880 
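// The memcpy round-trips below reinterpret the bit pattern of the operands
// without the strict-aliasing hazards of pointer casts; compilers typically
// optimize these fixed-size copies away.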
881 inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
882   volatile kmp_uint32 *up;
883   kmp_uint32 uv;
884   memcpy(&up, &p, sizeof(up));
885   memcpy(&uv, &v, sizeof(uv));
886   kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
887   kmp_real32 ftmp;
888   memcpy(&ftmp, &tmp, sizeof(tmp));
889   return ftmp;
890 }
891 
892 inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
893   volatile kmp_uint64 *up;
894   kmp_uint64 uv;
895   memcpy(&up, &p, sizeof(up));
896   memcpy(&uv, &v, sizeof(uv));
897   kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
898   kmp_real64 dtmp;
899   memcpy(&dtmp, &tmp, sizeof(tmp));
900   return dtmp;
901 }
902 
903 #else
904 
905 extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
906 extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
907 extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
908 extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
909 extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
910 extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
911 extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
912 extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
913 extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);
914 
915 extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
916                                          kmp_int8 sv);
917 extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
918                                            kmp_int16 sv);
919 extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
920                                            kmp_int32 sv);
921 extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
922                                            kmp_int64 sv);
923 extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
924                                              kmp_int8 sv);
925 extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
926                                                kmp_int16 cv, kmp_int16 sv);
927 extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
928                                                kmp_int32 cv, kmp_int32 sv);
929 extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
930                                                kmp_int64 cv, kmp_int64 sv);
931 
932 extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
933 extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
934 extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
935 extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
936 extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
937 extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);
938 
939 #define KMP_TEST_THEN_INC32(p)                                                 \
940   __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
941 #define KMP_TEST_THEN_INC_ACQ32(p)                                             \
942   __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
943 #define KMP_TEST_THEN_INC64(p)                                                 \
944   __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
945 #define KMP_TEST_THEN_INC_ACQ64(p)                                             \
946   __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
947 #define KMP_TEST_THEN_ADD4_32(p)                                               \
948   __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
949 #define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
950   __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
951 #define KMP_TEST_THEN_ADD4_64(p)                                               \
952   __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
953 #define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
954   __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
955 #define KMP_TEST_THEN_DEC32(p)                                                 \
956   __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
957 #define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
958   __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
959 #define KMP_TEST_THEN_DEC64(p)                                                 \
960   __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
961 #define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
962   __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
963 #define KMP_TEST_THEN_ADD8(p, v)                                               \
964   __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
965 #define KMP_TEST_THEN_ADD32(p, v)                                              \
966   __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
967 #define KMP_TEST_THEN_ADD64(p, v)                                              \
968   __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))
969 
970 #define KMP_TEST_THEN_OR8(p, v)                                                \
971   __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
972 #define KMP_TEST_THEN_AND8(p, v)                                               \
973   __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
974 #define KMP_TEST_THEN_OR32(p, v)                                               \
975   __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
976 #define KMP_TEST_THEN_AND32(p, v)                                              \
977   __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
978 #define KMP_TEST_THEN_OR64(p, v)                                               \
979   __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
980 #define KMP_TEST_THEN_AND64(p, v)                                              \
981   __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
982 
983 #define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
984   __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
985                            (kmp_int8)(sv))
986 #define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
987   __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
988                            (kmp_int8)(sv))
989 #define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
990   __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
991                             (kmp_int16)(sv))
992 #define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
993   __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
994                             (kmp_int16)(sv))
995 #define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
996   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
997                             (kmp_int32)(sv))
998 #define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
999   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
1000                             (kmp_int32)(sv))
1001 #define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
1002   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
1003                             (kmp_int64)(sv))
1004 #define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
1005   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
1006                             (kmp_int64)(sv))
1007 
1008 #if KMP_ARCH_X86
1009 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
1010   __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
1011                             (kmp_int32)(sv))
1012 #else /* 64 bit pointers */
1013 #define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
1014   __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
1015                             (kmp_int64)(sv))
1016 #endif /* KMP_ARCH_X86 */
1017 
1018 #define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
1019   __kmp_compare_and_store_ret8((p), (cv), (sv))
1020 #define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
1021   __kmp_compare_and_store_ret16((p), (cv), (sv))
1022 #define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
1023   __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
1024                                 (kmp_int32)(sv))
1025 #define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
1026   __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
1027                                 (kmp_int64)(sv))
1028 
1029 #define KMP_XCHG_FIXED8(p, v)                                                  \
1030   __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
1031 #define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
1032 #define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
1033 #define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
1034 #define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
1035 #define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
1036 
1037 #endif /* KMP_ASM_INTRINS */
1038 
1039 /* ------------- relaxed consistency memory model stuff ------------------ */
1040 
1041 #if KMP_OS_WINDOWS
1042 #ifdef __ABSOFT_WIN
1043 #define KMP_MB() asm("nop")
1044 #define KMP_IMB() asm("nop")
1045 #else
1046 #define KMP_MB() /* _asm{ nop } */
1047 #define KMP_IMB() /* _asm{ nop } */
1048 #endif
1049 #endif /* KMP_OS_WINDOWS */
1050 
1051 #if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
1052     KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 ||             \
1053     KMP_ARCH_VE || KMP_ARCH_S390X || KMP_ARCH_PPC
1054 #if KMP_OS_WINDOWS
1055 #undef KMP_MB
1056 #define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
1057 #else /* !KMP_OS_WINDOWS */
1058 #define KMP_MB() __sync_synchronize()
1059 #endif
1060 #endif
1061 
1062 #ifndef KMP_MB
1063 #define KMP_MB() /* nothing to do */
1064 #endif
1065 
1066 #if KMP_ARCH_X86 || KMP_ARCH_X86_64
1067 #if KMP_MIC
1068 // fence-style instructions do not exist, but lock; xaddl $0,(%rsp) can be used.
1069 // We shouldn't need it, though, since the ABI rules require that
1070 // * If the compiler generates NGO stores it also generates the fence
1071 // * If users hand-code NGO stores they should insert the fence
1072 // therefore no incomplete unordered stores should be visible.
1073 #define KMP_MFENCE() /* Nothing */
1074 #define KMP_SFENCE() /* Nothing */
1075 #else
1076 #if KMP_COMPILER_ICC || KMP_COMPILER_ICX
1077 #define KMP_MFENCE_() _mm_mfence()
1078 #define KMP_SFENCE_() _mm_sfence()
1079 #elif KMP_COMPILER_MSVC
1080 #define KMP_MFENCE_() MemoryBarrier()
1081 #define KMP_SFENCE_() MemoryBarrier()
1082 #else
1083 #define KMP_MFENCE_() __sync_synchronize()
1084 #define KMP_SFENCE_() __sync_synchronize()
1085 #endif
1086 #define KMP_MFENCE()                                                           \
1087   if (UNLIKELY(!__kmp_cpuinfo.initialized)) {                                  \
1088     __kmp_query_cpuid(&__kmp_cpuinfo);                                         \
1089   }                                                                            \
1090   if (__kmp_cpuinfo.flags.sse2) {                                              \
1091     KMP_MFENCE_();                                                             \
1092   }
1093 #define KMP_SFENCE() KMP_SFENCE_()
1094 #endif
1095 #else
1096 #define KMP_MFENCE() KMP_MB()
1097 #define KMP_SFENCE() KMP_MB()
1098 #endif
1099 
1100 #ifndef KMP_IMB
1101 #define KMP_IMB() /* nothing to do */
1102 #endif
1103 
1104 #ifndef KMP_ST_REL32
1105 #define KMP_ST_REL32(A, D) (*(A) = (D))
1106 #endif
1107 
1108 #ifndef KMP_ST_REL64
1109 #define KMP_ST_REL64(A, D) (*(A) = (D))
1110 #endif
1111 
1112 #ifndef KMP_LD_ACQ32
1113 #define KMP_LD_ACQ32(A) (*(A))
1114 #endif
1115 
1116 #ifndef KMP_LD_ACQ64
1117 #define KMP_LD_ACQ64(A) (*(A))
1118 #endif
1119 
1120 /* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
1122 //
1123 // #define TCR_4(a)    (*(volatile kmp_int32 *)(&a))
1124 // #define TCW_4(a,b)  (a) = (*(volatile kmp_int32 *)&(b))
1125 //
1126 // #define TCR_8(a)    (*(volatile kmp_int64 *)(a))
1127 // #define TCW_8(a,b)  (a) = (*(volatile kmp_int64 *)(&b))
1128 //
1129 // I'm fairly certain this is the correct thing to do, but I'm afraid
1130 // of performance regressions.
1131 
1132 #define TCR_1(a) (a)
1133 #define TCW_1(a, b) (a) = (b)
1134 #define TCR_4(a) (a)
1135 #define TCW_4(a, b) (a) = (b)
1136 #define TCI_4(a) (++(a))
1137 #define TCD_4(a) (--(a))
1138 #define TCR_8(a) (a)
1139 #define TCW_8(a, b) (a) = (b)
1140 #define TCI_8(a) (++(a))
1141 #define TCD_8(a) (--(a))
1142 #define TCR_SYNC_4(a) (a)
1143 #define TCW_SYNC_4(a, b) (a) = (b)
1144 #define TCX_SYNC_4(a, b, c)                                                    \
1145   KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
1146                               (kmp_int32)(b), (kmp_int32)(c))
1147 #define TCR_SYNC_8(a) (a)
1148 #define TCW_SYNC_8(a, b) (a) = (b)
1149 #define TCX_SYNC_8(a, b, c)                                                    \
1150   KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
1151                               (kmp_int64)(b), (kmp_int64)(c))
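
// Usage sketch (hypothetical shared flag): TCR_*/TCW_* mark thread-coherent
// reads and writes of shared state; today they compile to plain accesses
// (see the FIXME above):
//   kmp_int32 v = TCR_4(some_shared_flag);
//   TCW_4(some_shared_flag, v + 1);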
1152 
1153 #if KMP_ARCH_X86 || KMP_ARCH_MIPS || KMP_ARCH_WASM || KMP_ARCH_PPC
1154 // What about ARM?
1155 #define TCR_PTR(a) ((void *)TCR_4(a))
1156 #define TCW_PTR(a, b) TCW_4((a), (b))
1157 #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
1158 #define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
1159 #define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))
1160 
1161 #else /* 64 bit pointers */
1162 
1163 #define TCR_PTR(a) ((void *)TCR_8(a))
1164 #define TCW_PTR(a, b) TCW_8((a), (b))
1165 #define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
1166 #define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
1167 #define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))
1168 
1169 #endif /* KMP_ARCH_X86 */
1170 
/* If these FTN_{TRUE,FALSE} values change, we may need to change several
   places where they are used to check that the language is Fortran, not C. */
1173 
1174 #ifndef FTN_TRUE
1175 #define FTN_TRUE TRUE
1176 #endif
1177 
1178 #ifndef FTN_FALSE
1179 #define FTN_FALSE FALSE
1180 #endif
1181 
1182 typedef void (*microtask_t)(int *gtid, int *npr, ...);
1183 
1184 #ifdef USE_VOLATILE_CAST
1185 #define VOLATILE_CAST(x) (volatile x)
1186 #else
1187 #define VOLATILE_CAST(x) (x)
1188 #endif
1189 
1190 #define KMP_WAIT __kmp_wait_4
1191 #define KMP_WAIT_PTR __kmp_wait_4_ptr
1192 #define KMP_EQ __kmp_eq_4
1193 #define KMP_NEQ __kmp_neq_4
1194 #define KMP_LT __kmp_lt_4
1195 #define KMP_GE __kmp_ge_4
1196 #define KMP_LE __kmp_le_4
1197 
1198 /* Workaround for Intel(R) 64 code gen bug when taking address of static array
1199  * (Intel(R) 64 Tracker #138) */
1200 #if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
1201 #define STATIC_EFI2_WORKAROUND
1202 #else
1203 #define STATIC_EFI2_WORKAROUND static
1204 #endif
1205 
1206 // Support of BGET usage
1207 #ifndef KMP_USE_BGET
1208 #define KMP_USE_BGET 1
1209 #endif
1210 
1211 // Switches for OSS builds
1212 #ifndef USE_CMPXCHG_FIX
1213 #define USE_CMPXCHG_FIX 1
1214 #endif
1215 
1216 // Enable dynamic user lock
1217 #define KMP_USE_DYNAMIC_LOCK 1
1218 
1219 // Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
1220 // dynamic user lock is turned on
1221 #if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
1224 #ifdef KMP_USE_ADAPTIVE_LOCKS
1225 #undef KMP_USE_ADAPTIVE_LOCKS
1226 #endif
1227 #define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
1228 #endif
1229 
// Enable conversion of ticks to seconds
1231 #if KMP_STATS_ENABLED
1232 #define KMP_HAVE_TICK_TIME                                                     \
1233   (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
1234 #endif
1235 
1236 // Warning levels
1237 enum kmp_warnings_level {
1238   kmp_warnings_off = 0, /* No warnings */
1239   kmp_warnings_low, /* Minimal warnings (default) */
1240   kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
1241   kmp_warnings_verbose /* reserved */
1242 };
1243 
1244 #ifdef __cplusplus
1245 } // extern "C"
1246 #endif // __cplusplus
1247 
1248 // Safe C API
1249 #include "kmp_safe_c_api.h"
1250 
1251 // Macros for C++11 atomic functions
1252 #define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
1253 #define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)
1254 
1255 // For non-default load/store
1256 #define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
1257 #define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
1258 #define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
1259 #define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)
1260 
1261 // For non-default fetch_<op>
1262 #define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
1263 #define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
1264 #define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
1265 #define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
1266 #define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
1267 #define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
1268 #define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
1269 #define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
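
// Usage sketch, assuming p is a std::atomic<kmp_int32> *:
//   kmp_int32 old = KMP_ATOMIC_INC(p); // (p)->fetch_add(1, acq_rel)
//   KMP_ATOMIC_ST_REL(p, 0); // (p)->store(0, release)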
1270 
1271 // Callers of the following functions cannot see the side effect on "expected".
1272 template <typename T>
1273 bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
1274   return p->compare_exchange_strong(
1275       expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
1276 }
1277 
1278 template <typename T>
1279 bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
1280   return p->compare_exchange_strong(
1281       expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
1282 }
1283 
1284 template <typename T>
1285 bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
1286   return p->compare_exchange_strong(
1287       expected, desired, std::memory_order_release, std::memory_order_relaxed);
1288 }
1289 
1290 // Symbol lookup on Linux/Windows
1291 #if KMP_OS_WINDOWS
1292 extern void *__kmp_lookup_symbol(const char *name, bool next = false);
1293 #define KMP_DLSYM(name) __kmp_lookup_symbol(name)
1294 #define KMP_DLSYM_NEXT(name) __kmp_lookup_symbol(name, true)
1295 #elif KMP_OS_WASI
1296 #define KMP_DLSYM(name) nullptr
1297 #define KMP_DLSYM_NEXT(name) nullptr
1298 #else
1299 #define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
1300 #define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
1301 #endif
1302 
1303 // MSVC doesn't have this, but clang/clang-cl does.
1304 #ifndef __has_builtin
1305 #define __has_builtin(x) 0
1306 #endif
1307 
1308 // Same as LLVM_BUILTIN_UNREACHABLE. States that it is UB to reach this point.
1309 #if __has_builtin(__builtin_unreachable) || defined(__GNUC__)
1310 #define KMP_BUILTIN_UNREACHABLE __builtin_unreachable()
1311 #elif defined(_MSC_VER)
1312 #define KMP_BUILTIN_UNREACHABLE __assume(false)
1313 #else
1314 #define KMP_BUILTIN_UNREACHABLE
1315 #endif
1316 
1317 #endif /* KMP_OS_H */
1318