#include "kmp_config.h"

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
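
// These fallback stubs let the feature checks below use __has_cpp_attribute
// and __has_attribute unconditionally, even on compilers that predate the
// builtins; both simply report "attribute absent" (0) when the real builtin
// is missing.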

/* ------------------------- Compiler recognition ------------------------ */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD)
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU 1
#else
#define KMP_HAVE_SCHED_GETCPU 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifdef _MSC_VER
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) &&                  \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
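
// Usage sketch (illustrative; 'forward_to' is a hypothetical callee taking a
// kmp_va_list): on ABIs where va_list is an array type it must travel by
// pointer, which is what kmp_va_addr_of/kmp_va_deref abstract away.
//   void bridge(int n, ...) {
//     va_list ap;
//     va_start(ap, n);
//     forward_to(n, kmp_va_addr_of(ap));
//     va_end(ap);
//   }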

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of an integer type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
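// Usage sketch (illustrative): templated debug code uses traits_t<T>::spec
// (the bare conversion letters, e.g. "d" or "llu", defined elsewhere in the
// runtime) to build a printf format for T at run time:
//   template <typename T> void print_val(T v) {
//     char fmt[8];
//     snprintf(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec);
//     printf(fmt, v);
//   }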
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

/* Check if the OS/arch can support user-level mwait */
// All mwait code tests for UMWAIT first, so it should only fall back to ring3
// MWAIT for KNL.
#define KMP_HAVE_MWAIT                                                         \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT                                                        \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC)

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
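
// Worked example (illustrative): with a 4096-byte page size,
// PAGE_ALIGNED((void *)0x2000) is true, PAGE_ALIGNED((void *)0x2345) is
// false, and ALIGN_TO_PAGE(0x2345) rounds down to (void *)0x2000.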

/* ---------- Support for cache alignment, padding, etc. ----------------- */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Use a function like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
// icc does not properly detect the absence of this attribute, so force it off
#elif KMP_COMPILER_ICC
#define KMP_FALLTHROUGH() ((void)0)
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
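
// Usage sketch (illustrative): KMP_FALLTHROUGH() sits between case labels
// whose fall-through is intentional, silencing -Wimplicit-fallthrough:
//   switch (stage) {
//   case 0:
//     prepare();
//     KMP_FALLTHROUGH();
//   case 1:
//     run();
//     break;
//   }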

#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM
#endif

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS(alias_of)
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif
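
// Why two levels: the extra _KMP_STR hop forces macro arguments to be
// expanded before stringization. Illustrative: given '#define N foo',
// KMP_STR(N) yields "foo" whereas _KMP_STR(N) would yield "N".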

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)           \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                   \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t");                                       \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str)         \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str,        \
                                 default_ver)                                  \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias  \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name))));                  \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR(               \
      apic_name) "@@" default_ver "\n\t");                                     \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t")

#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num,                  \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
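
// Illustrative expansion (names hypothetical): KMP_VERSION_SYMBOL(foo, 10,
// "OMP_1.0") defines __kmp_api_foo_10_alias as an alias of __kmp_api_foo,
// then emits .symver directives so foo@OMP_1.0 binds to the alias while
// foo@@VERSION remains the default version of the exported symbol.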

/* Temporary note: if performance testing of this passes, we can remove all
   references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#if !(KMP_COMPILER_ICX && KMP_32_BIT_ARCH)
#pragma intrinsic(InterlockedExchange64)
#endif
#endif

#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
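
// Note on argument order: the Windows Interlocked CAS signature is
// (destination, exchange, comparand), so the store value (sv) precedes the
// compare value (cv) above; the intrinsic returns the prior contents, which
// is exactly the _RET contract used throughout this header.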

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))

// KMP_COMPARE_AND_STORE expects this order:       pointer, compare, exchange
// _InterlockedCompareExchange expects this order: pointer, exchange, compare
// KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
// write is successful if the return value of _InterlockedCompareExchange is
// the same as the compare value.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}

// The _RET versions return the value instead of a bool
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv),    \
                                (kmp_int64)(cv))

#else // !KMP_ARCH_AARCH64

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif // KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v),               \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_uint32 *up;
  kmp_uint32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(tmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_uint64 *up;
  kmp_uint64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 dtmp;
  memcpy(&dtmp, &tmp, sizeof(tmp));
  return dtmp;
}
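
// Note: the memcpy round-trips above are deliberate type punning. Copying the
// bit patterns through memcpy (rather than casting a float pointer to an
// integer pointer) keeps the exchange well-defined under strict-aliasing
// rules; compilers lower these fixed-size copies to plain register moves.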

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE()                                                           \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) {                                  \
    __kmp_query_cpuid(&__kmp_cpuinfo);                                         \
  }                                                                            \
  if (__kmp_cpuinfo.flags.sse2) {                                              \
    KMP_MFENCE_();                                                             \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif
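
// The TCR/TCW family below marks "thread consistent" reads, writes,
// increments, decrements, and exchanges of shared variables. In this
// configuration they compile to plain accesses (the _SYNC exchange variants
// route through the CAS macros above); the numeric suffix gives the access
// width in bytes, and _PTR selects the width that matches a pointer.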

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
   (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual studio can't handle the asm sections in this code
#define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif // KMP_USE_DYNAMIC_LOCK

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus
1233 #include "kmp_safe_c_api.h"
1236 #define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
1237 #define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)
1240 #define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
1241 #define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
1242 #define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
1243 #define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)
1246 #define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
1247 #define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
1248 #define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
1249 #define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
1250 #define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
1251 #define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
1252 #define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
1253 #define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
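
// Usage sketch (illustrative): claim a flag with acquire semantics and
// release it when done. Because "expected" is taken by value, a failed CAS
// does not report the observed value back to the caller.
//   std::atomic<kmp_int32> flag;
//   if (__kmp_atomic_compare_store_acq(&flag, 0, 1)) {
//     /* ... critical work ... */
//     KMP_ATOMIC_ST_REL(&flag, 0);
//   }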

#if KMP_OS_DARWIN && KMP_DYNAMIC_LIB
extern void *__kmp_lookup_symbol(const char *name);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif