/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks the __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifdef _MSC_VER
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// Macros to cast away qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// Template for debug print specifiers (d, u, lld, llu) and for obtaining
// the signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
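
// Illustrative sketch (not part of the original header): traits_t<T>::spec
// holds the printf specifier characters at run time, so a format string has
// to be assembled dynamically. "fmt" and the printed value are hypothetical.
//
//   char fmt[32];
//   snprintf(fmt, sizeof(fmt), "value = %%%s\n", traits_t<kmp_int64>::spec);
//   printf(fmt, (kmp_int64)42); // e.g. uses the "lld" specifier on Unix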

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
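
// Illustrative semantics (hypothetical pointer, assuming a 4096-byte page):
//
//   void *p = (void *)0x1234;
//   PAGE_ALIGNED(p);  // 0: the address has low bits set
//   ALIGN_TO_PAGE(p); // (void *)0x1000: rounds the address *down* to its page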

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
//   Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
# define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
# define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
# define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
# define KMP_FALLTHROUGH() ((void)0)
#endif
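
// Illustrative usage (hypothetical switch): the macro is followed by a
// semicolon and placed immediately before the case label that is fallen into.
//
//   switch (kind) {
//   case 0:
//     prepare();
//     KMP_FALLTHROUGH(); // intentional fall through, not a missing break
//   case 1:
//     run();
//   }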

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif
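
// Illustrative usage (hypothetical declarations):
//
//   KMP_ALIGN(64) static int table[16];  // storage aligned to 64 bytes
//   static KMP_THREAD_LOCAL int scratch; // one instance per thread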

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)           \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                   \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t");                                       \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
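
// Illustrative expansion (hypothetical API name and version string):
//
//   KMP_VERSION_SYMBOL(omp_get_foo, 40, "OMP_4.0");
//
// declares __kmp_api_omp_get_foo_40_alias as an alias of
// __kmp_api_omp_get_foo and emits .symver directives so that
// omp_get_foo@OMP_4.0 binds to the alias while omp_get_foo@@VERSION
// remains the default symbol.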

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
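
// Illustrative usage (hypothetical struct): give a hot, shared counter its
// own cache line so neighboring fields don't cause false sharing.
//
//   struct example_counter {
//     KMP_ALIGN_CACHE volatile kmp_int32 count;
//   };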

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
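
// Illustrative semantics (hypothetical counter): like an atomic fetch-and-op,
// KMP_TEST_THEN_* returns the value the location held *before* the update.
//
//   volatile kmp_int32 ctr = 5;
//   kmp_int32 old = KMP_TEST_THEN_INC32(&ctr); // old == 5, ctr == 6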

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
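
// Illustrative semantics (hypothetical flag): the ACQ/REL variants return a
// boolean "swap happened"; the RET variants return the value actually seen.
//
//   volatile kmp_int32 flag = 0;
//   if (KMP_COMPARE_AND_STORE_ACQ32(&flag, 0, 1)) { /* we installed the 1 */ }
//   kmp_int32 seen = KMP_COMPARE_AND_STORE_RET32(&flag, 1, 2); // seen == 1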
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(
    volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline kmp_uint64 mips_sync_val_compare_and_swap(
    volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                                  (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                                  (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
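
// Illustrative semantics (hypothetical lock byte): KMP_XCHG_* returns the
// previous contents; GCC documents __sync_lock_test_and_set as an acquire
// barrier only, so the unlock path needs a matching release.
//
//   volatile kmp_int8 lock_byte = 0;
//   if (KMP_XCHG_FIXED8(&lock_byte, 1) == 0) { /* lock acquired */ }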

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||    \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a, b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a, b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */
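
// Illustrative usage (hypothetical shared variable): TCR_*/TCW_* mark plain,
// intentionally unsynchronized reads and writes of shared state; the *_SYNC_*
// variants are reserved for synchronization data.
//
//   kmp_int32 shared_flag = 0;
//   TCW_4(shared_flag, 1);            // marked write
//   kmp_int32 v = TCR_4(shared_flag); // marked read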

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
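
// Illustrative usage (hypothetical counter):
//
//   std::atomic<kmp_int32> nthreads{0};
//   KMP_ATOMIC_INC(&nthreads);                  // fetch_add(1, acq_rel)
//   kmp_int32 n = KMP_ATOMIC_LD_ACQ(&nthreads); // load(acquire)
//   KMP_ATOMIC_ST_REL(&nthreads, n + 1);        // store(release)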

// The following helpers take "expected" by value, so callers cannot see the
// side effect that compare_exchange_strong has on it.
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
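
// Illustrative usage (hypothetical ownership word): unlike a raw
// compare_exchange_strong, a failed call leaves the caller's own copy of
// "expected" untouched.
//
//   std::atomic<kmp_int32> owner{-1};
//   bool won = __kmp_atomic_compare_store(&owner, -1, 3); // true; owner == 3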

#endif /* KMP_OS_H */

#include "kmp_safe_c_api.h"