#include "kmp_config.h"

/* Fortran name mangling schemes */
#define KMP_FTN_PLAIN   1
#define KMP_FTN_APPEND  2
#define KMP_FTN_UPPER   3

#define KMP_PTR_SKIP    (sizeof(void*))

/* -------- Memory consistency model -------- */
#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE    1

#ifndef KMP_MEM_CONS_MODEL
# define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

/* -------- Compiler recognition: exactly one of these ends up set to 1 -------- */
#define KMP_COMPILER_ICC   0
#define KMP_COMPILER_GCC   0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC  0

#if defined( __INTEL_COMPILER )
# undef KMP_COMPILER_ICC
# define KMP_COMPILER_ICC 1
#elif defined( __clang__ )
# undef KMP_COMPILER_CLANG
# define KMP_COMPILER_CLANG 1
#elif defined( __GNUC__ )
# undef KMP_COMPILER_GCC
# define KMP_COMPILER_GCC 1
#elif defined( _MSC_VER )
# undef KMP_COMPILER_MSVC
# define KMP_COMPILER_MSVC 1
#else
# error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK && !KMP_ARCH_PPC64
# define KMP_AFFINITY_SUPPORTED 1
# if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#  define KMP_GROUP_AFFINITY 1
# else
#  define KMP_GROUP_AFFINITY 0
# endif
#else
# define KMP_AFFINITY_SUPPORTED 0
# define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
# if KMP_COMPILER_ICC
   /* _Quad is already defined for icc */
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_CLANG
   /* Clang doesn't support a software-implemented
      128-bit extended precision type yet */
   typedef long double _Quad;
# elif KMP_COMPILER_GCC
   typedef __float128 _Quad;
#  define KMP_HAVE_QUAD 1
# elif KMP_COMPILER_MSVC
   typedef long double _Quad;
# endif
#else
# if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
   typedef long double _Quad;
#  undef  KMP_HAVE_QUAD
#  define KMP_HAVE_QUAD 1
# endif
#endif

#if KMP_OS_WINDOWS
  typedef char               kmp_int8;
  typedef unsigned char      kmp_uint8;
  typedef short              kmp_int16;
  typedef unsigned short     kmp_uint16;
  typedef int                kmp_int32;
  typedef unsigned int       kmp_uint32;
# define KMP_INT32_SPEC  "d"
# define KMP_UINT32_SPEC "u"
# ifndef KMP_STRUCT64
    typedef __int64          kmp_int64;
    typedef unsigned __int64 kmp_uint64;
    #define KMP_INT64_SPEC  "I64d"
    #define KMP_UINT64_SPEC "I64u"
# else
    struct kmp_struct64 {
        kmp_int32 a, b;
    };
    typedef struct kmp_struct64 kmp_int64;
    typedef struct kmp_struct64 kmp_uint64;
# endif /* KMP_STRUCT64 */
# if KMP_ARCH_X86_64
#  define KMP_INTPTR 1
    typedef __int64          kmp_intptr_t;
    typedef unsigned __int64 kmp_uintptr_t;
#  define KMP_INTPTR_SPEC  "I64d"
#  define KMP_UINTPTR_SPEC "I64u"
# endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
  typedef char               kmp_int8;
  typedef unsigned char      kmp_uint8;
  typedef short              kmp_int16;
  typedef unsigned short     kmp_uint16;
  typedef int                kmp_int32;
  typedef unsigned int       kmp_uint32;
  typedef long long          kmp_int64;
  typedef unsigned long long kmp_uint64;
# define KMP_INT32_SPEC  "d"
# define KMP_UINT32_SPEC "u"
# define KMP_INT64_SPEC  "lld"
# define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
# define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
# define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
# error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
# define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
# define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float  kmp_real32;
typedef double kmp_real64;
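/* Illustrative note (not from the original header): the *_SPEC macros hold
   bare printf length/type letters, meant to be spliced into a format string
   after a '%', e.g.:

       kmp_size_t nbytes = 1024;
       printf( "allocated %" KMP_SIZE_T_SPEC " bytes\n", nbytes );
*/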
#ifndef KMP_INTPTR
# define KMP_INTPTR 1
  typedef long          kmp_intptr_t;
  typedef unsigned long kmp_uintptr_t;
# define KMP_INTPTR_SPEC  "ld"
# define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
  typedef kmp_int64  kmp_int;
  typedef kmp_uint64 kmp_uint;
#else
  typedef kmp_int32  kmp_int;
  typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// Template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type.
template< typename T >
struct traits_t {
    typedef T           signed_t;
    typedef T           unsigned_t;
    typedef T           floating_t;
    static char const * spec;
};
// signed int
template<>
struct traits_t< signed int > {
    typedef signed int   signed_t;
    typedef unsigned int unsigned_t;
    typedef double       floating_t;
    static char const *  spec;
};
// unsigned int
template<>
struct traits_t< unsigned int > {
    typedef signed int   signed_t;
    typedef unsigned int unsigned_t;
    typedef double       floating_t;
    static char const *  spec;
};
// signed long long
template<>
struct traits_t< signed long long > {
    typedef signed long long   signed_t;
    typedef unsigned long long unsigned_t;
    typedef long double        floating_t;
    static char const *        spec;
};
// unsigned long long
template<>
struct traits_t< unsigned long long > {
    typedef signed long long   signed_t;
    typedef unsigned long long unsigned_t;
    typedef long double        floating_t;
    static char const *        spec;
};
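/* Illustrative sketch (an assumption, not code from this header): generic
   code uses traits_t to recover the print specifier and the signed/unsigned
   flavors of its template parameter, along these lines:

       template< typename T >
       void print_value( T v ) {
           char fmt[ 8 ];
           KMP_SNPRINTF( fmt, sizeof( fmt ), "%%%s", traits_t< T >::spec ); // "%d", "%llu", ...
           printf( fmt, v );
           typename traits_t< T >::unsigned_t u = (typename traits_t< T >::unsigned_t) v;
       }

   KMP_SNPRINTF comes from kmp_safe_c_api.h, included at the end of this file. */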
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4
# define __forceinline __inline
#endif

#define PAGE_SIZE (0x4000)

#if KMP_OS_LINUX
# define KMP_GET_PAGE_SIZE() getpagesize()
#else
# define KMP_GET_PAGE_SIZE() PAGE_SIZE
#endif

#define PAGE_ALIGNED(_addr) ( ! ((size_t) _addr & \
                                 (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x) (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

/* ---------- Support for cache alignment, padding, locking support ---------- */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */
#define CACHE_LINE 128            /* cache line size in bytes */

#if ( CACHE_LINE < 64 ) && ! defined( KMP_OS_DARWIN )
#warning CACHE_LINE is too small.
#endif

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

#if KMP_OS_UNIX && defined(__GNUC__)
# define KMP_DO_ALIGN(bytes)        __attribute__((aligned(bytes)))
# define KMP_ALIGN_CACHE            __attribute__((aligned(CACHE_LINE)))
# define KMP_ALIGN_CACHE_INTERNODE  __attribute__((aligned(INTERNODE_CACHE_LINE)))
# define KMP_ALIGN(bytes)           __attribute__((aligned(bytes)))
#else
# define KMP_DO_ALIGN(bytes)        __declspec( align(bytes) )
# define KMP_ALIGN_CACHE            __declspec( align(CACHE_LINE) )
# define KMP_ALIGN_CACHE_INTERNODE  __declspec( align(INTERNODE_CACHE_LINE) )
# define KMP_ALIGN(bytes)           __declspec( align(bytes) )
#endif

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
    kmp_no_fence,       /* No memory fence */
    kmp_acquire_fence,  /* Acquire (read) memory fence */
    kmp_release_fence,  /* Release (write) memory fence */
    kmp_full_fence      /* Full (read+write) memory fence */
};
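/* Usage sketch (an assumption, not code from this header): KMP_ALIGN_CACHE
   above is typically put on hot shared fields so each lands on its own cache
   line and false sharing is avoided, e.g.:

       typedef struct my_counters {                   // hypothetical struct
           KMP_ALIGN_CACHE volatile kmp_int32 enters;
           KMP_ALIGN_CACHE volatile kmp_int32 exits;
       } my_counters_t;
*/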
/* Synchronization primitives */

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)

# define KMP_TEST_THEN_INC32(p)      InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_INC_ACQ32(p)  InterlockedExchangeAdd( (volatile long *)(p), 1 )
# define KMP_TEST_THEN_ADD4_32(p)    InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), 4 )
# define KMP_TEST_THEN_DEC32(p)      InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)  InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_ADD32(p, v)   InterlockedExchangeAdd( (volatile long *)(p), (v) )

extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );

# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) InterlockedCompareExchange( (volatile long *)(p), (long)(sv), (long)(cv) )

# define KMP_XCHG_FIXED32(p, v) InterlockedExchange( (volatile long *)(p), (long)(v) )
# define KMP_XCHG_FIXED64(p, v) InterlockedExchange64( (volatile kmp_int64 *)(p), (kmp_int64)(v) )

inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = InterlockedExchange( (volatile long *)p, *(long *)&v );
    return *(kmp_real32*)&tmp;
}
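/* The exchange above swaps the raw 32-bit pattern of the float: the casts
   reinterpret bits between kmp_real32 and long so the Interlocked intrinsic
   can operate atomically; no numeric conversion takes place. */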
extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8  __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8  __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8   __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16  __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32  __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64  __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_ADD8(p, v)    __kmp_test_then_add8( (p), (v) )
# define KMP_TEST_THEN_OR8(p, v)     __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v)    __kmp_test_then_and8( (p), (v) )

# define KMP_TEST_THEN_INC64(p)      __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)  __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_64(p)    __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC64(p)      __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)  __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD64(p, v)   __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR32(p, v)    __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v)   __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v)    __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v)   __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)  __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)  __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )

# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)  __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )

# define KMP_XCHG_FIXED8(p, v)  __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_REAL64(p, v)  __kmp_xchg_real64( (p), (v) );

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
# define KMP_TEST_THEN_ADD8(p, v)    __sync_fetch_and_add( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_INC32(p)      __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_OR8(p, v)     __sync_fetch_and_or( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_AND8(p, v)    __sync_fetch_and_and( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p)  __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC64(p)      __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)  __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)    __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)    __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 4LL )
# define KMP_TEST_THEN_DEC32(p)      __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)  __sync_fetch_and_sub( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_DEC64(p)      __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)  __sync_fetch_and_sub( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_ADD32(p, v)   __sync_fetch_and_add( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)   __sync_fetch_and_add( (kmp_int64 *)(p), (v) )

# define KMP_TEST_THEN_OR32(p, v)    __sync_fetch_and_or( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_AND32(p, v)   __sync_fetch_and_and( (kmp_int32 *)(p), (v) )
# define KMP_TEST_THEN_OR64(p, v)    __sync_fetch_and_or( (kmp_int64 *)(p), (v) )
# define KMP_TEST_THEN_AND64(p, v)   __sync_fetch_and_and( (kmp_int64 *)(p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)  __sync_bool_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __sync_bool_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )
# define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)   __sync_bool_compare_and_swap( (volatile void **)(p), (void *)(cv), (void *)(sv) )

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)  __sync_val_compare_and_swap( (volatile kmp_uint8 *)(p), (kmp_uint8)(cv), (kmp_uint8)(sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint16 *)(p), (kmp_uint16)(cv), (kmp_uint16)(sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint32 *)(p), (kmp_uint32)(cv), (kmp_uint32)(sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __sync_val_compare_and_swap( (volatile kmp_uint64 *)(p), (kmp_uint64)(cv), (kmp_uint64)(sv) )

#define KMP_XCHG_FIXED8(p, v)  __sync_lock_test_and_set( (volatile kmp_uint8 *)(p), (kmp_uint8)(v) )
#define KMP_XCHG_FIXED16(p, v) __sync_lock_test_and_set( (volatile kmp_uint16 *)(p), (kmp_uint16)(v) )
#define KMP_XCHG_FIXED32(p, v) __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
#define KMP_XCHG_FIXED64(p, v) __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )

extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v )
{
    kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v );
    return *(kmp_real32*)&tmp;
}
inline kmp_real64 KMP_XCHG_REAL64( volatile kmp_real64 *p, kmp_real64 v )
{
    kmp_int64 tmp = __sync_lock_test_and_set( (kmp_int64*)p, *(kmp_int64*)&v );
    return *(kmp_real64*)&tmp;
}
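/* As with KMP_XCHG_REAL32 above, only the bit pattern is exchanged. Note
   that GCC documents __sync_lock_test_and_set as an atomic exchange with
   acquire (not full) barrier semantics. */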
#else

extern kmp_int32 __kmp_test_then_add32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int32 __kmp_test_then_and32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_or64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_int64 __kmp_test_then_and64( volatile kmp_int64 *p, kmp_int64 v );

extern kmp_int8  __kmp_compare_and_store8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int32 __kmp_compare_and_store64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );
extern kmp_int8  __kmp_compare_and_store_ret8( volatile kmp_int8 *p, kmp_int8 cv, kmp_int8 sv );
extern kmp_int16 __kmp_compare_and_store_ret16( volatile kmp_int16 *p, kmp_int16 cv, kmp_int16 sv );
extern kmp_int32 __kmp_compare_and_store_ret32( volatile kmp_int32 *p, kmp_int32 cv, kmp_int32 sv );
extern kmp_int64 __kmp_compare_and_store_ret64( volatile kmp_int64 *p, kmp_int64 cv, kmp_int64 sv );

extern kmp_int8   __kmp_xchg_fixed8( volatile kmp_int8 *p, kmp_int8 v );
extern kmp_int16  __kmp_xchg_fixed16( volatile kmp_int16 *p, kmp_int16 v );
extern kmp_int32  __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64  __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
# define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
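/* The KMP_* operation macros below map the generic names onto the
   assembly-implemented __kmp_* helpers declared above; the _ACQ/_REL
   variants intentionally resolve to the same helpers. */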
# define KMP_TEST_THEN_INC32(p)      __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_OR8(p, v)     __kmp_test_then_or8( (p), (v) )
# define KMP_TEST_THEN_AND8(p, v)    __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p)  __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p)      __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p)  __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_ADD4_32(p)    __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32( (p), 4 )
# define KMP_TEST_THEN_ADD4_64(p)    __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64( (p), 4LL )
# define KMP_TEST_THEN_DEC32(p)      __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC_ACQ32(p)  __kmp_test_then_add32( (p), -1 )
# define KMP_TEST_THEN_DEC64(p)      __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_DEC_ACQ64(p)  __kmp_test_then_add64( (p), -1LL )
# define KMP_TEST_THEN_ADD32(p, v)   __kmp_test_then_add32( (p), (v) )
# define KMP_TEST_THEN_ADD64(p, v)   __kmp_test_then_add64( (p), (v) )

# define KMP_TEST_THEN_OR32(p, v)    __kmp_test_then_or32( (p), (v) )
# define KMP_TEST_THEN_AND32(p, v)   __kmp_test_then_and32( (p), (v) )
# define KMP_TEST_THEN_OR64(p, v)    __kmp_test_then_or64( (p), (v) )
# define KMP_TEST_THEN_AND64(p, v)   __kmp_test_then_and64( (p), (v) )

# define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)  __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)  __kmp_compare_and_store8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL16(p, cv, sv) __kmp_compare_and_store16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL32(p, cv, sv) __kmp_compare_and_store32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_REL64(p, cv, sv) __kmp_compare_and_store64( (p), (cv), (sv) )

# if KMP_ARCH_X86
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store32( (volatile kmp_int32*)(p), (kmp_int32)(cv), (kmp_int32)(sv) )
# else /* 64 bit pointers */
#  define KMP_COMPARE_AND_STORE_PTR(p, cv, sv) __kmp_compare_and_store64( (volatile kmp_int64*)(p), (kmp_int64)(cv), (kmp_int64)(sv) )
# endif /* KMP_ARCH_X86 */

# define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)  __kmp_compare_and_store_ret8( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET16(p, cv, sv) __kmp_compare_and_store_ret16( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) __kmp_compare_and_store_ret32( (p), (cv), (sv) )
# define KMP_COMPARE_AND_STORE_RET64(p, cv, sv) __kmp_compare_and_store_ret64( (p), (cv), (sv) )

# define KMP_XCHG_FIXED8(p, v)  __kmp_xchg_fixed8( (volatile kmp_int8*)(p), (kmp_int8)(v) );
# define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16( (p), (v) );
# define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32( (p), (v) );
# define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64( (p), (v) );
# define KMP_XCHG_REAL32(p, v)  __kmp_xchg_real32( (p), (v) );
# define KMP_XCHG_REAL64(p, v)  __kmp_xchg_real64( (p), (v) );

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
# ifdef __ABSOFT_WIN
#  define KMP_MB()  asm ("nop")
#  define KMP_IMB() asm ("nop")
# else
#  define KMP_MB()  /* _asm{ nop } */
#  define KMP_IMB() /* _asm{ nop } */
# endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS || KMP_ARCH_MIPS64
# define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
# define KMP_MB()  /* nothing to do */
#endif

#ifndef KMP_IMB
# define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
# define KMP_ST_REL32(A,D) ( *(A) = (D) )
#endif

#ifndef KMP_ST_REL64
# define KMP_ST_REL64(A,D) ( *(A) = (D) )
#endif

#ifndef KMP_LD_ACQ32
# define KMP_LD_ACQ32(A)   ( *(A) )
#endif

#ifndef KMP_LD_ACQ64
# define KMP_LD_ACQ64(A)   ( *(A) )
#endif

#define TCR_1(a)            (a)
#define TCW_1(a,b)          (a) = (b)
#define TCR_4(a)            (a)
#define TCW_4(a,b)          (a) = (b)
#define TCI_4(a)            (++(a))
#define TCD_4(a)            (--(a))
#define TCR_8(a)            (a)
#define TCW_8(a,b)          (a) = (b)
#define TCI_8(a)            (++(a))
#define TCD_8(a)            (--(a))
#define TCR_SYNC_4(a)       (a)
#define TCW_SYNC_4(a,b)     (a) = (b)
#define TCX_SYNC_4(a,b,c)   KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a), (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a)       (a)
#define TCW_SYNC_8(a,b)     (a) = (b)
#define TCX_SYNC_8(a,b,c)   KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a), (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
/* 32-bit pointers */
#define TCR_PTR(a)          ((void *)TCR_4(a))
#define TCW_PTR(a,b)        TCW_4((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_4((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_4((a),(b),(c)))
#else /* 64 bit pointers */
#define TCR_PTR(a)          ((void *)TCR_8(a))
#define TCW_PTR(a,b)        TCW_8((a),(b))
#define TCR_SYNC_PTR(a)     ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a,b)   TCW_SYNC_8((a),(b))
#define TCX_SYNC_PTR(a,b,c) ((void *)TCX_SYNC_8((a),(b),(c)))
#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
# define FTN_TRUE  TRUE
#endif

#ifndef FTN_FALSE
# define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)( int *gtid, int *npr, ... );
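/* Illustrative sketch (an assumption, not code from this header):
   compiler-outlined parallel-region bodies are invoked through microtask_t,
   receiving the two thread-id pointers first and the region's shared
   arguments after them:

       void outlined_body( int *gtid, int *npr, void *shared_data );  // hypothetical
       microtask_t fn = (microtask_t)outlined_body;
*/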
#ifdef USE_VOLATILE_CAST
# define VOLATILE_CAST(x) (volatile x)
#else
# define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT_YIELD     __kmp_wait_yield_4
#define KMP_WAIT_YIELD_PTR __kmp_wait_yield_4_ptr
#define KMP_EQ  __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT  __kmp_lt_4
#define KMP_GE  __kmp_ge_4
#define KMP_LE  __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
   (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
# define STATIC_EFI2_WORKAROUND
#else
# define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#define KMP_USE_BGET 1

// Switches for OSS builds
#ifndef USE_SYSFS_INFO
# define USE_SYSFS_INFO 0
#endif
#ifndef USE_CMPXCHG_FIX
# define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#ifndef KMP_USE_DYNAMIC_LOCK // assumed guard
# define KMP_USE_DYNAMIC_LOCK 1
#endif

// Enable TSX if dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
# define KMP_USE_TSX (KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC
# ifdef KMP_USE_ADAPTIVE_LOCKS
#  undef KMP_USE_ADAPTIVE_LOCKS
# endif
# define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
# define KMP_HAVE_TICK_TIME (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
    kmp_warnings_off = 0,      /* No warnings */
    kmp_warnings_low,          /* Minimal warnings (default) */
    kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
    kmp_warnings_verbose       /* reserved */
};
#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

#include "kmp_safe_c_api.h"