#include "ompt-specific.h"

// C++ build port: on Windows the typedef'ed complex wrappers below are used
// in place of the C99 _Complex types.
#if defined(__cplusplus) && (KMP_OS_WINDOWS)

// Visual Studio cannot combine an alignment attribute with the default
// operator new, so KMP_DO_ALIGN() may expand to nothing on old compilers.
#define KMP_DO_ALIGN(alignment)

#if (_MSC_VER < 1600) && defined(_DEBUG)
// Work around the unresolved _DebugHeapTag external by temporarily
// unsetting _DEBUG for this header.
#define _DEBUG_TEMPORARILY_UNSET_
#endif

#include <complex>

// Complex division helper used by the wrapper classes below:
// computes lhs / rhs, keeping the precision of the left-hand side.
template <typename type_lhs, typename type_rhs>
std::complex<type_lhs> __kmp_lhs_div_rhs(const std::complex<type_lhs> &lhs,
                                         const std::complex<type_rhs> &rhs) {
  type_lhs a = lhs.real();
  type_lhs b = lhs.imag();
  type_rhs c = rhs.real();
  type_rhs d = rhs.imag();
  type_rhs den = c * c + d * d;
  type_rhs r = (a * c + b * d);
  type_rhs i = (b * c - a * d);
  std::complex<type_lhs> ret(r / den, i / den);
  return ret;
}
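// Illustrative sketch (not part of the runtime interface): __kmp_lhs_div_rhs
// applies the textbook identity (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i)/(c^2+d^2)
// and keeps the result in the precision of the left-hand side, e.g.
//
//   std::complex<double> x(1.0, 2.0);
//   std::complex<float> y(3.0f, 4.0f);
//   std::complex<double> q = __kmp_lhs_div_rhs(x, y); // approximately (0.44, 0.08)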
// complex8 (double precision complex)
struct __kmp_cmplx64_t : std::complex<double> {

  __kmp_cmplx64_t() : std::complex<double>() {}

  __kmp_cmplx64_t(const std::complex<double> &cd) : std::complex<double>(cd) {}

  void operator/=(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx64_t operator/(const __kmp_cmplx64_t &rhs) {
    std::complex<double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx64_t kmp_cmplx64;
// complex4 (single precision complex)
struct __kmp_cmplx32_t : std::complex<float> {

  __kmp_cmplx32_t() : std::complex<float>() {}

  __kmp_cmplx32_t(const std::complex<float> &cf) : std::complex<float>(cf) {}

  __kmp_cmplx32_t operator+(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs + rhs);
  }
  __kmp_cmplx32_t operator-(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs - rhs);
  }
  __kmp_cmplx32_t operator*(const __kmp_cmplx32_t &b) {
    std::complex<float> lhs = *this;
    std::complex<float> rhs = b;
    return (lhs * rhs);
  }

  // Mixed-precision forms: promote to kmp_cmplx64, operate, narrow back.
  __kmp_cmplx32_t operator+(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) + b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator-(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) - b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }
  __kmp_cmplx32_t operator*(const kmp_cmplx64 &b) {
    kmp_cmplx64 t = kmp_cmplx64(*this) * b;
    std::complex<double> d(t);
    std::complex<float> f(d);
    __kmp_cmplx32_t r(f);
    return r;
  }

  void operator/=(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const __kmp_cmplx32_t &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }

  void operator/=(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx32_t operator/(const kmp_cmplx64 &rhs) {
    std::complex<float> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx32_t kmp_cmplx32;
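// Illustrative sketch: the mixed-precision overloads above let a 4-byte
// complex LHS combine with an 8-byte complex RHS by promoting to double
// precision and narrowing the result back, e.g.
//
//   kmp_cmplx32 x(std::complex<float>(1.0f, 1.0f));
//   kmp_cmplx64 y(std::complex<double>(0.5, 0.25));
//   kmp_cmplx32 z = x * y; // computed in double precision, stored as float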
// complex10 (80-bit extended precision complex)
struct KMP_DO_ALIGN(16) __kmp_cmplx80_t : std::complex<long double> {

  __kmp_cmplx80_t() : std::complex<long double>() {}

  __kmp_cmplx80_t(const std::complex<long double> &cld)
      : std::complex<long double>(cld) {}

  void operator/=(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx80_t operator/(const __kmp_cmplx80_t &rhs) {
    std::complex<long double> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef KMP_DO_ALIGN(16) struct __kmp_cmplx80_t kmp_cmplx80;
// complex16 (quad precision complex, requires _Quad support)
struct __kmp_cmplx128_t : std::complex<_Quad> {

  __kmp_cmplx128_t() : std::complex<_Quad>() {}

  __kmp_cmplx128_t(const std::complex<_Quad> &cq) : std::complex<_Quad>(cq) {}

  void operator/=(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    *this = __kmp_lhs_div_rhs(lhs, rhs);
  }

  __kmp_cmplx128_t operator/(const __kmp_cmplx128_t &rhs) {
    std::complex<_Quad> lhs = *this;
    return __kmp_lhs_div_rhs(lhs, rhs);
  }
};
typedef struct __kmp_cmplx128_t kmp_cmplx128;
#ifdef _DEBUG_TEMPORARILY_UNSET_
#undef _DEBUG_TEMPORARILY_UNSET_
// restore the original _DEBUG setting
#define _DEBUG
#endif

#else
// Create shortcuts for the C99 complex types.
typedef float _Complex kmp_cmplx32;
typedef double _Complex kmp_cmplx64;
typedef long double _Complex kmp_cmplx80;
typedef _Quad _Complex kmp_cmplx128;
#endif
// Aligned wrappers for _Quad and complex-quad arguments on IA-32: the legacy
// entry points use 4-byte alignment, the newer ones 16-byte alignment.
#if KMP_ARCH_X86 && KMP_HAVE_QUAD

#pragma pack(push, 4)

struct KMP_DO_ALIGN(4) Quad_a4_t {
  _Quad q;

  Quad_a4_t() : q() {}
  Quad_a4_t(const _Quad &cq) : q(cq) {}

  Quad_a4_t operator+(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs + rhs);
  }
  Quad_a4_t operator-(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs - rhs);
  }
  Quad_a4_t operator*(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs * rhs);
  }
  Quad_a4_t operator/(const Quad_a4_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a4_t)(lhs / rhs);
  }
};
struct KMP_DO_ALIGN(4) kmp_cmplx128_a4_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a4_t() : q() {}

  kmp_cmplx128_a4_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a4_t operator+(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs + rhs);
  }
  kmp_cmplx128_a4_t operator-(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs - rhs);
  }
  kmp_cmplx128_a4_t operator*(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs * rhs);
  }
  kmp_cmplx128_a4_t operator/(const kmp_cmplx128_a4_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a4_t)(lhs / rhs);
  }
};

#pragma pack(pop)
// 16-byte aligned variants.
struct KMP_DO_ALIGN(16) Quad_a16_t {
  _Quad q;

  Quad_a16_t() : q() {}
  Quad_a16_t(const _Quad &cq) : q(cq) {}

  Quad_a16_t operator+(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs + rhs);
  }
  Quad_a16_t operator-(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs - rhs);
  }
  Quad_a16_t operator*(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs * rhs);
  }
  Quad_a16_t operator/(const Quad_a16_t &b) {
    _Quad lhs = (*this).q;
    _Quad rhs = b.q;
    return (Quad_a16_t)(lhs / rhs);
  }
};
struct KMP_DO_ALIGN(16) kmp_cmplx128_a16_t {
  kmp_cmplx128 q;

  kmp_cmplx128_a16_t() : q() {}

  kmp_cmplx128_a16_t(const kmp_cmplx128 &c128) : q(c128) {}

  kmp_cmplx128_a16_t operator+(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs + rhs);
  }
  kmp_cmplx128_a16_t operator-(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs - rhs);
  }
  kmp_cmplx128_a16_t operator*(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs * rhs);
  }
  kmp_cmplx128_a16_t operator/(const kmp_cmplx128_a16_t &b) {
    kmp_cmplx128 lhs = (*this).q;
    kmp_cmplx128 rhs = b.q;
    return (kmp_cmplx128_a16_t)(lhs / rhs);
  }
};

#endif // KMP_ARCH_X86 && KMP_HAVE_QUAD
// On IA-32 the legacy entry points take the 4-byte aligned wrappers;
// elsewhere they take the native quad types directly.
#if (KMP_ARCH_X86)
#define QUAD_LEGACY Quad_a4_t
#define CPLX128_LEG kmp_cmplx128_a4_t
#else
#define QUAD_LEGACY _Quad
#define CPLX128_LEG kmp_cmplx128
#endif

extern int __kmp_atomic_mode;

// Atomic entry points fall back to a queuing lock when no hardware primitive
// is available for the given type/operation.
typedef kmp_queuing_lock_t kmp_atomic_lock_t;
static inline void __kmp_acquire_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
#if OMPT_SUPPORT && OMPT_TRACE
  if (ompt_enabled && ompt_callbacks.ompt_callback(ompt_event_wait_atomic)) {
    ompt_callbacks.ompt_callback(ompt_event_wait_atomic)((ompt_wait_id_t)lck);
  }
#endif

  __kmp_acquire_queuing_lock(lck, gtid);

#if OMPT_SUPPORT && OMPT_TRACE
  if (ompt_enabled &&
      ompt_callbacks.ompt_callback(ompt_event_acquired_atomic)) {
    ompt_callbacks.ompt_callback(ompt_event_acquired_atomic)(
        (ompt_wait_id_t)lck);
  }
#endif
}

static inline int __kmp_test_atomic_lock(kmp_atomic_lock_t *lck,
                                         kmp_int32 gtid) {
  return __kmp_test_queuing_lock(lck, gtid);
}

static inline void __kmp_release_atomic_lock(kmp_atomic_lock_t *lck,
                                             kmp_int32 gtid) {
  __kmp_release_queuing_lock(lck, gtid);
#if OMPT_SUPPORT && OMPT_BLAME
  if (ompt_enabled && ompt_callbacks.ompt_callback(ompt_event_release_atomic)) {
    ompt_callbacks.ompt_callback(ompt_event_release_atomic)(
        (ompt_wait_id_t)lck);
  }
#endif
}

static inline void __kmp_init_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_init_queuing_lock(lck);
}

static inline void __kmp_destroy_atomic_lock(kmp_atomic_lock_t *lck) {
  __kmp_destroy_queuing_lock(lck);
}
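// Illustrative sketch only (the lock object and helper below are hypothetical,
// not part of this header): a locked read-modify-write brackets the critical
// section with the helpers above.
//
//   static kmp_atomic_lock_t my_lock; // set up once via __kmp_init_atomic_lock(&my_lock)
//   static inline void locked_add(int gtid, long double *p, long double v) {
//     __kmp_acquire_atomic_lock(&my_lock, gtid);
//     *p += v; // protected update
//     __kmp_release_atomic_lock(&my_lock, gtid);
//   }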
// Global locks, one per operand kind (suffix: size in bytes plus
// i = integer, r = real, c = complex).
extern kmp_atomic_lock_t __kmp_atomic_lock; // user-coded atomics (GNU compat)
extern kmp_atomic_lock_t __kmp_atomic_lock_1i; // 1-byte integers
extern kmp_atomic_lock_t __kmp_atomic_lock_2i; // 2-byte integers
extern kmp_atomic_lock_t __kmp_atomic_lock_4i; // 4-byte integers
extern kmp_atomic_lock_t __kmp_atomic_lock_4r; // 4-byte reals
extern kmp_atomic_lock_t __kmp_atomic_lock_8i; // 8-byte integers
extern kmp_atomic_lock_t __kmp_atomic_lock_8r; // 8-byte reals
extern kmp_atomic_lock_t __kmp_atomic_lock_8c; // 8-byte complex
extern kmp_atomic_lock_t __kmp_atomic_lock_10r; // long double
extern kmp_atomic_lock_t __kmp_atomic_lock_16r; // _Quad
extern kmp_atomic_lock_t __kmp_atomic_lock_16c; // double complex
extern kmp_atomic_lock_t __kmp_atomic_lock_20c; // long double complex
extern kmp_atomic_lock_t __kmp_atomic_lock_32c; // _Quad complex
// Below routines for atomic UPDATE are listed: *lhs = *lhs op rhs.
// Entry point names encode the type of *lhs (fixed1/2/4/8, float4/8/10/16,
// cmplx4/8/10/16, with a trailing "u" for unsigned) and the operation.

// 1-byte (char)
void __kmpc_atomic_fixed1_add(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_andb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_mul(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orb(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_sub(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_xor(ident_t *id_ref, int gtid, char *lhs, char rhs);

// 2-byte (short)
void __kmpc_atomic_fixed2_add(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_andb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_mul(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orb(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_sub(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_xor(ident_t *id_ref, int gtid, short *lhs, short rhs);

// 4- and 8-byte add / sub
void __kmpc_atomic_fixed4_add(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_sub(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_float4_add(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_sub(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_fixed8_add(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_sub(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float8_add(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_sub(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);

// 4-byte fixed
void __kmpc_atomic_fixed4_andb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_mul(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orb(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_xor(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);

// 8-byte fixed
void __kmpc_atomic_fixed8_andb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_mul(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orb(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_xor(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);

// 4- and 8-byte float mul / div
void __kmpc_atomic_float4_div(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_mul(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_div(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_mul(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);

// logical .AND. / .OR.
void __kmpc_atomic_fixed1_andl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_orl(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_andl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_orl(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_andl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_orl(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_andl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_orl(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);

// MIN / MAX
void __kmpc_atomic_fixed1_max(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_min(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_max(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_min(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_max(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_min(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_max(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_min(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_max(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float4_min(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_max(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float8_min(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
// _Quad MIN / MAX (require KMP_HAVE_QUAD)
void __kmpc_atomic_float16_max(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_min(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
// IA-32 only: 16-byte aligned _Quad arguments
void __kmpc_atomic_float16_max_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_min_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);

// .NEQV. / .EQV.
void __kmpc_atomic_fixed1_neqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_neqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_neqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_neqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed1_eqv(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_eqv(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_eqv(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_eqv(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);

// long double
void __kmpc_atomic_float10_add(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_sub(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_mul(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
// _Quad (require KMP_HAVE_QUAD)
void __kmpc_atomic_float16_add(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_sub(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_mul(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
// IA-32 only: 16-byte aligned _Quad arguments
void __kmpc_atomic_float16_add_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_sub_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_mul_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);

// complex types
void __kmpc_atomic_cmplx4_add(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_sub(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_mul(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_add(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_sub(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_mul(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_add(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_sub(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_mul(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_add(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_sub(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_mul(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
// IA-32 only: 16-byte aligned complex-quad arguments
void __kmpc_atomic_cmplx16_add_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_mul_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16(ident_t *id_ref, int gtid,
                                   kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
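// Illustrative mapping (sketch, not tied to a particular compiler version):
// for a directive such as
//
//   #pragma omp atomic
//   x += y;   // x: 32-bit signed integer
//
// a compiler targeting this interface would typically emit
//
//   __kmpc_atomic_fixed4_add(&loc, gtid, &x, y);
//
// where "fixed4" encodes the 4-byte integer type of x and "add" the operation.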
// OpenMP 4.0: x = expr binop x for non-commutative operations.
// Supported only on IA-32 architecture and Intel(R) 64.
#if OMP_40_ENABLED
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

void __kmpc_atomic_fixed1_sub_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_div_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_div_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed1_shl_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1_shr_rev(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed1u_shr_rev(ident_t *id_ref, int gtid, unsigned char *lhs, unsigned char rhs);
void __kmpc_atomic_fixed2_sub_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_div_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_div_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed2_shl_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2_shr_rev(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed2u_shr_rev(ident_t *id_ref, int gtid, unsigned short *lhs, unsigned short rhs);
void __kmpc_atomic_fixed4_sub_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_div_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_div_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed4_shl_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4_shr_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed4u_shr_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs);
void __kmpc_atomic_fixed8_sub_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_div_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_div_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_fixed8_shl_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8_shr_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_fixed8u_shr_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs);
void __kmpc_atomic_float4_sub_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float4_div_rev(ident_t *id_ref, int gtid, float *lhs, float rhs);
void __kmpc_atomic_float8_sub_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float8_div_rev(ident_t *id_ref, int gtid, double *lhs, double rhs);
void __kmpc_atomic_float10_sub_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float10_div_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_float16_sub_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_float16_div_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
#endif // KMP_HAVE_QUAD
void __kmpc_atomic_cmplx4_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx4_div_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx8_div_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_sub_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx10_div_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
#if KMP_HAVE_QUAD
void __kmpc_atomic_cmplx16_sub_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
void __kmpc_atomic_cmplx16_div_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
// IA-32 only: 16-byte aligned variants.
void __kmpc_atomic_float16_sub_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_float16_div_a16_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_sub_a16_rev(ident_t *id_ref, int gtid,
                                       kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
void __kmpc_atomic_cmplx16_div_a16_rev(ident_t *id_ref, int gtid,
                                       kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
#endif // KMP_HAVE_QUAD
#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64
#endif // OMP_40_ENABLED

// routines for mixed types: RHS=float8 (double)
void __kmpc_atomic_fixed1_mul_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed1_div_float8(ident_t *id_ref, int gtid, char *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_mul_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed2_div_float8(ident_t *id_ref, int gtid, short *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_mul_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed4_div_float8(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_mul_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_fixed8_div_float8(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_add_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_sub_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_mul_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float4_div_float8(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real64 rhs);
#if KMP_HAVE_QUAD
// routines for mixed types: RHS=float16 (_Quad)
void __kmpc_atomic_fixed1_add_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_add_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_sub_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_mul_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_mul_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);

void __kmpc_atomic_fixed2_add_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_add_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_mul_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_mul_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);

void __kmpc_atomic_fixed4_add_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_add_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_mul_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_mul_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);

void __kmpc_atomic_fixed8_add_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_add_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_mul_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_mul_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);

void __kmpc_atomic_float4_add_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_mul_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs);

void __kmpc_atomic_float8_add_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_mul_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs);

void __kmpc_atomic_float10_add_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_mul_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);

// Reverse operations (OpenMP 4.0): x = expr binop x
void __kmpc_atomic_fixed1_sub_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1_div_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs);
void __kmpc_atomic_fixed1u_div_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_sub_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_sub_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2_div_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs);
void __kmpc_atomic_fixed2u_div_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4_div_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed4u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_sub_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_sub_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8_div_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs);
void __kmpc_atomic_fixed8u_div_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs);
void __kmpc_atomic_float4_sub_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float4_div_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs);
void __kmpc_atomic_float8_sub_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float8_div_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs);
void __kmpc_atomic_float10_sub_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
void __kmpc_atomic_float10_div_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs);
#endif // KMP_HAVE_QUAD

void __kmpc_atomic_cmplx4_add_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_sub_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_mul_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx4_div_cmplx8(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_1(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_2(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_4(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_8(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                     void (*f)(void *, void *, void *));
void __kmpc_atomic_10(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_16(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_20(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
void __kmpc_atomic_32(ident_t *id_ref, int gtid, void *lhs, void *rhs,
                      void (*f)(void *, void *, void *));
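// Illustrative sketch (the combiner below is hypothetical, and the callback
// argument order is assumed to be (result, lhs, rhs); check kmp_atomic.cpp
// before relying on it): __kmpc_atomic_8 applies a user-supplied combiner to
// an 8-byte location under the runtime's lock, e.g.
//
//   static void max_combine(void *out, void *a, void *b) {
//     *(double *)out = (*(double *)a > *(double *)b) ? *(double *)a : *(double *)b;
//   }
//   // ...
//   __kmpc_atomic_8(&loc, gtid, &x, &y, max_combine); // x = max(x, y)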
#if KMP_ARCH_X86 || KMP_ARCH_X86_64

// Below routines for atomic READ are listed.
char __kmpc_atomic_fixed1_rd(ident_t *id_ref, int gtid, char *loc);
short __kmpc_atomic_fixed2_rd(ident_t *id_ref, int gtid, short *loc);
kmp_int32 __kmpc_atomic_fixed4_rd(ident_t *id_ref, int gtid, kmp_int32 *loc);
kmp_int64 __kmpc_atomic_fixed8_rd(ident_t *id_ref, int gtid, kmp_int64 *loc);
kmp_real32 __kmpc_atomic_float4_rd(ident_t *id_ref, int gtid, kmp_real32 *loc);
kmp_real64 __kmpc_atomic_float8_rd(ident_t *id_ref, int gtid, kmp_real64 *loc);
long double __kmpc_atomic_float10_rd(ident_t *id_ref, int gtid, long double *loc);
QUAD_LEGACY __kmpc_atomic_float16_rd(ident_t *id_ref, int gtid, QUAD_LEGACY *loc);
// On Windows the cmplx4 read returns its value through the extra `out`
// parameter instead of the return value.
#if (KMP_OS_WINDOWS)
void __kmpc_atomic_cmplx4_rd(kmp_cmplx32 *out, ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#else
kmp_cmplx32 __kmpc_atomic_cmplx4_rd(ident_t *id_ref, int gtid, kmp_cmplx32 *loc);
#endif
kmp_cmplx64 __kmpc_atomic_cmplx8_rd(ident_t *id_ref, int gtid, kmp_cmplx64 *loc);
kmp_cmplx80 __kmpc_atomic_cmplx10_rd(ident_t *id_ref, int gtid, kmp_cmplx80 *loc);
CPLX128_LEG __kmpc_atomic_cmplx16_rd(ident_t *id_ref, int gtid, CPLX128_LEG *loc);
// IA-32 only: 16-byte aligned variants.
Quad_a16_t __kmpc_atomic_float16_a16_rd(ident_t *id_ref, int gtid, Quad_a16_t *loc);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_rd(ident_t *id_ref, int gtid,
                                                kmp_cmplx128_a16_t *loc);
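// Illustrative mapping (sketch): for
//
//   #pragma omp atomic read
//   v = x;   // x: 64-bit signed integer
//
// a compiler targeting this interface would typically emit
//
//   v = __kmpc_atomic_fixed8_rd(&loc, gtid, &x);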
// Below routines for atomic WRITE are listed.
void __kmpc_atomic_fixed1_wr(ident_t *id_ref, int gtid, char *lhs, char rhs);
void __kmpc_atomic_fixed2_wr(ident_t *id_ref, int gtid, short *lhs, short rhs);
void __kmpc_atomic_fixed4_wr(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
void __kmpc_atomic_fixed8_wr(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
void __kmpc_atomic_float4_wr(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs);
void __kmpc_atomic_float8_wr(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs);
void __kmpc_atomic_float10_wr(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
void __kmpc_atomic_float16_wr(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
void __kmpc_atomic_cmplx4_wr(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs, kmp_cmplx32 rhs);
void __kmpc_atomic_cmplx8_wr(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
void __kmpc_atomic_cmplx10_wr(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
void __kmpc_atomic_cmplx16_wr(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
// IA-32 only: 16-byte aligned variants.
void __kmpc_atomic_float16_a16_wr(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
void __kmpc_atomic_cmplx16_a16_wr(ident_t *id_ref, int gtid,
                                  kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
// Routines for ATOMIC CAPTURE: update *lhs and return a captured value; the
// `flag` argument selects whether the captured value is taken before or after
// the update.
char __kmpc_atomic_fixed1_add_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_andb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt(ident_t *id_ref, int gtid, unsigned char *lhs,
                                            unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orb_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt(ident_t *id_ref, int gtid, unsigned char *lhs,
                                            unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_xor_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);

short __kmpc_atomic_fixed2_add_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_andb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt(ident_t *id_ref, int gtid, unsigned short *lhs,
                                             unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orb_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt(ident_t *id_ref, int gtid, unsigned short *lhs,
                                             unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_xor_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);

kmp_int32 __kmpc_atomic_fixed4_add_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_add_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_sub_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_add_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_add_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_sub_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);

kmp_int32 __kmpc_atomic_fixed4_andb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orb_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_xor_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);

kmp_int64 __kmpc_atomic_fixed8_andb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orb_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_xor_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);

kmp_real32 __kmpc_atomic_float4_div_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_mul_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_div_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_mul_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);

char __kmpc_atomic_fixed1_andl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_orl_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_andl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_orl_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_andl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_orl_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_andl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_orl_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);

char __kmpc_atomic_fixed1_max_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_min_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_max_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_min_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_max_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_min_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_max_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_min_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_max_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real32 __kmpc_atomic_float4_min_cpt(ident_t *id_ref, int gtid, kmp_real32 *lhs, kmp_real32 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_max_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
kmp_real64 __kmpc_atomic_float8_min_cpt(ident_t *id_ref, int gtid, kmp_real64 *lhs, kmp_real64 rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_max_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_min_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);

// .NEQV. / .EQV.
char __kmpc_atomic_fixed1_neqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_neqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_neqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_neqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
char __kmpc_atomic_fixed1_eqv_cpt(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
short __kmpc_atomic_fixed2_eqv_cpt(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_eqv_cpt(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_eqv_cpt(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);

long double __kmpc_atomic_float10_add_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_add_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_mul_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);

// cmplx4 capture routines return the captured value via `out`.
void __kmpc_atomic_cmplx4_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                  kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_add_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_mul_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_add_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_mul_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);

// IA-32 only: 16-byte aligned variants.
Quad_a16_t __kmpc_atomic_float16_add_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_mul_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_max_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_min_a16_cpt(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_add_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_sub_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_mul_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_div_a16_cpt(ident_t *id_ref, int gtid,
                                                     kmp_cmplx128_a16_t *lhs,
                                                     kmp_cmplx128_a16_t rhs, int flag);
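// Illustrative sketch of the capture `flag` (commonly, flag selects between
// capturing the value before or after the update; verify the exact convention
// in kmp_atomic.cpp before relying on it):
//
//   int x = 10, v;
//   v = __kmpc_atomic_fixed4_add_cpt(&loc, gtid, &x, 5, 0); // one setting captures the old x
//   v = __kmpc_atomic_fixed4_add_cpt(&loc, gtid, &x, 5, 1); // the other captures the new x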
void __kmpc_atomic_start(void);
void __kmpc_atomic_end(void);
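// Illustrative sketch: __kmpc_atomic_start()/__kmpc_atomic_end() bracket an
// update that has no specialized entry point, serializing it against every
// other atomic routed through the same global lock.
//
//   __kmpc_atomic_start();
//   user_struct.counter += delta; // hypothetical user data, not in this header
//   __kmpc_atomic_end();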
// OpenMP 4.0: capture with the operands reversed (*lhs = rhs op *lhs).
#if OMP_40_ENABLED
char __kmpc_atomic_fixed1_sub_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs,
                                                unsigned char rhs, int flag);
char __kmpc_atomic_fixed1_shl_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
char __kmpc_atomic_fixed1_shr_cpt_rev(ident_t *id_ref, int gtid, char *lhs, char rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned char *lhs,
                                                unsigned char rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs,
                                                 unsigned short rhs, int flag);
short __kmpc_atomic_fixed2_shl_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
short __kmpc_atomic_fixed2_shr_cpt_rev(ident_t *id_ref, int gtid, short *lhs, short rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_shr_cpt_rev(ident_t *id_ref, int gtid, unsigned short *lhs,
                                                 unsigned short rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint32 *lhs, kmp_uint32 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shl_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_shr_cpt_rev(ident_t *id_ref, int gtid, kmp_uint64 *lhs, kmp_uint64 rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev(ident_t *id_ref, int gtid, float *lhs, float rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev(ident_t *id_ref, int gtid, double *lhs, double rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev(ident_t *id_ref, int gtid, long double *lhs, long double rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_sub_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
QUAD_LEGACY __kmpc_atomic_float16_div_cpt_rev(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs, int flag);
// cmplx4 capture routines return the captured value via `out`.
void __kmpc_atomic_cmplx4_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                      kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
void __kmpc_atomic_cmplx4_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                                      kmp_cmplx32 rhs, kmp_cmplx32 *out, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx64 __kmpc_atomic_cmplx8_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_sub_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
kmp_cmplx80 __kmpc_atomic_cmplx10_div_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_sub_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
CPLX128_LEG __kmpc_atomic_cmplx16_div_cpt_rev(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs, int flag);
// IA-32 only: 16-byte aligned variants.
Quad_a16_t __kmpc_atomic_float16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
Quad_a16_t __kmpc_atomic_float16_div_a16_cpt_rev(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs, int flag);
kmp_cmplx128_a16_t
__kmpc_atomic_cmplx16_sub_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs,
                                      kmp_cmplx128_a16_t rhs, int flag);
kmp_cmplx128_a16_t
__kmpc_atomic_cmplx16_div_a16_cpt_rev(ident_t *id_ref, int gtid, kmp_cmplx128_a16_t *lhs,
                                      kmp_cmplx128_a16_t rhs, int flag);
// Routines for atomic SWAP: store rhs into *lhs and return the old value.
char __kmpc_atomic_fixed1_swp(ident_t *id_ref, int gtid, char *lhs, char rhs);
short __kmpc_atomic_fixed2_swp(ident_t *id_ref, int gtid, short *lhs, short rhs);
kmp_int32 __kmpc_atomic_fixed4_swp(ident_t *id_ref, int gtid, kmp_int32 *lhs, kmp_int32 rhs);
kmp_int64 __kmpc_atomic_fixed8_swp(ident_t *id_ref, int gtid, kmp_int64 *lhs, kmp_int64 rhs);
float __kmpc_atomic_float4_swp(ident_t *id_ref, int gtid, float *lhs, float rhs);
double __kmpc_atomic_float8_swp(ident_t *id_ref, int gtid, double *lhs, double rhs);
long double __kmpc_atomic_float10_swp(ident_t *id_ref, int gtid, long double *lhs, long double rhs);
QUAD_LEGACY __kmpc_atomic_float16_swp(ident_t *id_ref, int gtid, QUAD_LEGACY *lhs, QUAD_LEGACY rhs);
// cmplx4 swap returns the old value via `out`.
void __kmpc_atomic_cmplx4_swp(ident_t *id_ref, int gtid, kmp_cmplx32 *lhs,
                              kmp_cmplx32 rhs, kmp_cmplx32 *out);
kmp_cmplx64 __kmpc_atomic_cmplx8_swp(ident_t *id_ref, int gtid, kmp_cmplx64 *lhs, kmp_cmplx64 rhs);
kmp_cmplx80 __kmpc_atomic_cmplx10_swp(ident_t *id_ref, int gtid, kmp_cmplx80 *lhs, kmp_cmplx80 rhs);
CPLX128_LEG __kmpc_atomic_cmplx16_swp(ident_t *id_ref, int gtid, CPLX128_LEG *lhs, CPLX128_LEG rhs);
// IA-32 only: 16-byte aligned variants.
Quad_a16_t __kmpc_atomic_float16_a16_swp(ident_t *id_ref, int gtid, Quad_a16_t *lhs, Quad_a16_t rhs);
kmp_cmplx128_a16_t __kmpc_atomic_cmplx16_a16_swp(ident_t *id_ref, int gtid,
                                                 kmp_cmplx128_a16_t *lhs, kmp_cmplx128_a16_t rhs);
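// Illustrative mapping (sketch): for a capture-write such as
//
//   #pragma omp atomic capture
//   { v = x; x = expr; }   // x: 32-bit signed integer
//
// a compiler targeting this interface would typically emit
//
//   v = __kmpc_atomic_fixed4_swp(&loc, gtid, &x, expr);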
#if KMP_HAVE_QUAD
// Capture routines for mixed types: RHS=float16 (_Quad).
char __kmpc_atomic_fixed1_add_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_sub_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_mul_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);

short __kmpc_atomic_fixed2_add_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_mul_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_add_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_mul_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);

kmp_int32 __kmpc_atomic_fixed4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);

kmp_int64 __kmpc_atomic_fixed8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_add_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);

float __kmpc_atomic_float4_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real32 *lhs, _Quad rhs, int flag);

double __kmpc_atomic_float8_add_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_mul_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_fp(ident_t *id_ref, int gtid, kmp_real64 *lhs, _Quad rhs, int flag);

long double __kmpc_atomic_float10_add_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_mul_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);

// Capture-reverse with RHS=float16 (_Quad).
char __kmpc_atomic_fixed1_sub_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
char __kmpc_atomic_fixed1_div_cpt_rev_fp(ident_t *id_ref, int gtid, char *lhs, _Quad rhs, int flag);
unsigned char __kmpc_atomic_fixed1u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned char *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_sub_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
short __kmpc_atomic_fixed2_div_cpt_rev_fp(ident_t *id_ref, int gtid, short *lhs, _Quad rhs, int flag);
unsigned short __kmpc_atomic_fixed2u_div_cpt_rev_fp(ident_t *id_ref, int gtid, unsigned short *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int32 __kmpc_atomic_fixed4_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int32 *lhs, _Quad rhs, int flag);
kmp_uint32 __kmpc_atomic_fixed4u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint32 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_sub_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
kmp_int64 __kmpc_atomic_fixed8_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_int64 *lhs, _Quad rhs, int flag);
kmp_uint64 __kmpc_atomic_fixed8u_div_cpt_rev_fp(ident_t *id_ref, int gtid, kmp_uint64 *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_sub_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
float __kmpc_atomic_float4_div_cpt_rev_fp(ident_t *id_ref, int gtid, float *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_sub_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
double __kmpc_atomic_float8_div_cpt_rev_fp(ident_t *id_ref, int gtid, double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_sub_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
long double __kmpc_atomic_float10_div_cpt_rev_fp(ident_t *id_ref, int gtid, long double *lhs, _Quad rhs, int flag);
#endif // KMP_HAVE_QUAD
#endif // OMP_40_ENABLED
#endif // KMP_ARCH_X86 || KMP_ARCH_X86_64