13#include "kmp_atomic.h"
// Short aliases for unsigned integer types; used in the argument lists of the
// unsigned mixed-type atomic routines below (e.g. the fixed1u/fixed2u variants
// of ATOMIC_CMPXCHG_MIX).
typedef unsigned char uchar;
typedef unsigned short ushort;
565#ifndef KMP_GOMP_COMPAT
566int __kmp_atomic_mode = 1;
568int __kmp_atomic_mode = 2;
// Global locks backing the critical-section fallback paths of the atomic
// routines. There is one lock per operand size/category (suffix = byte size
// plus i[nteger] / r[eal] / c[omplex]); the OP_*CRITICAL macros pick one via
// the ATOMIC_LOCK##LCK_ID token-pasting aliases, so atomics on unrelated
// operand types do not contend on a single lock.
kmp_atomic_lock_t __kmp_atomic_lock; // generic lock, id 0 (GOMP-compat critical path)
kmp_atomic_lock_t __kmp_atomic_lock_1i; // 1-byte integers
kmp_atomic_lock_t __kmp_atomic_lock_2i; // 2-byte integers
kmp_atomic_lock_t __kmp_atomic_lock_4i; // 4-byte integers
kmp_atomic_lock_t __kmp_atomic_lock_4r; // 4-byte reals
kmp_atomic_lock_t __kmp_atomic_lock_8i; // 8-byte integers
kmp_atomic_lock_t __kmp_atomic_lock_8r; // 8-byte reals
kmp_atomic_lock_t __kmp_atomic_lock_8c; // 8-byte complex (kmp_cmplx32)
kmp_atomic_lock_t __kmp_atomic_lock_10r; // 10-byte reals (long double)
kmp_atomic_lock_t __kmp_atomic_lock_16r; // 16-byte reals (QUAD_LEGACY / Quad_a16_t)
kmp_atomic_lock_t __kmp_atomic_lock_16c; // 16-byte complex (kmp_cmplx64)
kmp_atomic_lock_t __kmp_atomic_lock_20c; // 20-byte complex (kmp_cmplx80)
kmp_atomic_lock_t __kmp_atomic_lock_32c; // 32-byte complex (CPLX128_LEG / kmp_cmplx128_a16_t)
// Qualifier applied to temporaries that are re-read on every iteration of the
// compare-and-swap retry loops (see MIN_MAX_CMPXCHG, OP_CMPXCHG_REV,
// OP_CMPXCHG_READ); forces a genuine memory load each time.
#define KMP_ATOMIC_VOLATILE volatile
607#if (KMP_ARCH_X86) && KMP_HAVE_QUAD
609static inline Quad_a4_t operator+(Quad_a4_t &lhs, Quad_a4_t &rhs) {
610 return lhs.q + rhs.q;
612static inline Quad_a4_t operator-(Quad_a4_t &lhs, Quad_a4_t &rhs) {
613 return lhs.q - rhs.q;
615static inline Quad_a4_t operator*(Quad_a4_t &lhs, Quad_a4_t &rhs) {
616 return lhs.q * rhs.q;
618static inline Quad_a4_t operator/(Quad_a4_t &lhs, Quad_a4_t &rhs) {
619 return lhs.q / rhs.q;
621static inline bool operator<(Quad_a4_t &lhs, Quad_a4_t &rhs) {
622 return lhs.q < rhs.q;
624static inline bool operator>(Quad_a4_t &lhs, Quad_a4_t &rhs) {
625 return lhs.q > rhs.q;
628static inline Quad_a16_t operator+(Quad_a16_t &lhs, Quad_a16_t &rhs) {
629 return lhs.q + rhs.q;
631static inline Quad_a16_t operator-(Quad_a16_t &lhs, Quad_a16_t &rhs) {
632 return lhs.q - rhs.q;
634static inline Quad_a16_t operator*(Quad_a16_t &lhs, Quad_a16_t &rhs) {
635 return lhs.q * rhs.q;
637static inline Quad_a16_t operator/(Quad_a16_t &lhs, Quad_a16_t &rhs) {
638 return lhs.q / rhs.q;
640static inline bool operator<(Quad_a16_t &lhs, Quad_a16_t &rhs) {
641 return lhs.q < rhs.q;
643static inline bool operator>(Quad_a16_t &lhs, Quad_a16_t &rhs) {
644 return lhs.q > rhs.q;
647static inline kmp_cmplx128_a4_t operator+(kmp_cmplx128_a4_t &lhs,
648 kmp_cmplx128_a4_t &rhs) {
649 return lhs.q + rhs.q;
651static inline kmp_cmplx128_a4_t operator-(kmp_cmplx128_a4_t &lhs,
652 kmp_cmplx128_a4_t &rhs) {
653 return lhs.q - rhs.q;
655static inline kmp_cmplx128_a4_t operator*(kmp_cmplx128_a4_t &lhs,
656 kmp_cmplx128_a4_t &rhs) {
657 return lhs.q * rhs.q;
659static inline kmp_cmplx128_a4_t operator/(kmp_cmplx128_a4_t &lhs,
660 kmp_cmplx128_a4_t &rhs) {
661 return lhs.q / rhs.q;
664static inline kmp_cmplx128_a16_t operator+(kmp_cmplx128_a16_t &lhs,
665 kmp_cmplx128_a16_t &rhs) {
666 return lhs.q + rhs.q;
668static inline kmp_cmplx128_a16_t operator-(kmp_cmplx128_a16_t &lhs,
669 kmp_cmplx128_a16_t &rhs) {
670 return lhs.q - rhs.q;
672static inline kmp_cmplx128_a16_t operator*(kmp_cmplx128_a16_t &lhs,
673 kmp_cmplx128_a16_t &rhs) {
674 return lhs.q * rhs.q;
676static inline kmp_cmplx128_a16_t operator/(kmp_cmplx128_a16_t &lhs,
677 kmp_cmplx128_a16_t &rhs) {
678 return lhs.q / rhs.q;
688#define KMP_CHECK_GTID \
689 if (gtid == KMP_GTID_UNKNOWN) { \
690 gtid = __kmp_entry_gtid(); \
// Emits the common prologue of an atomic-update entry point:
//   RET_TYPE __kmpc_atomic_<TYPE_ID>_<OP_ID>(ident_t *id_ref, int gtid,
//                                            TYPE *lhs, TYPE rhs) {
// followed by a serial-init assertion and a trace hook. The brace opened here
// is intentionally left unclosed by this macro; the matching "}" is supplied
// by the macros that invoke ATOMIC_BEGIN (their closing lines are outside
// this view).
#define ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, RET_TYPE)                          \
  RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid,       \
                                             TYPE *lhs, TYPE rhs) {           \
    KMP_DEBUG_ASSERT(__kmp_init_serial);                                      \
    KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
// Aliases mapping each lock-id suffix onto its global lock object. The
// OP_*CRITICAL macros select a lock purely by token pasting
// (ATOMIC_LOCK##LCK_ID), which is why these names must differ only in the
// LCK_ID suffix.
#define ATOMIC_LOCK0 __kmp_atomic_lock
#define ATOMIC_LOCK1i __kmp_atomic_lock_1i
#define ATOMIC_LOCK2i __kmp_atomic_lock_2i
#define ATOMIC_LOCK4i __kmp_atomic_lock_4i
#define ATOMIC_LOCK4r __kmp_atomic_lock_4r
#define ATOMIC_LOCK8i __kmp_atomic_lock_8i
#define ATOMIC_LOCK8r __kmp_atomic_lock_8r
#define ATOMIC_LOCK8c __kmp_atomic_lock_8c
#define ATOMIC_LOCK10r __kmp_atomic_lock_10r
#define ATOMIC_LOCK16r __kmp_atomic_lock_16r
#define ATOMIC_LOCK16c __kmp_atomic_lock_16c
#define ATOMIC_LOCK20c __kmp_atomic_lock_20c
#define ATOMIC_LOCK32c __kmp_atomic_lock_32c
726#define OP_CRITICAL(OP, LCK_ID) \
727 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
731 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
// Lock-guarded update: performs *lhs = (TYPE)(*lhs OP rhs) while holding the
// per-type lock selected by LCK_ID. This is the universal fallback used when
// a lock-free compare-and-swap path is unavailable (e.g. when the operand
// fails the alignment check in the non-x86 ATOMIC_* variants below).
#define OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID)                                  \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                      \
  (*lhs) = (TYPE)((*lhs)OP rhs);                                              \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
760#ifdef KMP_GOMP_COMPAT
761#define OP_GOMP_CRITICAL(OP, FLAG) \
762 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
764 OP_CRITICAL(OP, 0); \
768#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG) \
769 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
771 OP_UPDATE_CRITICAL(TYPE, OP, 0); \
775#define OP_GOMP_CRITICAL(OP, FLAG)
776#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG)
// Back-off executed inside the compare-and-swap retry loops. _mm_delay_32 is
// an Intel MIC (Xeon Phi) delay intrinsic; presumably this definition is
// guarded by a MIC-specific #if whose lines are outside this view -- confirm
// against the full file before assuming it applies to all targets.
#define KMP_DO_PAUSE _mm_delay_32(1)
790#define OP_CMPXCHG(TYPE, BITS, OP) \
792 TYPE old_value, new_value; \
793 old_value = *(TYPE volatile *)lhs; \
794 new_value = (TYPE)(old_value OP rhs); \
795 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
796 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
797 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
800 old_value = *(TYPE volatile *)lhs; \
801 new_value = (TYPE)(old_value OP rhs); \
812#define OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
816 kmp_int##BITS *vvv; \
818 struct _sss old_value, new_value; \
819 old_value.vvv = (kmp_int##BITS *)&old_value.cmp; \
820 new_value.vvv = (kmp_int##BITS *)&new_value.cmp; \
821 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
822 new_value.cmp = (TYPE)(old_value.cmp OP rhs); \
823 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
824 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
825 *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) { \
828 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
829 new_value.cmp = (TYPE)(old_value.cmp OP rhs); \
835#if KMP_OS_WINDOWS && KMP_ARCH_AARCH64
839#define OP_CMPXCHG(TYPE, BITS, OP) \
843 kmp_int##BITS *vvv; \
845 struct _sss old_value, new_value; \
846 old_value.vvv = (kmp_int##BITS *)&old_value.cmp; \
847 new_value.vvv = (kmp_int##BITS *)&new_value.cmp; \
848 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
849 new_value.cmp = old_value.cmp OP rhs; \
850 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
851 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv, \
852 *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) { \
855 *old_value.vvv = *(volatile kmp_int##BITS *)lhs; \
856 new_value.cmp = old_value.cmp OP rhs; \
// Windows-on-AArch64 branch (see the #if above): replace the locked-update
// fallback. The sequence is the same as the primary definition -- acquire the
// per-type lock, apply *lhs = *lhs OP rhs, release the lock -- but this
// variant omits the (TYPE) cast applied to the result in the primary
// OP_UPDATE_CRITICAL.
#undef OP_UPDATE_CRITICAL
#define OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID)                                  \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                      \
  (*lhs) = (*lhs)OP rhs;                                                      \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
868#if KMP_ARCH_X86 || KMP_ARCH_X86_64
872#define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
874 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
875 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
877 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
880#define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
882 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
883 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
884 OP_CMPXCHG(TYPE, BITS, OP) \
889#define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
891 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
892 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
893 OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
901#define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
903 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
904 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
905 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
907 KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
910 OP_UPDATE_CRITICAL(TYPE, OP, \
915#define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
917 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
918 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
919 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
920 OP_CMPXCHG(TYPE, BITS, OP) \
923 OP_UPDATE_CRITICAL(TYPE, OP, \
930#define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, \
932 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
933 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
934 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
935 OP_CMPXCHG(TYPE, BITS, OP) \
938 OP_UPDATE_CRITICAL(TYPE, OP, \
947ATOMIC_FIXED_ADD(fixed4, add, kmp_int32, 32, +, 4i, 3,
949ATOMIC_FIXED_ADD(fixed4, sub, kmp_int32, 32, -, 4i, 3,
952ATOMIC_CMPXCHG(float4, add, kmp_real32, 32, +, 4r, 3,
954ATOMIC_CMPXCHG(float4, sub, kmp_real32, 32, -, 4r, 3,
958ATOMIC_FIXED_ADD(fixed8, add, kmp_int64, 64, +, 8i, 7,
960ATOMIC_FIXED_ADD(fixed8, sub, kmp_int64, 64, -, 8i, 7,
963ATOMIC_CMPXCHG(float8, add, kmp_real64, 64, +, 8r, 7,
965ATOMIC_CMPXCHG(float8, sub, kmp_real64, 64, -, 8r, 7,
983ATOMIC_CMPXCHG(fixed1, add, kmp_int8, 8, +, 1i, 0,
985ATOMIC_CMPXCHG(fixed1, andb, kmp_int8, 8, &, 1i, 0,
987ATOMIC_CMPXCHG(fixed1, div, kmp_int8, 8, /, 1i, 0,
989ATOMIC_CMPXCHG(fixed1u, div, kmp_uint8, 8, /, 1i, 0,
991ATOMIC_CMPXCHG(fixed1, mul, kmp_int8, 8, *, 1i, 0,
993ATOMIC_CMPXCHG(fixed1, orb, kmp_int8, 8, |, 1i, 0,
995ATOMIC_CMPXCHG(fixed1, shl, kmp_int8, 8, <<, 1i, 0,
997ATOMIC_CMPXCHG(fixed1, shr, kmp_int8, 8, >>, 1i, 0,
999ATOMIC_CMPXCHG(fixed1u, shr, kmp_uint8, 8, >>, 1i, 0,
1001ATOMIC_CMPXCHG(fixed1, sub, kmp_int8, 8, -, 1i, 0,
1003ATOMIC_CMPXCHG(fixed1, xor, kmp_int8, 8, ^, 1i, 0,
1005ATOMIC_CMPXCHG(fixed2, add, kmp_int16, 16, +, 2i, 1,
1007ATOMIC_CMPXCHG(fixed2, andb, kmp_int16, 16, &, 2i, 1,
1009ATOMIC_CMPXCHG(fixed2, div, kmp_int16, 16, /, 2i, 1,
1011ATOMIC_CMPXCHG(fixed2u, div, kmp_uint16, 16, /, 2i, 1,
1013ATOMIC_CMPXCHG(fixed2, mul, kmp_int16, 16, *, 2i, 1,
1015ATOMIC_CMPXCHG(fixed2, orb, kmp_int16, 16, |, 2i, 1,
1017ATOMIC_CMPXCHG(fixed2, shl, kmp_int16, 16, <<, 2i, 1,
1019ATOMIC_CMPXCHG(fixed2, shr, kmp_int16, 16, >>, 2i, 1,
1021ATOMIC_CMPXCHG(fixed2u, shr, kmp_uint16, 16, >>, 2i, 1,
1023ATOMIC_CMPXCHG(fixed2, sub, kmp_int16, 16, -, 2i, 1,
1025ATOMIC_CMPXCHG(fixed2, xor, kmp_int16, 16, ^, 2i, 1,
1027ATOMIC_CMPXCHG(fixed4, andb, kmp_int32, 32, &, 4i, 3,
1029ATOMIC_CMPXCHG(fixed4, div, kmp_int32, 32, /, 4i, 3,
1031ATOMIC_CMPXCHG(fixed4u, div, kmp_uint32, 32, /, 4i, 3,
1033ATOMIC_CMPXCHG(fixed4, mul, kmp_int32, 32, *, 4i, 3,
1035ATOMIC_CMPXCHG(fixed4, orb, kmp_int32, 32, |, 4i, 3,
1037ATOMIC_CMPXCHG(fixed4, shl, kmp_int32, 32, <<, 4i, 3,
1039ATOMIC_CMPXCHG(fixed4, shr, kmp_int32, 32, >>, 4i, 3,
1041ATOMIC_CMPXCHG(fixed4u, shr, kmp_uint32, 32, >>, 4i, 3,
1043ATOMIC_CMPXCHG(fixed4, xor, kmp_int32, 32, ^, 4i, 3,
1045ATOMIC_CMPXCHG(fixed8, andb, kmp_int64, 64, &, 8i, 7,
1047ATOMIC_CMPXCHG(fixed8, div, kmp_int64, 64, /, 8i, 7,
1049ATOMIC_CMPXCHG(fixed8u, div, kmp_uint64, 64, /, 8i, 7,
1051ATOMIC_CMPXCHG(fixed8, mul, kmp_int64, 64, *, 8i, 7,
1053ATOMIC_CMPXCHG(fixed8, orb, kmp_int64, 64, |, 8i, 7,
1055ATOMIC_CMPXCHG(fixed8, shl, kmp_int64, 64, <<, 8i, 7,
1057ATOMIC_CMPXCHG(fixed8, shr, kmp_int64, 64, >>, 8i, 7,
1059ATOMIC_CMPXCHG(fixed8u, shr, kmp_uint64, 64, >>, 8i, 7,
1061ATOMIC_CMPXCHG(fixed8, xor, kmp_int64, 64, ^, 8i, 7,
1063ATOMIC_CMPXCHG(float4, div, kmp_real32, 32, /, 4r, 3,
1065ATOMIC_CMPXCHG(float4, mul, kmp_real32, 32, *, 4r, 3,
1067ATOMIC_CMPXCHG(float8, div, kmp_real64, 64, /, 8r, 7,
1069ATOMIC_CMPXCHG(float8, mul, kmp_real64, 64, *, 8r, 7,
1079#define ATOMIC_CRIT_L(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1080 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1081 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1082 OP_CRITICAL(= *lhs OP, LCK_ID) \
1085#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1089#define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1090 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1091 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1092 OP_CMPXCHG(TYPE, BITS, OP) \
1098#define ATOMIC_CMPX_L(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, GOMP_FLAG) \
1099 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1100 OP_GOMP_CRITICAL(= *lhs OP, GOMP_FLAG) \
1101 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1102 OP_CMPXCHG(TYPE, BITS, OP) \
1105 OP_CRITICAL(= *lhs OP, LCK_ID) \
1110ATOMIC_CMPX_L(fixed1, andl,
char, 8, &&, 1i, 0,
1112ATOMIC_CMPX_L(fixed1, orl,
char, 8, ||, 1i, 0,
1114ATOMIC_CMPX_L(fixed2, andl,
short, 16, &&, 2i, 1,
1116ATOMIC_CMPX_L(fixed2, orl,
short, 16, ||, 2i, 1,
1118ATOMIC_CMPX_L(fixed4, andl, kmp_int32, 32, &&, 4i, 3,
1120ATOMIC_CMPX_L(fixed4, orl, kmp_int32, 32, ||, 4i, 3,
1122ATOMIC_CMPX_L(fixed8, andl, kmp_int64, 64, &&, 8i, 7,
1124ATOMIC_CMPX_L(fixed8, orl, kmp_int64, 64, ||, 8i, 7,
1136#define MIN_MAX_CRITSECT(OP, LCK_ID) \
1137 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
1139 if (*lhs OP rhs) { \
1142 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
1145#ifdef KMP_GOMP_COMPAT
1146#define GOMP_MIN_MAX_CRITSECT(OP, FLAG) \
1147 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
1149 MIN_MAX_CRITSECT(OP, 0); \
1153#define GOMP_MIN_MAX_CRITSECT(OP, FLAG)
1157#define MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1159 TYPE KMP_ATOMIC_VOLATILE temp_val; \
1162 old_value = temp_val; \
1163 while (old_value OP rhs && \
1164 !KMP_COMPARE_AND_STORE_ACQ##BITS( \
1165 (kmp_int##BITS *)lhs, \
1166 *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1167 *VOLATILE_CAST(kmp_int##BITS *) & rhs)) { \
1169 old_value = temp_val; \
1175#define MIN_MAX_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1176 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1177 if (*lhs OP rhs) { \
1178 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1179 MIN_MAX_CRITSECT(OP, LCK_ID) \
1183#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1187#define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1189 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1190 if (*lhs OP rhs) { \
1191 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1192 MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1199#define MIN_MAX_COMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1201 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1202 if (*lhs OP rhs) { \
1203 GOMP_MIN_MAX_CRITSECT(OP, GOMP_FLAG) \
1204 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1205 MIN_MAX_CMPXCHG(TYPE, BITS, OP) \
1208 MIN_MAX_CRITSECT(OP, LCK_ID) \
1214MIN_MAX_COMPXCHG(fixed1, max,
char, 8, <, 1i, 0,
1216MIN_MAX_COMPXCHG(fixed1, min,
char, 8, >, 1i, 0,
1218MIN_MAX_COMPXCHG(fixed2, max,
short, 16, <, 2i, 1,
1220MIN_MAX_COMPXCHG(fixed2, min,
short, 16, >, 2i, 1,
1222MIN_MAX_COMPXCHG(fixed4, max, kmp_int32, 32, <, 4i, 3,
1224MIN_MAX_COMPXCHG(fixed4, min, kmp_int32, 32, >, 4i, 3,
1226MIN_MAX_COMPXCHG(fixed8, max, kmp_int64, 64, <, 8i, 7,
1228MIN_MAX_COMPXCHG(fixed8, min, kmp_int64, 64, >, 8i, 7,
1230MIN_MAX_COMPXCHG(float4, max, kmp_real32, 32, <, 4r, 3,
1232MIN_MAX_COMPXCHG(float4, min, kmp_real32, 32, >, 4r, 3,
1234MIN_MAX_COMPXCHG(float8, max, kmp_real64, 64, <, 8r, 7,
1236MIN_MAX_COMPXCHG(float8, min, kmp_real64, 64, >, 8r, 7,
1238#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1239MIN_MAX_CRITICAL(float10, max,
long double, <, 10r,
1241MIN_MAX_CRITICAL(float10, min,
long double, >, 10r,
1245MIN_MAX_CRITICAL(float16, max, QUAD_LEGACY, <, 16r,
1247MIN_MAX_CRITICAL(float16, min, QUAD_LEGACY, >, 16r,
1250MIN_MAX_CRITICAL(float16, max_a16, Quad_a16_t, <, 16r,
1252MIN_MAX_CRITICAL(float16, min_a16, Quad_a16_t, >, 16r,
1259#define ATOMIC_CRIT_EQV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1260 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1261 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1262 OP_CRITICAL(^= (TYPE) ~, LCK_ID) \
1266#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1269#define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1271 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1272 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1273 OP_CMPXCHG(TYPE, BITS, OP) \
1279#define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK, \
1281 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1282 OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) \
1283 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1284 OP_CMPXCHG(TYPE, BITS, OP) \
1287 OP_CRITICAL(^= (TYPE) ~, LCK_ID) \
1292ATOMIC_CMPXCHG(fixed1, neqv, kmp_int8, 8, ^, 1i, 0,
1294ATOMIC_CMPXCHG(fixed2, neqv, kmp_int16, 16, ^, 2i, 1,
1296ATOMIC_CMPXCHG(fixed4, neqv, kmp_int32, 32, ^, 4i, 3,
1298ATOMIC_CMPXCHG(fixed8, neqv, kmp_int64, 64, ^, 8i, 7,
1300ATOMIC_CMPX_EQV(fixed1, eqv, kmp_int8, 8, ^~, 1i, 0,
1302ATOMIC_CMPX_EQV(fixed2, eqv, kmp_int16, 16, ^~, 2i, 1,
1304ATOMIC_CMPX_EQV(fixed4, eqv, kmp_int32, 32, ^~, 4i, 3,
1306ATOMIC_CMPX_EQV(fixed8, eqv, kmp_int64, 64, ^~, 8i, 7,
1315#define ATOMIC_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1316 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
1317 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1318 OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) \
1322#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1324ATOMIC_CRITICAL(float10, add,
long double, +, 10r,
1326ATOMIC_CRITICAL(float10, sub,
long double, -, 10r,
1328ATOMIC_CRITICAL(float10, mul,
long double, *, 10r,
1330ATOMIC_CRITICAL(float10, div,
long double, /, 10r,
1335ATOMIC_CRITICAL(float16, add, QUAD_LEGACY, +, 16r,
1337ATOMIC_CRITICAL(float16, sub, QUAD_LEGACY, -, 16r,
1339ATOMIC_CRITICAL(float16, mul, QUAD_LEGACY, *, 16r,
1341ATOMIC_CRITICAL(float16, div, QUAD_LEGACY, /, 16r,
1344ATOMIC_CRITICAL(float16, add_a16, Quad_a16_t, +, 16r,
1346ATOMIC_CRITICAL(float16, sub_a16, Quad_a16_t, -, 16r,
1348ATOMIC_CRITICAL(float16, mul_a16, Quad_a16_t, *, 16r,
1350ATOMIC_CRITICAL(float16, div_a16, Quad_a16_t, /, 16r,
1358ATOMIC_CMPXCHG_WORKAROUND(cmplx4, add, kmp_cmplx32, 64, +, 8c, 7,
1360ATOMIC_CMPXCHG_WORKAROUND(cmplx4, sub, kmp_cmplx32, 64, -, 8c, 7,
1362ATOMIC_CMPXCHG_WORKAROUND(cmplx4, mul, kmp_cmplx32, 64, *, 8c, 7,
1364ATOMIC_CMPXCHG_WORKAROUND(cmplx4, div, kmp_cmplx32, 64, /, 8c, 7,
// Single-precision complex (kmp_cmplx32) update routines: always take the
// lock-based critical-section path under the 8c lock. GOMP flag 1 means the
// GOMP-compat critical path is also honored when __kmp_atomic_mode == 2.
ATOMIC_CRITICAL(cmplx4, add, kmp_cmplx32, +, 8c, 1) // __kmpc_atomic_cmplx4_add
ATOMIC_CRITICAL(cmplx4, sub, kmp_cmplx32, -, 8c, 1) // __kmpc_atomic_cmplx4_sub
ATOMIC_CRITICAL(cmplx4, mul, kmp_cmplx32, *, 8c, 1) // __kmpc_atomic_cmplx4_mul
ATOMIC_CRITICAL(cmplx4, div, kmp_cmplx32, /, 8c, 1) // __kmpc_atomic_cmplx4_div
// Double-precision complex (kmp_cmplx64) update routines: lock-based path
// under the 16c lock, with the GOMP-compat critical path enabled (flag 1).
ATOMIC_CRITICAL(cmplx8, add, kmp_cmplx64, +, 16c, 1) // __kmpc_atomic_cmplx8_add
ATOMIC_CRITICAL(cmplx8, sub, kmp_cmplx64, -, 16c, 1) // __kmpc_atomic_cmplx8_sub
ATOMIC_CRITICAL(cmplx8, mul, kmp_cmplx64, *, 16c, 1) // __kmpc_atomic_cmplx8_mul
ATOMIC_CRITICAL(cmplx8, div, kmp_cmplx64, /, 16c, 1) // __kmpc_atomic_cmplx8_div
1378#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1379ATOMIC_CRITICAL(cmplx10, add, kmp_cmplx80, +, 20c,
1381ATOMIC_CRITICAL(cmplx10, sub, kmp_cmplx80, -, 20c,
1383ATOMIC_CRITICAL(cmplx10, mul, kmp_cmplx80, *, 20c,
1385ATOMIC_CRITICAL(cmplx10, div, kmp_cmplx80, /, 20c,
1389ATOMIC_CRITICAL(cmplx16, add, CPLX128_LEG, +, 32c,
1391ATOMIC_CRITICAL(cmplx16, sub, CPLX128_LEG, -, 32c,
1393ATOMIC_CRITICAL(cmplx16, mul, CPLX128_LEG, *, 32c,
1395ATOMIC_CRITICAL(cmplx16, div, CPLX128_LEG, /, 32c,
1398ATOMIC_CRITICAL(cmplx16, add_a16, kmp_cmplx128_a16_t, +, 32c,
1400ATOMIC_CRITICAL(cmplx16, sub_a16, kmp_cmplx128_a16_t, -, 32c,
1402ATOMIC_CRITICAL(cmplx16, mul_a16, kmp_cmplx128_a16_t, *, 32c,
1404ATOMIC_CRITICAL(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
1411#if KMP_ARCH_X86 || KMP_ARCH_X86_64
// Lock-guarded *reversed* update: *lhs = (TYPE)(rhs OP *lhs). The operand
// order is swapped relative to OP_UPDATE_CRITICAL; this backs the "_rev"
// entry points (non-commutative forms such as x = expr - x, x = expr / x).
#define OP_CRITICAL_REV(TYPE, OP, LCK_ID)                                     \
  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                      \
  (*lhs) = (TYPE)((rhs)OP(*lhs));                                             \
  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
1426#ifdef KMP_GOMP_COMPAT
1427#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG) \
1428 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
1430 OP_CRITICAL_REV(TYPE, OP, 0); \
1435#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG)
// Prologue for the reversed-operand entry points: identical in shape to
// ATOMIC_BEGIN but generates the "_rev"-suffixed name
//   RET_TYPE __kmpc_atomic_<TYPE_ID>_<OP_ID>_rev(ident_t *, int gtid,
//                                                TYPE *lhs, TYPE rhs) {
// As with ATOMIC_BEGIN, the opening brace is closed by the invoking macro's
// trailing "}" (outside this view).
#define ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, RET_TYPE)                      \
  RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID##_rev(ident_t *id_ref, int gtid, \
                                                   TYPE *lhs, TYPE rhs) {     \
    KMP_DEBUG_ASSERT(__kmp_init_serial);                                      \
    KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_rev: T#%d\n", gtid));
1456#define OP_CMPXCHG_REV(TYPE, BITS, OP) \
1458 TYPE KMP_ATOMIC_VOLATILE temp_val; \
1459 TYPE old_value, new_value; \
1461 old_value = temp_val; \
1462 new_value = (TYPE)(rhs OP old_value); \
1463 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
1464 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
1465 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
1469 old_value = temp_val; \
1470 new_value = (TYPE)(rhs OP old_value); \
1475#define ATOMIC_CMPXCHG_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, GOMP_FLAG) \
1476 ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void) \
1477 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1478 OP_CMPXCHG_REV(TYPE, BITS, OP) \
1495ATOMIC_CMPXCHG_REV(fixed1, div, kmp_int8, 8, /, 1i,
1497ATOMIC_CMPXCHG_REV(fixed1u, div, kmp_uint8, 8, /, 1i,
1499ATOMIC_CMPXCHG_REV(fixed1, shl, kmp_int8, 8, <<, 1i,
1501ATOMIC_CMPXCHG_REV(fixed1, shr, kmp_int8, 8, >>, 1i,
1503ATOMIC_CMPXCHG_REV(fixed1u, shr, kmp_uint8, 8, >>, 1i,
1505ATOMIC_CMPXCHG_REV(fixed1, sub, kmp_int8, 8, -, 1i,
1508ATOMIC_CMPXCHG_REV(fixed2, div, kmp_int16, 16, /, 2i,
1510ATOMIC_CMPXCHG_REV(fixed2u, div, kmp_uint16, 16, /, 2i,
1512ATOMIC_CMPXCHG_REV(fixed2, shl, kmp_int16, 16, <<, 2i,
1514ATOMIC_CMPXCHG_REV(fixed2, shr, kmp_int16, 16, >>, 2i,
1516ATOMIC_CMPXCHG_REV(fixed2u, shr, kmp_uint16, 16, >>, 2i,
1518ATOMIC_CMPXCHG_REV(fixed2, sub, kmp_int16, 16, -, 2i,
1521ATOMIC_CMPXCHG_REV(fixed4, div, kmp_int32, 32, /, 4i,
1523ATOMIC_CMPXCHG_REV(fixed4u, div, kmp_uint32, 32, /, 4i,
1525ATOMIC_CMPXCHG_REV(fixed4, shl, kmp_int32, 32, <<, 4i,
1527ATOMIC_CMPXCHG_REV(fixed4, shr, kmp_int32, 32, >>, 4i,
1529ATOMIC_CMPXCHG_REV(fixed4u, shr, kmp_uint32, 32, >>, 4i,
1531ATOMIC_CMPXCHG_REV(fixed4, sub, kmp_int32, 32, -, 4i,
1534ATOMIC_CMPXCHG_REV(fixed8, div, kmp_int64, 64, /, 8i,
1536ATOMIC_CMPXCHG_REV(fixed8u, div, kmp_uint64, 64, /, 8i,
1538ATOMIC_CMPXCHG_REV(fixed8, shl, kmp_int64, 64, <<, 8i,
1540ATOMIC_CMPXCHG_REV(fixed8, shr, kmp_int64, 64, >>, 8i,
1542ATOMIC_CMPXCHG_REV(fixed8u, shr, kmp_uint64, 64, >>, 8i,
1544ATOMIC_CMPXCHG_REV(fixed8, sub, kmp_int64, 64, -, 8i,
1547ATOMIC_CMPXCHG_REV(float4, div, kmp_real32, 32, /, 4r,
1549ATOMIC_CMPXCHG_REV(float4, sub, kmp_real32, 32, -, 4r,
1552ATOMIC_CMPXCHG_REV(float8, div, kmp_real64, 64, /, 8r,
1554ATOMIC_CMPXCHG_REV(float8, sub, kmp_real64, 64, -, 8r,
1564#define ATOMIC_CRITICAL_REV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
1565 ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void) \
1566 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1567 OP_CRITICAL_REV(TYPE, OP, LCK_ID) \
1572ATOMIC_CRITICAL_REV(float10, sub,
long double, -, 10r,
1574ATOMIC_CRITICAL_REV(float10, div,
long double, /, 10r,
1578ATOMIC_CRITICAL_REV(float16, sub, QUAD_LEGACY, -, 16r,
1580ATOMIC_CRITICAL_REV(float16, div, QUAD_LEGACY, /, 16r,
1583ATOMIC_CRITICAL_REV(float16, sub_a16, Quad_a16_t, -, 16r,
1585ATOMIC_CRITICAL_REV(float16, div_a16, Quad_a16_t, /, 16r,
1591ATOMIC_CRITICAL_REV(cmplx4, sub, kmp_cmplx32, -, 8c,
1593ATOMIC_CRITICAL_REV(cmplx4, div, kmp_cmplx32, /, 8c,
1595ATOMIC_CRITICAL_REV(cmplx8, sub, kmp_cmplx64, -, 16c,
1597ATOMIC_CRITICAL_REV(cmplx8, div, kmp_cmplx64, /, 16c,
1599ATOMIC_CRITICAL_REV(cmplx10, sub, kmp_cmplx80, -, 20c,
1601ATOMIC_CRITICAL_REV(cmplx10, div, kmp_cmplx80, /, 20c,
1604ATOMIC_CRITICAL_REV(cmplx16, sub, CPLX128_LEG, -, 32c,
1606ATOMIC_CRITICAL_REV(cmplx16, div, CPLX128_LEG, /, 32c,
1609ATOMIC_CRITICAL_REV(cmplx16, sub_a16, kmp_cmplx128_a16_t, -, 32c,
1611ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
1630#define ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1631 void __kmpc_atomic_##TYPE_ID##_##OP_ID##_##RTYPE_ID( \
1632 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs) { \
1633 KMP_DEBUG_ASSERT(__kmp_init_serial); \
1635 ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_" #RTYPE_ID ": T#%d\n", \
1639#define ATOMIC_CRITICAL_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, LCK_ID, \
1641 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1642 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1643 OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) \
1647#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1650#define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1651 LCK_ID, MASK, GOMP_FLAG) \
1652 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1653 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1654 OP_CMPXCHG(TYPE, BITS, OP) \
1660#define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1661 LCK_ID, MASK, GOMP_FLAG) \
1662 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1663 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1664 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1665 OP_CMPXCHG(TYPE, BITS, OP) \
1668 OP_UPDATE_CRITICAL(TYPE, OP, \
1675#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1677#define ATOMIC_CMPXCHG_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
1678 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
1679 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1680 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1681 OP_CMPXCHG_REV(TYPE, BITS, OP) \
1683#define ATOMIC_CRITICAL_REV_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
1684 LCK_ID, GOMP_FLAG) \
1685 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1686 OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG) \
1687 OP_CRITICAL_REV(TYPE, OP, LCK_ID) \
1692ATOMIC_CMPXCHG_MIX(fixed1,
char, mul, 8, *, float8, kmp_real64, 1i, 0,
1694ATOMIC_CMPXCHG_MIX(fixed1,
char, div, 8, /, float8, kmp_real64, 1i, 0,
1696ATOMIC_CMPXCHG_MIX(fixed2,
short, mul, 16, *, float8, kmp_real64, 2i, 1,
1698ATOMIC_CMPXCHG_MIX(fixed2,
short, div, 16, /, float8, kmp_real64, 2i, 1,
1700ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, mul, 32, *, float8, kmp_real64, 4i, 3,
1702ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, div, 32, /, float8, kmp_real64, 4i, 3,
1704ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, mul, 64, *, float8, kmp_real64, 8i, 7,
1706ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, div, 64, /, float8, kmp_real64, 8i, 7,
1708ATOMIC_CMPXCHG_MIX(float4, kmp_real32, add, 32, +, float8, kmp_real64, 4r, 3,
1710ATOMIC_CMPXCHG_MIX(float4, kmp_real32, sub, 32, -, float8, kmp_real64, 4r, 3,
1712ATOMIC_CMPXCHG_MIX(float4, kmp_real32, mul, 32, *, float8, kmp_real64, 4r, 3,
1714ATOMIC_CMPXCHG_MIX(float4, kmp_real32, div, 32, /, float8, kmp_real64, 4r, 3,
1720ATOMIC_CMPXCHG_MIX(fixed1,
char, add, 8, +, fp, _Quad, 1i, 0,
1722ATOMIC_CMPXCHG_MIX(fixed1u, uchar, add, 8, +, fp, _Quad, 1i, 0,
1724ATOMIC_CMPXCHG_MIX(fixed1,
char, sub, 8, -, fp, _Quad, 1i, 0,
1726ATOMIC_CMPXCHG_MIX(fixed1u, uchar, sub, 8, -, fp, _Quad, 1i, 0,
1728ATOMIC_CMPXCHG_MIX(fixed1,
char, mul, 8, *, fp, _Quad, 1i, 0,
1730ATOMIC_CMPXCHG_MIX(fixed1u, uchar, mul, 8, *, fp, _Quad, 1i, 0,
1732ATOMIC_CMPXCHG_MIX(fixed1,
char, div, 8, /, fp, _Quad, 1i, 0,
1734ATOMIC_CMPXCHG_MIX(fixed1u, uchar, div, 8, /, fp, _Quad, 1i, 0,
1737ATOMIC_CMPXCHG_MIX(fixed2,
short, add, 16, +, fp, _Quad, 2i, 1,
1739ATOMIC_CMPXCHG_MIX(fixed2u, ushort, add, 16, +, fp, _Quad, 2i, 1,
1741ATOMIC_CMPXCHG_MIX(fixed2,
short, sub, 16, -, fp, _Quad, 2i, 1,
1743ATOMIC_CMPXCHG_MIX(fixed2u, ushort, sub, 16, -, fp, _Quad, 2i, 1,
1745ATOMIC_CMPXCHG_MIX(fixed2,
short, mul, 16, *, fp, _Quad, 2i, 1,
1747ATOMIC_CMPXCHG_MIX(fixed2u, ushort, mul, 16, *, fp, _Quad, 2i, 1,
1749ATOMIC_CMPXCHG_MIX(fixed2,
short, div, 16, /, fp, _Quad, 2i, 1,
1751ATOMIC_CMPXCHG_MIX(fixed2u, ushort, div, 16, /, fp, _Quad, 2i, 1,
1754ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, add, 32, +, fp, _Quad, 4i, 3,
1756ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, add, 32, +, fp, _Quad, 4i, 3,
1758ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, sub, 32, -, fp, _Quad, 4i, 3,
1760ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, sub, 32, -, fp, _Quad, 4i, 3,
1762ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, mul, 32, *, fp, _Quad, 4i, 3,
1764ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, mul, 32, *, fp, _Quad, 4i, 3,
1766ATOMIC_CMPXCHG_MIX(fixed4, kmp_int32, div, 32, /, fp, _Quad, 4i, 3,
1768ATOMIC_CMPXCHG_MIX(fixed4u, kmp_uint32, div, 32, /, fp, _Quad, 4i, 3,
1771ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, add, 64, +, fp, _Quad, 8i, 7,
1773ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, add, 64, +, fp, _Quad, 8i, 7,
1775ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, sub, 64, -, fp, _Quad, 8i, 7,
1777ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, sub, 64, -, fp, _Quad, 8i, 7,
1779ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, mul, 64, *, fp, _Quad, 8i, 7,
1781ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, mul, 64, *, fp, _Quad, 8i, 7,
1783ATOMIC_CMPXCHG_MIX(fixed8, kmp_int64, div, 64, /, fp, _Quad, 8i, 7,
1785ATOMIC_CMPXCHG_MIX(fixed8u, kmp_uint64, div, 64, /, fp, _Quad, 8i, 7,
1788ATOMIC_CMPXCHG_MIX(float4, kmp_real32, add, 32, +, fp, _Quad, 4r, 3,
1790ATOMIC_CMPXCHG_MIX(float4, kmp_real32, sub, 32, -, fp, _Quad, 4r, 3,
1792ATOMIC_CMPXCHG_MIX(float4, kmp_real32, mul, 32, *, fp, _Quad, 4r, 3,
1794ATOMIC_CMPXCHG_MIX(float4, kmp_real32, div, 32, /, fp, _Quad, 4r, 3,
1797ATOMIC_CMPXCHG_MIX(float8, kmp_real64, add, 64, +, fp, _Quad, 8r, 7,
1799ATOMIC_CMPXCHG_MIX(float8, kmp_real64, sub, 64, -, fp, _Quad, 8r, 7,
1801ATOMIC_CMPXCHG_MIX(float8, kmp_real64, mul, 64, *, fp, _Quad, 8r, 7,
1803ATOMIC_CMPXCHG_MIX(float8, kmp_real64, div, 64, /, fp, _Quad, 8r, 7,
1806#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1807ATOMIC_CRITICAL_FP(float10,
long double, add, +, fp, _Quad, 10r,
1809ATOMIC_CRITICAL_FP(float10,
long double, sub, -, fp, _Quad, 10r,
1811ATOMIC_CRITICAL_FP(float10,
long double, mul, *, fp, _Quad, 10r,
1813ATOMIC_CRITICAL_FP(float10,
long double, div, /, fp, _Quad, 10r,
1817ATOMIC_CMPXCHG_REV_MIX(fixed1,
char, sub_rev, 8, -, fp, _Quad, 1i, 0,
1819ATOMIC_CMPXCHG_REV_MIX(fixed1u, uchar, sub_rev, 8, -, fp, _Quad, 1i, 0,
1821ATOMIC_CMPXCHG_REV_MIX(fixed1,
char, div_rev, 8, /, fp, _Quad, 1i, 0,
1823ATOMIC_CMPXCHG_REV_MIX(fixed1u, uchar, div_rev, 8, /, fp, _Quad, 1i, 0,
1826ATOMIC_CMPXCHG_REV_MIX(fixed2,
short, sub_rev, 16, -, fp, _Quad, 2i, 1,
1828ATOMIC_CMPXCHG_REV_MIX(fixed2u, ushort, sub_rev, 16, -, fp, _Quad, 2i, 1,
1830ATOMIC_CMPXCHG_REV_MIX(fixed2,
short, div_rev, 16, /, fp, _Quad, 2i, 1,
1832ATOMIC_CMPXCHG_REV_MIX(fixed2u, ushort, div_rev, 16, /, fp, _Quad, 2i, 1,
1835ATOMIC_CMPXCHG_REV_MIX(fixed4, kmp_int32, sub_rev, 32, -, fp, _Quad, 4i, 3,
1837ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, sub_rev, 32, -, fp, _Quad, 4i, 3,
1839ATOMIC_CMPXCHG_REV_MIX(fixed4, kmp_int32, div_rev, 32, /, fp, _Quad, 4i, 3,
1841ATOMIC_CMPXCHG_REV_MIX(fixed4u, kmp_uint32, div_rev, 32, /, fp, _Quad, 4i, 3,
1844ATOMIC_CMPXCHG_REV_MIX(fixed8, kmp_int64, sub_rev, 64, -, fp, _Quad, 8i, 7,
1846ATOMIC_CMPXCHG_REV_MIX(fixed8u, kmp_uint64, sub_rev, 64, -, fp, _Quad, 8i, 7,
1848ATOMIC_CMPXCHG_REV_MIX(fixed8, kmp_int64, div_rev, 64, /, fp, _Quad, 8i, 7,
1850ATOMIC_CMPXCHG_REV_MIX(fixed8u, kmp_uint64, div_rev, 64, /, fp, _Quad, 8i, 7,
1853ATOMIC_CMPXCHG_REV_MIX(float4, kmp_real32, sub_rev, 32, -, fp, _Quad, 4r, 3,
1855ATOMIC_CMPXCHG_REV_MIX(float4, kmp_real32, div_rev, 32, /, fp, _Quad, 4r, 3,
1858ATOMIC_CMPXCHG_REV_MIX(float8, kmp_real64, sub_rev, 64, -, fp, _Quad, 8r, 7,
1860ATOMIC_CMPXCHG_REV_MIX(float8, kmp_real64, div_rev, 64, /, fp, _Quad, 8r, 7,
1863ATOMIC_CRITICAL_REV_FP(float10,
long double, sub_rev, -, fp, _Quad, 10r,
1865ATOMIC_CRITICAL_REV_FP(float10,
long double, div_rev, /, fp, _Quad, 10r,
1871#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1876#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1877 LCK_ID, MASK, GOMP_FLAG) \
1878 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1879 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1880 OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP) \
1884#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1885 LCK_ID, MASK, GOMP_FLAG) \
1886 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1887 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1888 OP_CMPXCHG(TYPE, BITS, OP) \
1894#define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE, \
1895 LCK_ID, MASK, GOMP_FLAG) \
1896 ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE) \
1897 OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) \
1898 if (!((kmp_uintptr_t)lhs & 0x##MASK)) { \
1899 OP_CMPXCHG(TYPE, BITS, OP) \
1902 OP_UPDATE_CRITICAL(TYPE, OP, \
1908ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, add, 64, +, cmplx8, kmp_cmplx64, 8c,
1910ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, sub, 64, -, cmplx8, kmp_cmplx64, 8c,
1912ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, mul, 64, *, cmplx8, kmp_cmplx64, 8c,
1914ATOMIC_CMPXCHG_CMPLX(cmplx4, kmp_cmplx32, div, 64, /, cmplx8, kmp_cmplx64, 8c,
// ---- Atomic READ machinery ----
// A read is performed either by a degenerate compare-and-store (old == new,
// OP_CMPXCHG_READ), by a plain TEST_THEN_ADD of 0 (ATOMIC_FIXED_READ), or
// under a per-size lock (OP_CRITICAL_READ). GOMP-compat mode (mode == 2)
// routes through the single global lock 0.
// NOTE(review): interior continuation lines are missing in this view;
// code left byte-identical.
1918#if KMP_ARCH_X86 || KMP_ARCH_X86_64
1929#define ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, RET_TYPE) \
1930 RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, \
1932 KMP_DEBUG_ASSERT(__kmp_init_serial); \
1933 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
// Read via CAS of the value against itself; f_i_union punts float<->int bits.
1944#define OP_CMPXCHG_READ(TYPE, BITS, OP) \
1946 TYPE KMP_ATOMIC_VOLATILE temp_val; \
1949 kmp_int##BITS i_val; \
1951 union f_i_union old_value; \
1953 old_value.f_val = temp_val; \
1954 old_value.i_val = KMP_COMPARE_AND_STORE_RET##BITS( \
1955 (kmp_int##BITS *)loc, \
1956 *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val, \
1957 *VOLATILE_CAST(kmp_int##BITS *) & old_value.i_val, \
1958 new_value = old_value.f_val; \
1968#define OP_CRITICAL_READ(OP, LCK_ID) \
1969 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
1971 new_value = (*loc); \
1973 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
1976#ifdef KMP_GOMP_COMPAT
1977#define OP_GOMP_CRITICAL_READ(OP, FLAG) \
1978 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
1980 OP_CRITICAL_READ(OP, 0); \
1984#define OP_GOMP_CRITICAL_READ(OP, FLAG)
// Fixed-width integer read: fetch-add of zero returns the current value.
1988#define ATOMIC_FIXED_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1989 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
1991 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
1992 new_value = KMP_TEST_THEN_ADD##BITS(loc, OP 0); \
1996#define ATOMIC_CMPXCHG_READ(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
1997 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
1999 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
2000 OP_CMPXCHG_READ(TYPE, BITS, OP) \
2008#define ATOMIC_CRITICAL_READ(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2009 ATOMIC_BEGIN_READ(TYPE_ID, OP_ID, TYPE, TYPE) \
2011 OP_GOMP_CRITICAL_READ(OP## =, GOMP_FLAG) \
2012 OP_CRITICAL_READ(OP, LCK_ID) \
// "WRK" read variants return the value through an `out` pointer instead of by
// value (used where returning a small complex by value is not ABI-safe),
// followed by the read entry-point instantiations for all supported types.
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2022#define OP_CRITICAL_READ_WRK(OP, LCK_ID) \
2023 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2027 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
2029#ifdef KMP_GOMP_COMPAT
2030#define OP_GOMP_CRITICAL_READ_WRK(OP, FLAG) \
2031 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2033 OP_CRITICAL_READ_WRK(OP, 0); \
2036#define OP_GOMP_CRITICAL_READ_WRK(OP, FLAG)
2039#define ATOMIC_BEGIN_READ_WRK(TYPE_ID, OP_ID, TYPE) \
2040 void __kmpc_atomic_##TYPE_ID##_##OP_ID(TYPE *out, ident_t *id_ref, int gtid, \
2042 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2043 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
2046#define ATOMIC_CRITICAL_READ_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2047 ATOMIC_BEGIN_READ_WRK(TYPE_ID, OP_ID, TYPE) \
2048 OP_GOMP_CRITICAL_READ_WRK(OP## =, GOMP_FLAG) \
2049 OP_CRITICAL_READ_WRK(OP, LCK_ID) \
// Read entry points: fixed-width ints, floats, long double, quad and complex.
2056ATOMIC_FIXED_READ(fixed4, rd, kmp_int32, 32, +, 0)
2057ATOMIC_FIXED_READ(fixed8, rd, kmp_int64, 64, +,
2059ATOMIC_CMPXCHG_READ(float4, rd, kmp_real32, 32, +,
2061ATOMIC_CMPXCHG_READ(float8, rd, kmp_real64, 64, +,
2065ATOMIC_CMPXCHG_READ(fixed1, rd, kmp_int8, 8, +,
2067ATOMIC_CMPXCHG_READ(fixed2, rd, kmp_int16, 16, +,
2070ATOMIC_CRITICAL_READ(float10, rd,
long double, +, 10r,
2073ATOMIC_CRITICAL_READ(float16, rd, QUAD_LEGACY, +, 16r,
2079ATOMIC_CRITICAL_READ_WRK(cmplx4, rd, kmp_cmplx32, +, 8c,
2082ATOMIC_CRITICAL_READ(cmplx4, rd, kmp_cmplx32, +, 8c,
2085ATOMIC_CRITICAL_READ(cmplx8, rd, kmp_cmplx64, +, 16c,
2087ATOMIC_CRITICAL_READ(cmplx10, rd, kmp_cmplx80, +, 20c,
2090ATOMIC_CRITICAL_READ(cmplx16, rd, CPLX128_LEG, +, 32c,
2093ATOMIC_CRITICAL_READ(float16, a16_rd, Quad_a16_t, +, 16r,
2095ATOMIC_CRITICAL_READ(cmplx16, a16_rd, kmp_cmplx128_a16_t, +, 32c,
// ---- Atomic WRITE machinery ----
// Writes use a plain XCHG (fixed or real flavor) when the size supports it,
// a CAS retry loop (OP_CMPXCHG_WR) for 64-bit targets on 32-bit x86, and a
// per-size lock (ATOMIC_CRITICAL_WR) for long double / quad / complex.
// NOTE(review): interior continuation lines / trailing GOMP flags are
// missing in this view; comments added only, code untouched.
2103#define ATOMIC_XCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2104 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2105 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2106 KMP_XCHG_FIXED##BITS(lhs, rhs); \
2109#define ATOMIC_XCHG_FLOAT_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2110 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2111 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2112 KMP_XCHG_REAL##BITS(lhs, rhs); \
// CAS retry loop: re-reads *lhs and retries until the store succeeds.
2122#define OP_CMPXCHG_WR(TYPE, BITS, OP) \
2124 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2125 TYPE old_value, new_value; \
2127 old_value = temp_val; \
2129 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2130 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2131 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2133 old_value = temp_val; \
2139#define ATOMIC_CMPXCHG_WR(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2140 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2141 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2142 OP_CMPXCHG_WR(TYPE, BITS, OP) \
2151#define ATOMIC_CRITICAL_WR(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2152 ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void) \
2153 OP_GOMP_CRITICAL(OP, GOMP_FLAG) \
2154 OP_CRITICAL(OP, LCK_ID) \
// Write entry points for all supported types.
2158ATOMIC_XCHG_WR(fixed1, wr, kmp_int8, 8, =,
2160ATOMIC_XCHG_WR(fixed2, wr, kmp_int16, 16, =,
2162ATOMIC_XCHG_WR(fixed4, wr, kmp_int32, 32, =,
2165ATOMIC_CMPXCHG_WR(fixed8, wr, kmp_int64, 64, =,
2168ATOMIC_XCHG_WR(fixed8, wr, kmp_int64, 64, =,
2172ATOMIC_XCHG_FLOAT_WR(float4, wr, kmp_real32, 32, =,
2175ATOMIC_CMPXCHG_WR(float8, wr, kmp_real64, 64, =,
2178ATOMIC_XCHG_FLOAT_WR(float8, wr, kmp_real64, 64, =,
2182ATOMIC_CRITICAL_WR(float10, wr,
long double, =, 10r,
2185ATOMIC_CRITICAL_WR(float16, wr, QUAD_LEGACY, =, 16r,
2188ATOMIC_CRITICAL_WR(cmplx4, wr, kmp_cmplx32, =, 8c, 1)
2189ATOMIC_CRITICAL_WR(cmplx8, wr, kmp_cmplx64, =, 16c,
2191ATOMIC_CRITICAL_WR(cmplx10, wr, kmp_cmplx80, =, 20c,
2194ATOMIC_CRITICAL_WR(cmplx16, wr, CPLX128_LEG, =, 32c,
2197ATOMIC_CRITICAL_WR(float16, a16_wr, Quad_a16_t, =, 16r,
2199ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
// ---- CAPTURE ("cpt") machinery: x = expr; v = x (or v = x; x = expr) ----
// Entry points take an extra `flag` selecting whether the captured value is
// the old (flag == 0) or the new (flag != 0) value — visible below in the two
// orderings of `new_value = (*lhs)` around the update inside the critical
// macros. NOTE(review): interior continuation lines / the flag test itself
// are missing from this view; comments added only, code untouched.
2212#define ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, RET_TYPE) \
2213 RET_TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, \
2214 TYPE *lhs, TYPE rhs, int flag) { \
2215 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2216 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
2224#define OP_CRITICAL_CPT(OP, LCK_ID) \
2225 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2229 new_value = (*lhs); \
2231 new_value = (*lhs); \
2235 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2238#define OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) \
2239 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2242 (*lhs) = (TYPE)((*lhs)OP rhs); \
2243 new_value = (*lhs); \
2245 new_value = (*lhs); \
2246 (*lhs) = (TYPE)((*lhs)OP rhs); \
2249 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2253#ifdef KMP_GOMP_COMPAT
2254#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG) \
2255 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2257 OP_UPDATE_CRITICAL_CPT(TYPE, OP, 0); \
2260#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG)
// Lock-free capture: CAS retry loop computing old OP rhs each iteration.
2270#define OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2272 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2273 TYPE old_value, new_value; \
2275 old_value = temp_val; \
2276 new_value = (TYPE)(old_value OP rhs); \
2277 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2278 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2279 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2281 old_value = temp_val; \
2282 new_value = (TYPE)(old_value OP rhs); \
2291#define ATOMIC_CMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2292 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2295 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2296 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
// add/sub capture on ints uses hardware fetch-add; the captured "new" value
// is reconstructed as `old_value OP rhs`.
2300#define ATOMIC_FIXED_ADD_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2301 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2302 TYPE old_value, new_value; \
2304 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2306 old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs); \
2308 return old_value OP rhs; \
// Capture entry points for integer and floating types, all arithmetic,
// bitwise and shift operations. NOTE(review): trailing GOMP-flag arguments
// are missing from this view; comments added only, code untouched.
2314ATOMIC_FIXED_ADD_CPT(fixed4, add_cpt, kmp_int32, 32, +,
2316ATOMIC_FIXED_ADD_CPT(fixed4, sub_cpt, kmp_int32, 32, -,
2318ATOMIC_FIXED_ADD_CPT(fixed8, add_cpt, kmp_int64, 64, +,
2320ATOMIC_FIXED_ADD_CPT(fixed8, sub_cpt, kmp_int64, 64, -,
2323ATOMIC_CMPXCHG_CPT(float4, add_cpt, kmp_real32, 32, +,
2325ATOMIC_CMPXCHG_CPT(float4, sub_cpt, kmp_real32, 32, -,
2327ATOMIC_CMPXCHG_CPT(float8, add_cpt, kmp_real64, 64, +,
2329ATOMIC_CMPXCHG_CPT(float8, sub_cpt, kmp_real64, 64, -,
2344ATOMIC_CMPXCHG_CPT(fixed1, add_cpt, kmp_int8, 8, +,
2346ATOMIC_CMPXCHG_CPT(fixed1, andb_cpt, kmp_int8, 8, &,
2348ATOMIC_CMPXCHG_CPT(fixed1, div_cpt, kmp_int8, 8, /,
2350ATOMIC_CMPXCHG_CPT(fixed1u, div_cpt, kmp_uint8, 8, /,
2352ATOMIC_CMPXCHG_CPT(fixed1, mul_cpt, kmp_int8, 8, *,
2354ATOMIC_CMPXCHG_CPT(fixed1, orb_cpt, kmp_int8, 8, |,
2356ATOMIC_CMPXCHG_CPT(fixed1, shl_cpt, kmp_int8, 8, <<,
2358ATOMIC_CMPXCHG_CPT(fixed1, shr_cpt, kmp_int8, 8, >>,
2360ATOMIC_CMPXCHG_CPT(fixed1u, shr_cpt, kmp_uint8, 8, >>,
2362ATOMIC_CMPXCHG_CPT(fixed1, sub_cpt, kmp_int8, 8, -,
2364ATOMIC_CMPXCHG_CPT(fixed1, xor_cpt, kmp_int8, 8, ^,
2366ATOMIC_CMPXCHG_CPT(fixed2, add_cpt, kmp_int16, 16, +,
2368ATOMIC_CMPXCHG_CPT(fixed2, andb_cpt, kmp_int16, 16, &,
2370ATOMIC_CMPXCHG_CPT(fixed2, div_cpt, kmp_int16, 16, /,
2372ATOMIC_CMPXCHG_CPT(fixed2u, div_cpt, kmp_uint16, 16, /,
2374ATOMIC_CMPXCHG_CPT(fixed2, mul_cpt, kmp_int16, 16, *,
2376ATOMIC_CMPXCHG_CPT(fixed2, orb_cpt, kmp_int16, 16, |,
2378ATOMIC_CMPXCHG_CPT(fixed2, shl_cpt, kmp_int16, 16, <<,
2380ATOMIC_CMPXCHG_CPT(fixed2, shr_cpt, kmp_int16, 16, >>,
2382ATOMIC_CMPXCHG_CPT(fixed2u, shr_cpt, kmp_uint16, 16, >>,
2384ATOMIC_CMPXCHG_CPT(fixed2, sub_cpt, kmp_int16, 16, -,
2386ATOMIC_CMPXCHG_CPT(fixed2, xor_cpt, kmp_int16, 16, ^,
2388ATOMIC_CMPXCHG_CPT(fixed4, andb_cpt, kmp_int32, 32, &,
2390ATOMIC_CMPXCHG_CPT(fixed4, div_cpt, kmp_int32, 32, /,
2392ATOMIC_CMPXCHG_CPT(fixed4u, div_cpt, kmp_uint32, 32, /,
2394ATOMIC_CMPXCHG_CPT(fixed4, mul_cpt, kmp_int32, 32, *,
2396ATOMIC_CMPXCHG_CPT(fixed4, orb_cpt, kmp_int32, 32, |,
2398ATOMIC_CMPXCHG_CPT(fixed4, shl_cpt, kmp_int32, 32, <<,
2400ATOMIC_CMPXCHG_CPT(fixed4, shr_cpt, kmp_int32, 32, >>,
2402ATOMIC_CMPXCHG_CPT(fixed4u, shr_cpt, kmp_uint32, 32, >>,
2404ATOMIC_CMPXCHG_CPT(fixed4, xor_cpt, kmp_int32, 32, ^,
2406ATOMIC_CMPXCHG_CPT(fixed8, andb_cpt, kmp_int64, 64, &,
2408ATOMIC_CMPXCHG_CPT(fixed8, div_cpt, kmp_int64, 64, /,
2410ATOMIC_CMPXCHG_CPT(fixed8u, div_cpt, kmp_uint64, 64, /,
2412ATOMIC_CMPXCHG_CPT(fixed8, mul_cpt, kmp_int64, 64, *,
2414ATOMIC_CMPXCHG_CPT(fixed8, orb_cpt, kmp_int64, 64, |,
2416ATOMIC_CMPXCHG_CPT(fixed8, shl_cpt, kmp_int64, 64, <<,
2418ATOMIC_CMPXCHG_CPT(fixed8, shr_cpt, kmp_int64, 64, >>,
2420ATOMIC_CMPXCHG_CPT(fixed8u, shr_cpt, kmp_uint64, 64, >>,
2422ATOMIC_CMPXCHG_CPT(fixed8, xor_cpt, kmp_int64, 64, ^,
2424ATOMIC_CMPXCHG_CPT(float4, div_cpt, kmp_real32, 32, /,
2426ATOMIC_CMPXCHG_CPT(float4, mul_cpt, kmp_real32, 32, *,
2428ATOMIC_CMPXCHG_CPT(float8, div_cpt, kmp_real64, 64, /,
2430ATOMIC_CMPXCHG_CPT(float8, mul_cpt, kmp_real64, 64, *,
// ---- Capture with mixed operand types (lhs of TYPE, rhs of RTYPE/_Quad) ----
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2442#define ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2443 TYPE __kmpc_atomic_##TYPE_ID##_##OP_ID##_##RTYPE_ID( \
2444 ident_t *id_ref, int gtid, TYPE *lhs, RTYPE rhs, int flag) { \
2445 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2447 ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID "_" #RTYPE_ID ": T#%d\n", \
2451#define ATOMIC_CMPXCHG_CPT_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
2452 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
2453 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2455 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2456 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2460#define ATOMIC_CRITICAL_CPT_MIX(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
2461 LCK_ID, GOMP_FLAG) \
2462 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
2464 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2465 OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) \
// int/float lhs with _Quad rhs, add/sub/mul/div, signed and unsigned.
2468ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, add_cpt, 8, +, fp, _Quad, 1i, 0,
2470ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, add_cpt, 8, +, fp, _Quad, 1i, 0,
2472ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, sub_cpt, 8, -, fp, _Quad, 1i, 0,
2474ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, sub_cpt, 8, -, fp, _Quad, 1i, 0,
2476ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, mul_cpt, 8, *, fp, _Quad, 1i, 0,
2478ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, mul_cpt, 8, *, fp, _Quad, 1i, 0,
2480ATOMIC_CMPXCHG_CPT_MIX(fixed1,
char, div_cpt, 8, /, fp, _Quad, 1i, 0,
2482ATOMIC_CMPXCHG_CPT_MIX(fixed1u, uchar, div_cpt, 8, /, fp, _Quad, 1i, 0,
2485ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, add_cpt, 16, +, fp, _Quad, 2i, 1,
2487ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, add_cpt, 16, +, fp, _Quad, 2i, 1,
2489ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, sub_cpt, 16, -, fp, _Quad, 2i, 1,
2491ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, sub_cpt, 16, -, fp, _Quad, 2i, 1,
2493ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, mul_cpt, 16, *, fp, _Quad, 2i, 1,
2495ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, mul_cpt, 16, *, fp, _Quad, 2i, 1,
2497ATOMIC_CMPXCHG_CPT_MIX(fixed2,
short, div_cpt, 16, /, fp, _Quad, 2i, 1,
2499ATOMIC_CMPXCHG_CPT_MIX(fixed2u, ushort, div_cpt, 16, /, fp, _Quad, 2i, 1,
2502ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, add_cpt, 32, +, fp, _Quad, 4i, 3,
2504ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, add_cpt, 32, +, fp, _Quad, 4i, 3,
2506ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, sub_cpt, 32, -, fp, _Quad, 4i, 3,
2508ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, sub_cpt, 32, -, fp, _Quad, 4i, 3,
2510ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, mul_cpt, 32, *, fp, _Quad, 4i, 3,
2512ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, mul_cpt, 32, *, fp, _Quad, 4i, 3,
2514ATOMIC_CMPXCHG_CPT_MIX(fixed4, kmp_int32, div_cpt, 32, /, fp, _Quad, 4i, 3,
2516ATOMIC_CMPXCHG_CPT_MIX(fixed4u, kmp_uint32, div_cpt, 32, /, fp, _Quad, 4i, 3,
2519ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, add_cpt, 64, +, fp, _Quad, 8i, 7,
2521ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, add_cpt, 64, +, fp, _Quad, 8i, 7,
2523ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, sub_cpt, 64, -, fp, _Quad, 8i, 7,
2525ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, sub_cpt, 64, -, fp, _Quad, 8i, 7,
2527ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, mul_cpt, 64, *, fp, _Quad, 8i, 7,
2529ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, mul_cpt, 64, *, fp, _Quad, 8i, 7,
2531ATOMIC_CMPXCHG_CPT_MIX(fixed8, kmp_int64, div_cpt, 64, /, fp, _Quad, 8i, 7,
2533ATOMIC_CMPXCHG_CPT_MIX(fixed8u, kmp_uint64, div_cpt, 64, /, fp, _Quad, 8i, 7,
2536ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, add_cpt, 32, +, fp, _Quad, 4r, 3,
2538ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, sub_cpt, 32, -, fp, _Quad, 4r, 3,
2540ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, mul_cpt, 32, *, fp, _Quad, 4r, 3,
2542ATOMIC_CMPXCHG_CPT_MIX(float4, kmp_real32, div_cpt, 32, /, fp, _Quad, 4r, 3,
2545ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, add_cpt, 64, +, fp, _Quad, 8r, 7,
2547ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, sub_cpt, 64, -, fp, _Quad, 8r, 7,
2549ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, mul_cpt, 64, *, fp, _Quad, 8r, 7,
2551ATOMIC_CMPXCHG_CPT_MIX(float8, kmp_real64, div_cpt, 64, /, fp, _Quad, 8r, 7,
2554ATOMIC_CRITICAL_CPT_MIX(float10,
long double, add_cpt, +, fp, _Quad, 10r,
2556ATOMIC_CRITICAL_CPT_MIX(float10,
long double, sub_cpt, -, fp, _Quad, 10r,
2558ATOMIC_CRITICAL_CPT_MIX(float10,
long double, mul_cpt, *, fp, _Quad, 10r,
2560ATOMIC_CRITICAL_CPT_MIX(float10,
long double, div_cpt, /, fp, _Quad, 10r,
// ---- Logical (&&, ||) capture ----
// The GOMP-critical fallback passes "= *lhs OP" as the OP argument so the
// critical body forms `new_value = *lhs OP rhs` (token-pasted).
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2574#define OP_CRITICAL_L_CPT(OP, LCK_ID) \
2575 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2579 (*lhs) = new_value; \
2581 new_value = (*lhs); \
2585 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
2588#ifdef KMP_GOMP_COMPAT
2589#define OP_GOMP_CRITICAL_L_CPT(OP, FLAG) \
2590 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2592 OP_CRITICAL_L_CPT(OP, 0); \
2596#define OP_GOMP_CRITICAL_L_CPT(OP, FLAG)
2601#define ATOMIC_CMPX_L_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2602 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2605 OP_GOMP_CRITICAL_L_CPT(= *lhs OP, GOMP_FLAG) \
2606 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2609ATOMIC_CMPX_L_CPT(fixed1, andl_cpt,
char, 8, &&,
2611ATOMIC_CMPX_L_CPT(fixed1, orl_cpt,
char, 8, ||,
2613ATOMIC_CMPX_L_CPT(fixed2, andl_cpt,
short, 16, &&,
2615ATOMIC_CMPX_L_CPT(fixed2, orl_cpt,
short, 16, ||,
2617ATOMIC_CMPX_L_CPT(fixed4, andl_cpt, kmp_int32, 32, &&,
2619ATOMIC_CMPX_L_CPT(fixed4, orl_cpt, kmp_int32, 32, ||,
2621ATOMIC_CMPX_L_CPT(fixed8, andl_cpt, kmp_int64, 64, &&,
2623ATOMIC_CMPX_L_CPT(fixed8, orl_cpt, kmp_int64, 64, ||,
// ---- MIN / MAX capture ----
// OP is the *comparison* (`<` for max, `>` for min): the store happens only
// while `*lhs OP rhs` holds, i.e. while rhs would improve the stored value.
// The fast path first tests `*lhs OP rhs` non-atomically, then retries under
// CAS (MIN_MAX_CMPXCHG_CPT) or takes the lock (MIN_MAX_CRITSECT_CPT).
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2635#define MIN_MAX_CRITSECT_CPT(OP, LCK_ID) \
2636 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2638 if (*lhs OP rhs) { \
2644 new_value = old_value; \
2648 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2652#ifdef KMP_GOMP_COMPAT
2653#define GOMP_MIN_MAX_CRITSECT_CPT(OP, FLAG) \
2654 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2656 MIN_MAX_CRITSECT_CPT(OP, 0); \
2659#define GOMP_MIN_MAX_CRITSECT_CPT(OP, FLAG)
2663#define MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2665 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2668 old_value = temp_val; \
2669 while (old_value OP rhs && \
2670 !KMP_COMPARE_AND_STORE_ACQ##BITS( \
2671 (kmp_int##BITS *)lhs, \
2672 *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2673 *VOLATILE_CAST(kmp_int##BITS *) & rhs)) { \
2675 old_value = temp_val; \
2685#define MIN_MAX_CRITICAL_CPT(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2686 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2687 TYPE new_value, old_value; \
2688 if (*lhs OP rhs) { \
2689 GOMP_MIN_MAX_CRITSECT_CPT(OP, GOMP_FLAG) \
2690 MIN_MAX_CRITSECT_CPT(OP, LCK_ID) \
2695#define MIN_MAX_COMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2696 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2697 TYPE new_value, old_value; \
2699 if (*lhs OP rhs) { \
2700 GOMP_MIN_MAX_CRITSECT_CPT(OP, GOMP_FLAG) \
2701 MIN_MAX_CMPXCHG_CPT(TYPE, BITS, OP) \
2706MIN_MAX_COMPXCHG_CPT(fixed1, max_cpt,
char, 8, <,
2708MIN_MAX_COMPXCHG_CPT(fixed1, min_cpt,
char, 8, >,
2710MIN_MAX_COMPXCHG_CPT(fixed2, max_cpt,
short, 16, <,
2712MIN_MAX_COMPXCHG_CPT(fixed2, min_cpt,
short, 16, >,
2714MIN_MAX_COMPXCHG_CPT(fixed4, max_cpt, kmp_int32, 32, <,
2716MIN_MAX_COMPXCHG_CPT(fixed4, min_cpt, kmp_int32, 32, >,
2718MIN_MAX_COMPXCHG_CPT(fixed8, max_cpt, kmp_int64, 64, <,
2720MIN_MAX_COMPXCHG_CPT(fixed8, min_cpt, kmp_int64, 64, >,
2722MIN_MAX_COMPXCHG_CPT(float4, max_cpt, kmp_real32, 32, <,
2724MIN_MAX_COMPXCHG_CPT(float4, min_cpt, kmp_real32, 32, >,
2726MIN_MAX_COMPXCHG_CPT(float8, max_cpt, kmp_real64, 64, <,
2728MIN_MAX_COMPXCHG_CPT(float8, min_cpt, kmp_real64, 64, >,
2730MIN_MAX_CRITICAL_CPT(float10, max_cpt,
long double, <, 10r,
2732MIN_MAX_CRITICAL_CPT(float10, min_cpt,
long double, >, 10r,
2735MIN_MAX_CRITICAL_CPT(float16, max_cpt, QUAD_LEGACY, <, 16r,
2737MIN_MAX_CRITICAL_CPT(float16, min_cpt, QUAD_LEGACY, >, 16r,
2740MIN_MAX_CRITICAL_CPT(float16, max_a16_cpt, Quad_a16_t, <, 16r,
2742MIN_MAX_CRITICAL_CPT(float16, min_a16_cpt, Quad_a16_t, >, 16r,
// ---- .NEQV. / .EQV. capture ----
// neqv maps to plain XOR; eqv uses the composite "^~" operator so that
// `old ^~ rhs` expands to XOR with the bitwise complement (Fortran .EQV.).
// The GOMP fallback passes "^= (TYPE) ~" to build the same under a lock.
// NOTE(review): trailing GOMP flags are missing in this view; comments added
// only, code untouched.
2748#ifdef KMP_GOMP_COMPAT
2749#define OP_GOMP_CRITICAL_EQV_CPT(OP, FLAG) \
2750 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2752 OP_CRITICAL_CPT(OP, 0); \
2755#define OP_GOMP_CRITICAL_EQV_CPT(OP, FLAG)
2758#define ATOMIC_CMPX_EQV_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2759 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2762 OP_GOMP_CRITICAL_EQV_CPT(^= (TYPE) ~, GOMP_FLAG) \
2763 OP_CMPXCHG_CPT(TYPE, BITS, OP) \
2768ATOMIC_CMPXCHG_CPT(fixed1, neqv_cpt, kmp_int8, 8, ^,
2770ATOMIC_CMPXCHG_CPT(fixed2, neqv_cpt, kmp_int16, 16, ^,
2772ATOMIC_CMPXCHG_CPT(fixed4, neqv_cpt, kmp_int32, 32, ^,
2774ATOMIC_CMPXCHG_CPT(fixed8, neqv_cpt, kmp_int64, 64, ^,
2776ATOMIC_CMPX_EQV_CPT(fixed1, eqv_cpt, kmp_int8, 8, ^~,
2778ATOMIC_CMPX_EQV_CPT(fixed2, eqv_cpt, kmp_int16, 16, ^~,
2780ATOMIC_CMPX_EQV_CPT(fixed4, eqv_cpt, kmp_int32, 32, ^~,
2782ATOMIC_CMPX_EQV_CPT(fixed8, eqv_cpt, kmp_int64, 64, ^~,
// ---- Lock-based capture for extended types (long double, quad, complex) ----
// "WRK" variants return via an `out` pointer rather than by value.
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2791#define ATOMIC_CRITICAL_CPT(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2792 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2794 OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) \
2795 OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) \
2801#define OP_CRITICAL_CPT_WRK(OP, LCK_ID) \
2802 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2812 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2816#ifdef KMP_GOMP_COMPAT
2817#define OP_GOMP_CRITICAL_CPT_WRK(OP, FLAG) \
2818 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2820 OP_CRITICAL_CPT_WRK(OP## =, 0); \
2823#define OP_GOMP_CRITICAL_CPT_WRK(OP, FLAG)
2827#define ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
2828 void __kmpc_atomic_##TYPE_ID##_##OP_ID(ident_t *id_ref, int gtid, TYPE *lhs, \
2829 TYPE rhs, TYPE *out, int flag) { \
2830 KMP_DEBUG_ASSERT(__kmp_init_serial); \
2831 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_" #OP_ID ": T#%d\n", gtid));
2834#define ATOMIC_CRITICAL_CPT_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
2835 ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
2836 OP_GOMP_CRITICAL_CPT_WRK(OP, GOMP_FLAG) \
2837 OP_CRITICAL_CPT_WRK(OP## =, LCK_ID) \
2843ATOMIC_CRITICAL_CPT(float10, add_cpt,
long double, +, 10r,
2845ATOMIC_CRITICAL_CPT(float10, sub_cpt,
long double, -, 10r,
2847ATOMIC_CRITICAL_CPT(float10, mul_cpt,
long double, *, 10r,
2849ATOMIC_CRITICAL_CPT(float10, div_cpt,
long double, /, 10r,
2853ATOMIC_CRITICAL_CPT(float16, add_cpt, QUAD_LEGACY, +, 16r,
2855ATOMIC_CRITICAL_CPT(float16, sub_cpt, QUAD_LEGACY, -, 16r,
2857ATOMIC_CRITICAL_CPT(float16, mul_cpt, QUAD_LEGACY, *, 16r,
2859ATOMIC_CRITICAL_CPT(float16, div_cpt, QUAD_LEGACY, /, 16r,
2862ATOMIC_CRITICAL_CPT(float16, add_a16_cpt, Quad_a16_t, +, 16r,
2864ATOMIC_CRITICAL_CPT(float16, sub_a16_cpt, Quad_a16_t, -, 16r,
2866ATOMIC_CRITICAL_CPT(float16, mul_a16_cpt, Quad_a16_t, *, 16r,
2868ATOMIC_CRITICAL_CPT(float16, div_a16_cpt, Quad_a16_t, /, 16r,
2876ATOMIC_CRITICAL_CPT_WRK(cmplx4, add_cpt, kmp_cmplx32, +, 8c,
2878ATOMIC_CRITICAL_CPT_WRK(cmplx4, sub_cpt, kmp_cmplx32, -, 8c,
2880ATOMIC_CRITICAL_CPT_WRK(cmplx4, mul_cpt, kmp_cmplx32, *, 8c,
2882ATOMIC_CRITICAL_CPT_WRK(cmplx4, div_cpt, kmp_cmplx32, /, 8c,
2885ATOMIC_CRITICAL_CPT(cmplx8, add_cpt, kmp_cmplx64, +, 16c,
2887ATOMIC_CRITICAL_CPT(cmplx8, sub_cpt, kmp_cmplx64, -, 16c,
2889ATOMIC_CRITICAL_CPT(cmplx8, mul_cpt, kmp_cmplx64, *, 16c,
2891ATOMIC_CRITICAL_CPT(cmplx8, div_cpt, kmp_cmplx64, /, 16c,
2893ATOMIC_CRITICAL_CPT(cmplx10, add_cpt, kmp_cmplx80, +, 20c,
2895ATOMIC_CRITICAL_CPT(cmplx10, sub_cpt, kmp_cmplx80, -, 20c,
2897ATOMIC_CRITICAL_CPT(cmplx10, mul_cpt, kmp_cmplx80, *, 20c,
2899ATOMIC_CRITICAL_CPT(cmplx10, div_cpt, kmp_cmplx80, /, 20c,
2902ATOMIC_CRITICAL_CPT(cmplx16, add_cpt, CPLX128_LEG, +, 32c,
2904ATOMIC_CRITICAL_CPT(cmplx16, sub_cpt, CPLX128_LEG, -, 32c,
2906ATOMIC_CRITICAL_CPT(cmplx16, mul_cpt, CPLX128_LEG, *, 32c,
2908ATOMIC_CRITICAL_CPT(cmplx16, div_cpt, CPLX128_LEG, /, 32c,
2911ATOMIC_CRITICAL_CPT(cmplx16, add_a16_cpt, kmp_cmplx128_a16_t, +, 32c,
2913ATOMIC_CRITICAL_CPT(cmplx16, sub_a16_cpt, kmp_cmplx128_a16_t, -, 32c,
2915ATOMIC_CRITICAL_CPT(cmplx16, mul_a16_cpt, kmp_cmplx128_a16_t, *, 32c,
2917ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
// ---- Capture with REVERSED operands: *lhs = rhs OP *lhs ----
// Only non-commutative operations (sub, div, shifts) get _rev forms; the new
// value is computed as `rhs OP old_value` both in the critical and CAS paths.
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
2932#define OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
2933 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2937 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2938 new_value = (*lhs); \
2940 new_value = (*lhs); \
2941 (*lhs) = (TYPE)((rhs)OP(*lhs)); \
2943 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
2947#ifdef KMP_GOMP_COMPAT
2948#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG) \
2949 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
2951 OP_CRITICAL_CPT_REV(TYPE, OP, 0); \
2954#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG)
2964#define OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
2966 TYPE KMP_ATOMIC_VOLATILE temp_val; \
2967 TYPE old_value, new_value; \
2969 old_value = temp_val; \
2970 new_value = (TYPE)(rhs OP old_value); \
2971 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
2972 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
2973 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
2975 old_value = temp_val; \
2976 new_value = (TYPE)(rhs OP old_value); \
2985#define ATOMIC_CMPXCHG_CPT_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG) \
2986 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
2989 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
2990 OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
2993ATOMIC_CMPXCHG_CPT_REV(fixed1, div_cpt_rev, kmp_int8, 8, /,
2995ATOMIC_CMPXCHG_CPT_REV(fixed1u, div_cpt_rev, kmp_uint8, 8, /,
2997ATOMIC_CMPXCHG_CPT_REV(fixed1, shl_cpt_rev, kmp_int8, 8, <<,
2999ATOMIC_CMPXCHG_CPT_REV(fixed1, shr_cpt_rev, kmp_int8, 8, >>,
3001ATOMIC_CMPXCHG_CPT_REV(fixed1u, shr_cpt_rev, kmp_uint8, 8, >>,
3003ATOMIC_CMPXCHG_CPT_REV(fixed1, sub_cpt_rev, kmp_int8, 8, -,
3005ATOMIC_CMPXCHG_CPT_REV(fixed2, div_cpt_rev, kmp_int16, 16, /,
3007ATOMIC_CMPXCHG_CPT_REV(fixed2u, div_cpt_rev, kmp_uint16, 16, /,
3009ATOMIC_CMPXCHG_CPT_REV(fixed2, shl_cpt_rev, kmp_int16, 16, <<,
3011ATOMIC_CMPXCHG_CPT_REV(fixed2, shr_cpt_rev, kmp_int16, 16, >>,
3013ATOMIC_CMPXCHG_CPT_REV(fixed2u, shr_cpt_rev, kmp_uint16, 16, >>,
3015ATOMIC_CMPXCHG_CPT_REV(fixed2, sub_cpt_rev, kmp_int16, 16, -,
3017ATOMIC_CMPXCHG_CPT_REV(fixed4, div_cpt_rev, kmp_int32, 32, /,
3019ATOMIC_CMPXCHG_CPT_REV(fixed4u, div_cpt_rev, kmp_uint32, 32, /,
3021ATOMIC_CMPXCHG_CPT_REV(fixed4, shl_cpt_rev, kmp_int32, 32, <<,
3023ATOMIC_CMPXCHG_CPT_REV(fixed4, shr_cpt_rev, kmp_int32, 32, >>,
3025ATOMIC_CMPXCHG_CPT_REV(fixed4u, shr_cpt_rev, kmp_uint32, 32, >>,
3027ATOMIC_CMPXCHG_CPT_REV(fixed4, sub_cpt_rev, kmp_int32, 32, -,
3029ATOMIC_CMPXCHG_CPT_REV(fixed8, div_cpt_rev, kmp_int64, 64, /,
3031ATOMIC_CMPXCHG_CPT_REV(fixed8u, div_cpt_rev, kmp_uint64, 64, /,
3033ATOMIC_CMPXCHG_CPT_REV(fixed8, shl_cpt_rev, kmp_int64, 64, <<,
3035ATOMIC_CMPXCHG_CPT_REV(fixed8, shr_cpt_rev, kmp_int64, 64, >>,
3037ATOMIC_CMPXCHG_CPT_REV(fixed8u, shr_cpt_rev, kmp_uint64, 64, >>,
3039ATOMIC_CMPXCHG_CPT_REV(fixed8, sub_cpt_rev, kmp_int64, 64, -,
3041ATOMIC_CMPXCHG_CPT_REV(float4, div_cpt_rev, kmp_real32, 32, /,
3043ATOMIC_CMPXCHG_CPT_REV(float4, sub_cpt_rev, kmp_real32, 32, -,
3045ATOMIC_CMPXCHG_CPT_REV(float8, div_cpt_rev, kmp_real64, 64, /,
3047ATOMIC_CMPXCHG_CPT_REV(float8, sub_cpt_rev, kmp_real64, 64, -,
3057#define ATOMIC_CRITICAL_CPT_REV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG) \
3058 ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE) \
3061 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3062 OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
3067ATOMIC_CRITICAL_CPT_REV(float10, sub_cpt_rev,
long double, -, 10r,
3069ATOMIC_CRITICAL_CPT_REV(float10, div_cpt_rev,
long double, /, 10r,
3073ATOMIC_CRITICAL_CPT_REV(float16, sub_cpt_rev, QUAD_LEGACY, -, 16r,
3075ATOMIC_CRITICAL_CPT_REV(float16, div_cpt_rev, QUAD_LEGACY, /, 16r,
3078ATOMIC_CRITICAL_CPT_REV(float16, sub_a16_cpt_rev, Quad_a16_t, -, 16r,
3080ATOMIC_CRITICAL_CPT_REV(float16, div_a16_cpt_rev, Quad_a16_t, /, 16r,
// ---- Reverse capture: out-parameter (WRK) and mixed-precision forms ----
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
3090#define OP_CRITICAL_CPT_REV_WRK(OP, LCK_ID) \
3091 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3094 (*lhs) = (rhs)OP(*lhs); \
3098 (*lhs) = (rhs)OP(*lhs); \
3101 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3105#ifdef KMP_GOMP_COMPAT
3106#define OP_GOMP_CRITICAL_CPT_REV_WRK(OP, FLAG) \
3107 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3109 OP_CRITICAL_CPT_REV_WRK(OP, 0); \
3112#define OP_GOMP_CRITICAL_CPT_REV_WRK(OP, FLAG)
3116#define ATOMIC_CRITICAL_CPT_REV_WRK(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, \
3118 ATOMIC_BEGIN_WRK(TYPE_ID, OP_ID, TYPE) \
3119 OP_GOMP_CRITICAL_CPT_REV_WRK(OP, GOMP_FLAG) \
3120 OP_CRITICAL_CPT_REV_WRK(OP, LCK_ID) \
3126ATOMIC_CRITICAL_CPT_REV_WRK(cmplx4, sub_cpt_rev, kmp_cmplx32, -, 8c,
3128ATOMIC_CRITICAL_CPT_REV_WRK(cmplx4, div_cpt_rev, kmp_cmplx32, /, 8c,
3131ATOMIC_CRITICAL_CPT_REV(cmplx8, sub_cpt_rev, kmp_cmplx64, -, 16c,
3133ATOMIC_CRITICAL_CPT_REV(cmplx8, div_cpt_rev, kmp_cmplx64, /, 16c,
3135ATOMIC_CRITICAL_CPT_REV(cmplx10, sub_cpt_rev, kmp_cmplx80, -, 20c,
3137ATOMIC_CRITICAL_CPT_REV(cmplx10, div_cpt_rev, kmp_cmplx80, /, 20c,
3140ATOMIC_CRITICAL_CPT_REV(cmplx16, sub_cpt_rev, CPLX128_LEG, -, 32c,
3142ATOMIC_CRITICAL_CPT_REV(cmplx16, div_cpt_rev, CPLX128_LEG, /, 32c,
3145ATOMIC_CRITICAL_CPT_REV(cmplx16, sub_a16_cpt_rev, kmp_cmplx128_a16_t, -, 32c,
3147ATOMIC_CRITICAL_CPT_REV(cmplx16, div_a16_cpt_rev, kmp_cmplx128_a16_t, /, 32c,
// Mixed-precision reverse capture (lhs TYPE, rhs _Quad).
3161#define ATOMIC_CMPXCHG_CPT_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, \
3162 RTYPE, LCK_ID, MASK, GOMP_FLAG) \
3163 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
3165 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3166 OP_CMPXCHG_CPT_REV(TYPE, BITS, OP) \
3170#define ATOMIC_CRITICAL_CPT_REV_MIX(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, \
3171 LCK_ID, GOMP_FLAG) \
3172 ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE) \
3174 OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) \
3175 OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) \
3178ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1,
char, sub_cpt_rev, 8, -, fp, _Quad, 1i, 0,
3180ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1u, uchar, sub_cpt_rev, 8, -, fp, _Quad, 1i, 0,
3182ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1,
char, div_cpt_rev, 8, /, fp, _Quad, 1i, 0,
3184ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1u, uchar, div_cpt_rev, 8, /, fp, _Quad, 1i, 0,
3187ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2,
short, sub_cpt_rev, 16, -, fp, _Quad, 2i, 1,
3189ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2u, ushort, sub_cpt_rev, 16, -, fp, _Quad, 2i,
3192ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2,
short, div_cpt_rev, 16, /, fp, _Quad, 2i, 1,
3194ATOMIC_CMPXCHG_CPT_REV_MIX(fixed2u, ushort, div_cpt_rev, 16, /, fp, _Quad, 2i,
3198ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4, kmp_int32, sub_cpt_rev, 32, -, fp, _Quad, 4i,
3200ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4u, kmp_uint32, sub_cpt_rev, 32, -, fp, _Quad,
3202ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4, kmp_int32, div_cpt_rev, 32, /, fp, _Quad, 4i,
3204ATOMIC_CMPXCHG_CPT_REV_MIX(fixed4u, kmp_uint32, div_cpt_rev, 32, /, fp, _Quad,
3207ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8, kmp_int64, sub_cpt_rev, 64, -, fp, _Quad, 8i,
3210ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8u, kmp_uint64, sub_cpt_rev, 64, -, fp, _Quad,
3213ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8, kmp_int64, div_cpt_rev, 64, /, fp, _Quad, 8i,
3216ATOMIC_CMPXCHG_CPT_REV_MIX(fixed8u, kmp_uint64, div_cpt_rev, 64, /, fp, _Quad,
3220ATOMIC_CMPXCHG_CPT_REV_MIX(float4, kmp_real32, sub_cpt_rev, 32, -, fp, _Quad,
3223ATOMIC_CMPXCHG_CPT_REV_MIX(float4, kmp_real32, div_cpt_rev, 32, /, fp, _Quad,
3227ATOMIC_CMPXCHG_CPT_REV_MIX(float8, kmp_real64, sub_cpt_rev, 64, -, fp, _Quad,
3230ATOMIC_CMPXCHG_CPT_REV_MIX(float8, kmp_real64, div_cpt_rev, 64, /, fp, _Quad,
3234ATOMIC_CRITICAL_CPT_REV_MIX(float10,
long double, sub_cpt_rev, -, fp, _Quad,
3236ATOMIC_CRITICAL_CPT_REV_MIX(float10,
long double, div_cpt_rev, /, fp, _Quad,
// ---- SWAP ("swp"): atomically store rhs, return the previous value ----
// XCHG for sizes with hardware exchange, CAS loop otherwise, lock for
// extended types; the cmplx4 variant returns through an `out` pointer.
// NOTE(review): interior continuation lines / trailing GOMP flags are missing
// in this view; comments added only, code untouched.
3243#define ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3244 TYPE __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3246 KMP_DEBUG_ASSERT(__kmp_init_serial); \
3247 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_swp: T#%d\n", gtid));
3249#define CRITICAL_SWP(LCK_ID) \
3250 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3252 old_value = (*lhs); \
3255 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3259#ifdef KMP_GOMP_COMPAT
3260#define GOMP_CRITICAL_SWP(FLAG) \
3261 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3266#define GOMP_CRITICAL_SWP(FLAG)
3269#define ATOMIC_XCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3270 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3272 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3273 old_value = KMP_XCHG_FIXED##BITS(lhs, rhs); \
3277#define ATOMIC_XCHG_FLOAT_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3278 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3280 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3281 old_value = KMP_XCHG_REAL##BITS(lhs, rhs); \
3286#define CMPXCHG_SWP(TYPE, BITS) \
3288 TYPE KMP_ATOMIC_VOLATILE temp_val; \
3289 TYPE old_value, new_value; \
3291 old_value = temp_val; \
3293 while (!KMP_COMPARE_AND_STORE_ACQ##BITS( \
3294 (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value, \
3295 *VOLATILE_CAST(kmp_int##BITS *) & new_value)) { \
3297 old_value = temp_val; \
3304#define ATOMIC_CMPXCHG_SWP(TYPE_ID, TYPE, BITS, GOMP_FLAG) \
3305 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3308 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3309 CMPXCHG_SWP(TYPE, BITS) \
3312ATOMIC_XCHG_SWP(fixed1, kmp_int8, 8, KMP_ARCH_X86)
3313ATOMIC_XCHG_SWP(fixed2, kmp_int16, 16, KMP_ARCH_X86)
3314ATOMIC_XCHG_SWP(fixed4, kmp_int32, 32, KMP_ARCH_X86)
3316ATOMIC_XCHG_FLOAT_SWP(float4, kmp_real32, 32,
3320ATOMIC_CMPXCHG_SWP(fixed8, kmp_int64, 64,
3322ATOMIC_CMPXCHG_SWP(float8, kmp_real64, 64,
3325ATOMIC_XCHG_SWP(fixed8, kmp_int64, 64, KMP_ARCH_X86)
3326ATOMIC_XCHG_FLOAT_SWP(float8, kmp_real64, 64,
3333#define ATOMIC_CRITICAL_SWP(TYPE_ID, TYPE, LCK_ID, GOMP_FLAG) \
3334 ATOMIC_BEGIN_SWP(TYPE_ID, TYPE) \
3336 GOMP_CRITICAL_SWP(GOMP_FLAG) \
3337 CRITICAL_SWP(LCK_ID) \
3345#define ATOMIC_BEGIN_SWP_WRK(TYPE_ID, TYPE) \
3346 void __kmpc_atomic_##TYPE_ID##_swp(ident_t *id_ref, int gtid, TYPE *lhs, \
3347 TYPE rhs, TYPE *out) { \
3348 KMP_DEBUG_ASSERT(__kmp_init_serial); \
3349 KA_TRACE(100, ("__kmpc_atomic_" #TYPE_ID "_swp: T#%d\n", gtid));
3351#define CRITICAL_SWP_WRK(LCK_ID) \
3352 __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3357 __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid); \
3361#ifdef KMP_GOMP_COMPAT
3362#define GOMP_CRITICAL_SWP_WRK(FLAG) \
3363 if ((FLAG) && (__kmp_atomic_mode == 2)) { \
3365 CRITICAL_SWP_WRK(0); \
3368#define GOMP_CRITICAL_SWP_WRK(FLAG)
3372#define ATOMIC_CRITICAL_SWP_WRK(TYPE_ID, TYPE, LCK_ID, GOMP_FLAG) \
3373 ATOMIC_BEGIN_SWP_WRK(TYPE_ID, TYPE) \
3375 GOMP_CRITICAL_SWP_WRK(GOMP_FLAG) \
3376 CRITICAL_SWP_WRK(LCK_ID) \
3380ATOMIC_CRITICAL_SWP(float10,
long double, 10r, 1)
3382ATOMIC_CRITICAL_SWP(float16, QUAD_LEGACY, 16r, 1)
3385ATOMIC_CRITICAL_SWP_WRK(cmplx4, kmp_cmplx32, 8c, 1)
3390ATOMIC_CRITICAL_SWP(cmplx8, kmp_cmplx64, 16c, 1)
3391ATOMIC_CRITICAL_SWP(cmplx10, kmp_cmplx80, 20c, 1)
3393ATOMIC_CRITICAL_SWP(cmplx16, CPLX128_LEG, 32c, 1)
3395ATOMIC_CRITICAL_SWP(float16_a16, Quad_a16_t, 16r,
3397ATOMIC_CRITICAL_SWP(cmplx16_a16, kmp_cmplx128_a16_t, 32c,
// Generic 1-byte atomic update: applies user callback `f(&result, &old, rhs)`
// and commits via an 8-bit CAS retry loop on the fast path, otherwise under a
// lock (the global GOMP lock when __kmp_atomic_mode == 2, else the 1-byte
// lock). NOTE(review): closing braces / some interior lines are missing from
// this extraction-damaged view; comments added only, code untouched.
3411void __kmpc_atomic_1(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3412 void (*f)(
void *,
void *,
void *)) {
3413 KMP_DEBUG_ASSERT(__kmp_init_serial);
3416#
if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
// Lock-free path: recompute via the callback and retry until the CAS lands.
3422 kmp_int8 old_value, new_value;
3424 old_value = *(kmp_int8 *)lhs;
3425 (*f)(&new_value, &old_value, rhs);
3428 while (!KMP_COMPARE_AND_STORE_ACQ8((kmp_int8 *)lhs, *(kmp_int8 *)&old_value,
3429 *(kmp_int8 *)&new_value)) {
3432 old_value = *(kmp_int8 *)lhs;
3433 (*f)(&new_value, &old_value, rhs);
// Locked path: GOMP-compat mode uses the single global atomic lock.
3440#ifdef KMP_GOMP_COMPAT
3441 if (__kmp_atomic_mode == 2) {
3442 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3445 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_1i, gtid);
3447 (*f)(lhs, lhs, rhs);
3449#ifdef KMP_GOMP_COMPAT
3450 if (__kmp_atomic_mode == 2) {
3451 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3454 __kmp_release_atomic_lock(&__kmp_atomic_lock_1i, gtid);
3458void __kmpc_atomic_2(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3459 void (*f)(
void *,
void *,
void *)) {
3461#
if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
3463#elif KMP_ARCH_X86 || KMP_ARCH_X86_64
3466 !((kmp_uintptr_t)lhs & 0x1)
3469 kmp_int16 old_value, new_value;
3471 old_value = *(kmp_int16 *)lhs;
3472 (*f)(&new_value, &old_value, rhs);
3475 while (!KMP_COMPARE_AND_STORE_ACQ16(
3476 (kmp_int16 *)lhs, *(kmp_int16 *)&old_value, *(kmp_int16 *)&new_value)) {
3479 old_value = *(kmp_int16 *)lhs;
3480 (*f)(&new_value, &old_value, rhs);
3487#ifdef KMP_GOMP_COMPAT
3488 if (__kmp_atomic_mode == 2) {
3489 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3492 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_2i, gtid);
3494 (*f)(lhs, lhs, rhs);
3496#ifdef KMP_GOMP_COMPAT
3497 if (__kmp_atomic_mode == 2) {
3498 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3501 __kmp_release_atomic_lock(&__kmp_atomic_lock_2i, gtid);
3505void __kmpc_atomic_4(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3506 void (*f)(
void *,
void *,
void *)) {
3507 KMP_DEBUG_ASSERT(__kmp_init_serial);
3512#
if KMP_ARCH_X86 || KMP_ARCH_X86_64
3515 !((kmp_uintptr_t)lhs & 0x3)
3518 kmp_int32 old_value, new_value;
3520 old_value = *(kmp_int32 *)lhs;
3521 (*f)(&new_value, &old_value, rhs);
3524 while (!KMP_COMPARE_AND_STORE_ACQ32(
3525 (kmp_int32 *)lhs, *(kmp_int32 *)&old_value, *(kmp_int32 *)&new_value)) {
3528 old_value = *(kmp_int32 *)lhs;
3529 (*f)(&new_value, &old_value, rhs);
3537#ifdef KMP_GOMP_COMPAT
3538 if (__kmp_atomic_mode == 2) {
3539 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3542 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_4i, gtid);
3544 (*f)(lhs, lhs, rhs);
3546#ifdef KMP_GOMP_COMPAT
3547 if (__kmp_atomic_mode == 2) {
3548 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3551 __kmp_release_atomic_lock(&__kmp_atomic_lock_4i, gtid);
3555void __kmpc_atomic_8(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3556 void (*f)(
void *,
void *,
void *)) {
3557 KMP_DEBUG_ASSERT(__kmp_init_serial);
3560#
if KMP_ARCH_X86 && defined(KMP_GOMP_COMPAT)
3562#elif KMP_ARCH_X86 || KMP_ARCH_X86_64
3565 !((kmp_uintptr_t)lhs & 0x7)
3568 kmp_int64 old_value, new_value;
3570 old_value = *(kmp_int64 *)lhs;
3571 (*f)(&new_value, &old_value, rhs);
3573 while (!KMP_COMPARE_AND_STORE_ACQ64(
3574 (kmp_int64 *)lhs, *(kmp_int64 *)&old_value, *(kmp_int64 *)&new_value)) {
3577 old_value = *(kmp_int64 *)lhs;
3578 (*f)(&new_value, &old_value, rhs);
3586#ifdef KMP_GOMP_COMPAT
3587 if (__kmp_atomic_mode == 2) {
3588 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3591 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_8i, gtid);
3593 (*f)(lhs, lhs, rhs);
3595#ifdef KMP_GOMP_COMPAT
3596 if (__kmp_atomic_mode == 2) {
3597 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3600 __kmp_release_atomic_lock(&__kmp_atomic_lock_8i, gtid);
3603#if KMP_ARCH_X86 || KMP_ARCH_X86_64
3604void __kmpc_atomic_10(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3605 void (*f)(
void *,
void *,
void *)) {
3606 KMP_DEBUG_ASSERT(__kmp_init_serial);
3608#ifdef KMP_GOMP_COMPAT
3609 if (__kmp_atomic_mode == 2) {
3610 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3613 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_10r, gtid);
3615 (*f)(lhs, lhs, rhs);
3617#ifdef KMP_GOMP_COMPAT
3618 if (__kmp_atomic_mode == 2) {
3619 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3622 __kmp_release_atomic_lock(&__kmp_atomic_lock_10r, gtid);
3626void __kmpc_atomic_16(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3627 void (*f)(
void *,
void *,
void *)) {
3628 KMP_DEBUG_ASSERT(__kmp_init_serial);
3630#ifdef KMP_GOMP_COMPAT
3631 if (__kmp_atomic_mode == 2) {
3632 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3635 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_16c, gtid);
3637 (*f)(lhs, lhs, rhs);
3639#ifdef KMP_GOMP_COMPAT
3640 if (__kmp_atomic_mode == 2) {
3641 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3644 __kmp_release_atomic_lock(&__kmp_atomic_lock_16c, gtid);
3646#if KMP_ARCH_X86 || KMP_ARCH_X86_64
3647void __kmpc_atomic_20(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3648 void (*f)(
void *,
void *,
void *)) {
3649 KMP_DEBUG_ASSERT(__kmp_init_serial);
3651#ifdef KMP_GOMP_COMPAT
3652 if (__kmp_atomic_mode == 2) {
3653 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3656 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_20c, gtid);
3658 (*f)(lhs, lhs, rhs);
3660#ifdef KMP_GOMP_COMPAT
3661 if (__kmp_atomic_mode == 2) {
3662 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3665 __kmp_release_atomic_lock(&__kmp_atomic_lock_20c, gtid);
3668void __kmpc_atomic_32(
ident_t *id_ref,
int gtid,
void *lhs,
void *rhs,
3669 void (*f)(
void *,
void *,
void *)) {
3670 KMP_DEBUG_ASSERT(__kmp_init_serial);
3672#ifdef KMP_GOMP_COMPAT
3673 if (__kmp_atomic_mode == 2) {
3674 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3677 __kmp_acquire_atomic_lock(&__kmp_atomic_lock_32c, gtid);
3679 (*f)(lhs, lhs, rhs);
3681#ifdef KMP_GOMP_COMPAT
3682 if (__kmp_atomic_mode == 2) {
3683 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3686 __kmp_release_atomic_lock(&__kmp_atomic_lock_32c, gtid);
3692void __kmpc_atomic_start(
void) {
3693 int gtid = __kmp_entry_gtid();
3694 KA_TRACE(20, (
"__kmpc_atomic_start: T#%d\n", gtid));
3695 __kmp_acquire_atomic_lock(&__kmp_atomic_lock, gtid);
3698void __kmpc_atomic_end(
void) {
3699 int gtid = __kmp_get_gtid();
3700 KA_TRACE(20, (
"__kmpc_atomic_end: T#%d\n", gtid));
3701 __kmp_release_atomic_lock(&__kmp_atomic_lock, gtid);
3704#if KMP_ARCH_X86 || KMP_ARCH_X86_64
3722bool __kmpc_atomic_bool_1_cas(
ident_t *loc,
int gtid,
char *x,
char e,
char d) {
3723 return KMP_COMPARE_AND_STORE_ACQ8(x, e, d);
3725bool __kmpc_atomic_bool_2_cas(
ident_t *loc,
int gtid,
short *x,
short e,
3727 return KMP_COMPARE_AND_STORE_ACQ16(x, e, d);
3729bool __kmpc_atomic_bool_4_cas(
ident_t *loc,
int gtid, kmp_int32 *x, kmp_int32 e,
3731 return KMP_COMPARE_AND_STORE_ACQ32(x, e, d);
3733bool __kmpc_atomic_bool_8_cas(
ident_t *loc,
int gtid, kmp_int64 *x, kmp_int64 e,
3735 return KMP_COMPARE_AND_STORE_ACQ64(x, e, d);
3752char __kmpc_atomic_val_1_cas(
ident_t *loc,
int gtid,
char *x,
char e,
char d) {
3753 return KMP_COMPARE_AND_STORE_RET8(x, e, d);
3755short __kmpc_atomic_val_2_cas(
ident_t *loc,
int gtid,
short *x,
short e,
3757 return KMP_COMPARE_AND_STORE_RET16(x, e, d);
3759kmp_int32 __kmpc_atomic_val_4_cas(
ident_t *loc,
int gtid, kmp_int32 *x,
3760 kmp_int32 e, kmp_int32 d) {
3761 return KMP_COMPARE_AND_STORE_RET32(x, e, d);
3763kmp_int64 __kmpc_atomic_val_8_cas(
ident_t *loc,
int gtid, kmp_int64 *x,
3764 kmp_int64 e, kmp_int64 d) {
3765 return KMP_COMPARE_AND_STORE_RET64(x, e, d);
3784bool __kmpc_atomic_bool_1_cas_cpt(
ident_t *loc,
int gtid,
char *x,
char e,
3786 char old = KMP_COMPARE_AND_STORE_RET8(x, e, d);
3789 KMP_ASSERT(pv != NULL);
3793bool __kmpc_atomic_bool_2_cas_cpt(
ident_t *loc,
int gtid,
short *x,
short e,
3794 short d,
short *pv) {
3795 short old = KMP_COMPARE_AND_STORE_RET16(x, e, d);
3798 KMP_ASSERT(pv != NULL);
3802bool __kmpc_atomic_bool_4_cas_cpt(
ident_t *loc,
int gtid, kmp_int32 *x,
3803 kmp_int32 e, kmp_int32 d, kmp_int32 *pv) {
3804 kmp_int32 old = KMP_COMPARE_AND_STORE_RET32(x, e, d);
3807 KMP_ASSERT(pv != NULL);
3811bool __kmpc_atomic_bool_8_cas_cpt(
ident_t *loc,
int gtid, kmp_int64 *x,
3812 kmp_int64 e, kmp_int64 d, kmp_int64 *pv) {
3813 kmp_int64 old = KMP_COMPARE_AND_STORE_RET64(x, e, d);
3816 KMP_ASSERT(pv != NULL);
3837char __kmpc_atomic_val_1_cas_cpt(
ident_t *loc,
int gtid,
char *x,
char e,
3839 char old = KMP_COMPARE_AND_STORE_RET8(x, e, d);
3840 KMP_ASSERT(pv != NULL);
3841 *pv = old == e ? d : old;
3844short __kmpc_atomic_val_2_cas_cpt(
ident_t *loc,
int gtid,
short *x,
short e,
3845 short d,
short *pv) {
3846 short old = KMP_COMPARE_AND_STORE_RET16(x, e, d);
3847 KMP_ASSERT(pv != NULL);
3848 *pv = old == e ? d : old;
3851kmp_int32 __kmpc_atomic_val_4_cas_cpt(
ident_t *loc,
int gtid, kmp_int32 *x,
3852 kmp_int32 e, kmp_int32 d, kmp_int32 *pv) {
3853 kmp_int32 old = KMP_COMPARE_AND_STORE_RET32(x, e, d);
3854 KMP_ASSERT(pv != NULL);
3855 *pv = old == e ? d : old;
3858kmp_int64 __kmpc_atomic_val_8_cas_cpt(
ident_t *loc,
int gtid, kmp_int64 *x,
3859 kmp_int64 e, kmp_int64 d, kmp_int64 *pv) {
3860 kmp_int64 old = KMP_COMPARE_AND_STORE_RET64(x, e, d);
3861 KMP_ASSERT(pv != NULL);
3862 *pv = old == e ? d : old;