diff --git a/core/safe_refcount.h b/core/safe_refcount.h
index 0b65ffb9ca3..54f540b0c72 100644
--- a/core/safe_refcount.h
+++ b/core/safe_refcount.h
@@ -97,8 +97,8 @@ static _ALWAYS_INLINE_ T atomic_exchange_if_greater(volatile T *pw, volatile V v
 
 /* Implementation for GCC & Clang */
 
-#include
-#include
+// GCC guarantees atomic intrinsics for sizes of 1, 2, 4 and 8 bytes.
+// Clang states it supports GCC atomic builtins.
 
 template <class T>
 static _ALWAYS_INLINE_ T atomic_conditional_increment(volatile T *pw) {
@@ -107,7 +107,7 @@ static _ALWAYS_INLINE_ T atomic_conditional_increment(volatile T *pw) {
 		T tmp = static_cast<T>(*pw);
 		if (tmp == 0)
 			return 0; // if zero, can't add to it anymore
-		if (__atomic_compare_exchange_n(pw, &tmp, tmp + 1, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) == true)
+		if (__sync_val_compare_and_swap(pw, tmp, tmp + 1) == tmp)
 			return tmp + 1;
 	}
 }
@@ -115,25 +115,25 @@ static _ALWAYS_INLINE_ T atomic_conditional_increment(volatile T *pw) {
 template <class T>
 static _ALWAYS_INLINE_ T atomic_decrement(volatile T *pw) {
 
-	return __atomic_sub_fetch(pw, 1, __ATOMIC_SEQ_CST);
+	return __sync_sub_and_fetch(pw, 1);
 }
 
 template <class T>
 static _ALWAYS_INLINE_ T atomic_increment(volatile T *pw) {
 
-	return __atomic_add_fetch(pw, 1, __ATOMIC_SEQ_CST);
+	return __sync_add_and_fetch(pw, 1);
 }
 
 template <class T, class V>
 static _ALWAYS_INLINE_ T atomic_sub(volatile T *pw, volatile V val) {
 
-	return __atomic_sub_fetch(pw, val, __ATOMIC_SEQ_CST);
+	return __sync_sub_and_fetch(pw, val);
 }
 
 template <class T, class V>
 static _ALWAYS_INLINE_ T atomic_add(volatile T *pw, volatile V val) {
 
-	return __atomic_add_fetch(pw, val, __ATOMIC_SEQ_CST);
+	return __sync_add_and_fetch(pw, val);
 }
 
 template <class T, class V>
@@ -143,7 +143,7 @@ static _ALWAYS_INLINE_ T atomic_exchange_if_greater(volatile T *pw, volatile V v
 		T tmp = static_cast<T>(*pw);
 		if (tmp >= val)
 			return tmp; // already greater, or equal
-		if (__atomic_compare_exchange_n(pw, &tmp, val, false, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) == true)
+		if (__sync_val_compare_and_swap(pw, tmp, val) == tmp)
 			return val;
 	}
 }
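
For reference, the replacement compare-and-swap is equivalent in effect: __sync_val_compare_and_swap(pw, old, new) returns the value *pw held before the call, so a result equal to old signals the same success case that the bool return of __atomic_compare_exchange_n did. The __sync_* builtins predate the __atomic_* family (added in GCC 4.7) and act as full memory barriers, so dropping the explicit __ATOMIC_SEQ_CST arguments does not weaken the ordering. Below is a minimal standalone sketch of the patched conditional-increment loop; the name demo_conditional_increment and the main harness are illustrative only, not part of this change.

#include <cstdio>

// Same loop shape as the patched atomic_conditional_increment: retry the
// CAS until it succeeds, or bail out once the count is observed at zero.
template <class T>
T demo_conditional_increment(volatile T *pw) {

	while (true) {
		T tmp = static_cast<T>(*pw);
		if (tmp == 0)
			return 0; // refcount already hit zero; never revive it
		// Returns the prior value of *pw; equality with tmp means our
		// tmp -> tmp + 1 swap is the one that took effect.
		if (__sync_val_compare_and_swap(pw, tmp, tmp + 1) == tmp)
			return tmp + 1;
	}
}

int main() {

	volatile unsigned int count = 1;
	printf("%u\n", demo_conditional_increment(&count)); // prints 2
	count = 0;
	printf("%u\n", demo_conditional_increment(&count)); // prints 0
}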