Diffstat (limited to 'core/safe_refcount.cpp')
-rw-r--r-- | core/safe_refcount.cpp | 32
1 file changed, 16 insertions, 16 deletions
diff --git a/core/safe_refcount.cpp b/core/safe_refcount.cpp
index 22b9ff9741..d5ee778ef7 100644
--- a/core/safe_refcount.cpp
+++ b/core/safe_refcount.cpp
@@ -42,24 +42,27 @@
 	/* taken from boost */ \
 	while (true) { \
 		m_cpp_type tmp = static_cast<m_cpp_type const volatile &>(*(m_pw)); \
-		if (tmp == 0) \
+		if (tmp == 0) { \
 			return 0; /* if zero, can't add to it anymore */ \
-		if (m_win_cmpxchg((m_win_type volatile *)(m_pw), tmp + 1, tmp) == tmp) \
+		} \
+		if (m_win_cmpxchg((m_win_type volatile *)(m_pw), tmp + 1, tmp) == tmp) { \
 			return tmp + 1; \
+		} \
 	}
 
 #define ATOMIC_EXCHANGE_IF_GREATER_BODY(m_pw, m_val, m_win_type, m_win_cmpxchg, m_cpp_type) \
 	while (true) { \
 		m_cpp_type tmp = static_cast<m_cpp_type const volatile &>(*(m_pw)); \
-		if (tmp >= m_val) \
+		if (tmp >= m_val) { \
 			return tmp; /* already greater, or equal */ \
-		if (m_win_cmpxchg((m_win_type volatile *)(m_pw), m_val, tmp) == tmp) \
+		} \
+		if (m_win_cmpxchg((m_win_type volatile *)(m_pw), m_val, tmp) == tmp) { \
 			return m_val; \
+		} \
 	}
 
-_ALWAYS_INLINE_ uint32_t _atomic_conditional_increment_impl(volatile uint32_t *pw){
-
-	ATOMIC_CONDITIONAL_INCREMENT_BODY(pw, LONG, InterlockedCompareExchange, uint32_t)
+_ALWAYS_INLINE_ uint32_t _atomic_conditional_increment_impl(volatile uint32_t *pw) {
+	ATOMIC_CONDITIONAL_INCREMENT_BODY(pw, LONG, InterlockedCompareExchange, uint32_t);
 }
 
 _ALWAYS_INLINE_ uint32_t _atomic_decrement_impl(volatile uint32_t *pw) {
@@ -78,14 +81,12 @@ _ALWAYS_INLINE_ uint32_t _atomic_add_impl(volatile uint32_t *pw, volatile uint32
 	return InterlockedAdd((LONG volatile *)pw, val);
 }
 
-_ALWAYS_INLINE_ uint32_t _atomic_exchange_if_greater_impl(volatile uint32_t *pw, volatile uint32_t val){
-
-	ATOMIC_EXCHANGE_IF_GREATER_BODY(pw, val, LONG, InterlockedCompareExchange, uint32_t)
+_ALWAYS_INLINE_ uint32_t _atomic_exchange_if_greater_impl(volatile uint32_t *pw, volatile uint32_t val) {
+	ATOMIC_EXCHANGE_IF_GREATER_BODY(pw, val, LONG, InterlockedCompareExchange, uint32_t);
 }
 
-_ALWAYS_INLINE_ uint64_t _atomic_conditional_increment_impl(volatile uint64_t *pw){
-
-	ATOMIC_CONDITIONAL_INCREMENT_BODY(pw, LONGLONG, InterlockedCompareExchange64, uint64_t)
+_ALWAYS_INLINE_ uint64_t _atomic_conditional_increment_impl(volatile uint64_t *pw) {
+	ATOMIC_CONDITIONAL_INCREMENT_BODY(pw, LONGLONG, InterlockedCompareExchange64, uint64_t);
 }
 
 _ALWAYS_INLINE_ uint64_t _atomic_decrement_impl(volatile uint64_t *pw) {
@@ -104,9 +105,8 @@ _ALWAYS_INLINE_ uint64_t _atomic_add_impl(volatile uint64_t *pw, volatile uint64
 	return InterlockedAdd64((LONGLONG volatile *)pw, val);
 }
 
-_ALWAYS_INLINE_ uint64_t _atomic_exchange_if_greater_impl(volatile uint64_t *pw, volatile uint64_t val){
-
-	ATOMIC_EXCHANGE_IF_GREATER_BODY(pw, val, LONGLONG, InterlockedCompareExchange64, uint64_t)
+_ALWAYS_INLINE_ uint64_t _atomic_exchange_if_greater_impl(volatile uint64_t *pw, volatile uint64_t val) {
+	ATOMIC_EXCHANGE_IF_GREATER_BODY(pw, val, LONGLONG, InterlockedCompareExchange64, uint64_t);
 }
 
 // The actual advertised functions; they'll call the right implementation
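For context, both macro bodies touched by this diff implement the same pattern: a compare-and-swap (CAS) retry loop. The sketch below shows the conditional-increment variant in portable C++ using std::atomic instead of the Win32 InterlockedCompareExchange intrinsic; the function name, signature, and main() harness are illustrative only and do not appear in safe_refcount.cpp.

#include <atomic>
#include <cstdint>
#include <iostream>

// Illustrative portable equivalent of ATOMIC_CONDITIONAL_INCREMENT_BODY:
// increment the counter only if it is still nonzero, so a refcount that
// has already reached zero (object being destroyed) is never resurrected.
static uint32_t conditional_increment(std::atomic<uint32_t> &count) {
	uint32_t tmp = count.load();
	while (true) {
		if (tmp == 0) {
			return 0; // if zero, can't add to it anymore
		}
		// On failure, compare_exchange_weak reloads the current value into
		// tmp, so the loop retries with fresh data, the same dance the
		// m_win_cmpxchg loop in the diff performs by re-reading *(m_pw).
		if (count.compare_exchange_weak(tmp, tmp + 1)) {
			return tmp + 1;
		}
	}
}

int main() {
	std::atomic<uint32_t> refcount{1};
	std::cout << conditional_increment(refcount) << "\n"; // prints 2
	refcount.store(0);
	std::cout << conditional_increment(refcount) << "\n"; // prints 0: stays dead
}

The exchange-if-greater macro follows the identical loop shape, differing only in the early-out condition (tmp >= m_val) and the value written on success (m_val rather than tmp + 1).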