author     Rémi Verschelde <rverschelde@gmail.com>    2020-05-14 13:23:58 +0200
committer  Rémi Verschelde <rverschelde@gmail.com>    2020-05-14 16:54:55 +0200
commit     0be6d925dc3c6413bce7a3ccb49631b8e4a6e67a (patch)
tree       a27e497da7104dd0a64f98a04fa3067668735e91 /core/safe_refcount.cpp
parent     710b34b70227becdc652b4ae027fe0ac47409642 (diff)
Style: clang-format: Disable KeepEmptyLinesAtTheStartOfBlocks
This means that reduz's beloved style, which we all became used to, will now be changed automatically to remove the first empty line. This leans us closer to 1TBS (the one true brace style) instead of hybridizing it with some Allman-inspired spacing. There's still the case of braces around single-statement blocks that needs to be addressed; clang-format can't help with that, but clang-tidy may, if we agree on it. Part of #33027.
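For illustration, a minimal sketch (not part of this commit) of what setting KeepEmptyLinesAtTheStartOfBlocks: false in .clang-format does; the function and call names below are made up:

// Hypothetical example, before reformatting: an empty line opens the block.
void example_function() {

	do_work();
}

// After reformatting with KeepEmptyLinesAtTheStartOfBlocks: false,
// clang-format removes that leading empty line automatically:
void example_function() {
	do_work();
}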
Diffstat (limited to 'core/safe_refcount.cpp')
-rw-r--r--  core/safe_refcount.cpp  8
1 file changed, 0 insertions(+), 8 deletions(-)
diff --git a/core/safe_refcount.cpp b/core/safe_refcount.cpp
index e4604faa09..22b9ff9741 100644
--- a/core/safe_refcount.cpp
+++ b/core/safe_refcount.cpp
@@ -63,22 +63,18 @@ _ALWAYS_INLINE_ uint32_t _atomic_conditional_increment_impl(volatile uint32_t *p
 }
 
 _ALWAYS_INLINE_ uint32_t _atomic_decrement_impl(volatile uint32_t *pw) {
-
 	return InterlockedDecrement((LONG volatile *)pw);
 }
 
 _ALWAYS_INLINE_ uint32_t _atomic_increment_impl(volatile uint32_t *pw) {
-
 	return InterlockedIncrement((LONG volatile *)pw);
 }
 
 _ALWAYS_INLINE_ uint32_t _atomic_sub_impl(volatile uint32_t *pw, volatile uint32_t val) {
-
 	return InterlockedExchangeAdd((LONG volatile *)pw, -(int32_t)val) - val;
 }
 
 _ALWAYS_INLINE_ uint32_t _atomic_add_impl(volatile uint32_t *pw, volatile uint32_t val) {
-
 	return InterlockedAdd((LONG volatile *)pw, val);
 }
 
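A note on the _atomic_sub_impl wrapper in the hunk above: InterlockedExchangeAdd returns the value the variable held *before* the addition, so the wrapper subtracts val once more to report the post-operation count, matching what InterlockedDecrement and InterlockedAdd return. A minimal standalone sketch of the same semantics using std::atomic (an illustration by analogy, not code from this file):

#include <atomic>
#include <cstdint>

uint32_t atomic_sub_sketch(std::atomic<uint32_t> &pw, uint32_t val) {
	// fetch_sub, like InterlockedExchangeAdd with a negated operand,
	// returns the previous value; subtracting val yields the new value.
	return pw.fetch_sub(val) - val;
}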
@@ -93,22 +89,18 @@ _ALWAYS_INLINE_ uint64_t _atomic_conditional_increment_impl(volatile uint64_t *p
 }
 
 _ALWAYS_INLINE_ uint64_t _atomic_decrement_impl(volatile uint64_t *pw) {
-
 	return InterlockedDecrement64((LONGLONG volatile *)pw);
 }
 
 _ALWAYS_INLINE_ uint64_t _atomic_increment_impl(volatile uint64_t *pw) {
-
 	return InterlockedIncrement64((LONGLONG volatile *)pw);
 }
 
 _ALWAYS_INLINE_ uint64_t _atomic_sub_impl(volatile uint64_t *pw, volatile uint64_t val) {
-
 	return InterlockedExchangeAdd64((LONGLONG volatile *)pw, -(int64_t)val) - val;
 }
 
 _ALWAYS_INLINE_ uint64_t _atomic_add_impl(volatile uint64_t *pw, volatile uint64_t val) {
-
 	return InterlockedAdd64((LONGLONG volatile *)pw, val);
 }
 
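Both hunk headers reference _atomic_conditional_increment_impl, whose body is elided here. For context, a generic compare-exchange sketch of a conditional increment (increment only while the count is nonzero); this is the textbook CAS-loop pattern, offered as an assumption rather than Godot's exact code:

#include <cstdint>
#include <windows.h>

inline uint32_t conditional_increment_sketch(volatile uint32_t *pw) {
	for (;;) {
		uint32_t tmp = *pw; // read the current count
		if (tmp == 0) {
			return 0; // count already hit zero; do not resurrect
		}
		// Publish tmp + 1 only if *pw still equals tmp; otherwise retry.
		if (InterlockedCompareExchange((LONG volatile *)pw, (LONG)(tmp + 1), (LONG)tmp) == (LONG)tmp) {
			return tmp + 1; // the new count
		}
	}
}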