summaryrefslogtreecommitdiff
path: root/core/safe_refcount.cpp
diff options
context:
space:
mode:
authorPedro J. Estébanez <RandomShaper@users.noreply.github.com>2017-07-11 14:19:45 +0200
committerGitHub <noreply@github.com>2017-07-11 14:19:45 +0200
commit6758ba0d933a4f83e246b500639cbb99f0f0cacd (patch)
tree6a51b27a5749ff390f7847360595428b89b39eb3 /core/safe_refcount.cpp
parenta5d500f0233913fe7622434225c5dc73ebaccd1e (diff)
parent779693a79bd6859d6b7d2fad58dc5e20c97160c0 (diff)
Merge pull request #9014 from RandomShaper/improve-mem-stats
Improve reliability of memory stats; memory block byte tagging
Diffstat (limited to 'core/safe_refcount.cpp')
-rw-r--r--core/safe_refcount.cpp67
1 file changed, 67 insertions(+), 0 deletions(-)
diff --git a/core/safe_refcount.cpp b/core/safe_refcount.cpp
index e4a5a994e6..95a274cda7 100644
--- a/core/safe_refcount.cpp
+++ b/core/safe_refcount.cpp
@@ -57,6 +57,30 @@ uint32_t atomic_decrement(register uint32_t *pw) {
return *pw;
}
+uint64_t atomic_conditional_increment(register uint64_t *pw) {
+
+ if (*pw == 0)
+ return 0;
+
+ (*pw)++;
+
+ return *pw;
+}
+
+uint64_t atomic_increment(register uint64_t *pw) {
+
+ (*pw)++;
+
+ return *pw;
+}
+
+uint64_t atomic_decrement(register uint64_t *pw) {
+
+ (*pw)--;
+
+ return *pw;
+}
+
#else
#ifdef _MSC_VER
@@ -84,6 +108,28 @@ uint32_t atomic_decrement(register uint32_t *pw) {
uint32_t atomic_increment(register uint32_t *pw) {
return InterlockedIncrement((LONG volatile *)pw);
}
+
+uint64_t atomic_conditional_increment(register uint64_t *pw) {
+
+ /* try to increment until it actually works */
+ // taken from boost
+
+ while (true) {
+ uint64_t tmp = static_cast<uint64_t const volatile &>(*pw);
+ if (tmp == 0)
+ return 0; // if zero, can't add to it anymore
+ if (InterlockedCompareExchange64((LONGLONG volatile *)pw, tmp + 1, tmp) == tmp)
+ return tmp + 1;
+ }
+}
+
+uint64_t atomic_decrement(register uint64_t *pw) {
+ return InterlockedDecrement64((LONGLONG volatile *)pw);
+}
+
+uint64_t atomic_increment(register uint64_t *pw) {
+ return InterlockedIncrement64((LONGLONG volatile *)pw);
+}
#elif defined(__GNUC__)
uint32_t atomic_conditional_increment(register uint32_t *pw) {
@@ -107,6 +153,27 @@ uint32_t atomic_increment(register uint32_t *pw) {
return __sync_add_and_fetch(pw, 1);
}
+uint64_t atomic_conditional_increment(register uint64_t *pw) {
+
+ while (true) {
+ uint64_t tmp = static_cast<uint64_t const volatile &>(*pw);
+ if (tmp == 0)
+ return 0; // if zero, can't add to it anymore
+ if (__sync_val_compare_and_swap(pw, tmp, tmp + 1) == tmp)
+ return tmp + 1;
+ }
+}
+
+uint64_t atomic_decrement(register uint64_t *pw) {
+
+ return __sync_sub_and_fetch(pw, 1);
+}
+
+uint64_t atomic_increment(register uint64_t *pw) {
+
+ return __sync_add_and_fetch(pw, 1);
+}
+
#else
//no threads supported?
#error Must provide atomic functions for this platform or compiler!