diff options
author | Marat Dukhan <maratek@gmail.com> | 2020-04-22 17:37:02 -0700 |
---|---|---|
committer | Marat Dukhan <maratek@gmail.com> | 2020-04-22 17:37:02 -0700 |
commit | 6acde9cc88a995ead19e15a54ddd46d13ddfbbc6 (patch) | |
tree | 43489ded61e482b66b634577c3c2d91eb91e8694 /src | |
parent | e918b206d26b1f3b2100b0edabf445c18708d2b7 (diff) | |
download | pthreadpool-6acde9cc88a995ead19e15a54ddd46d13ddfbbc6.tar.gz |
Reorder C11 atomics before MSVC atomics
clang-cl, which supports both, should prefer C11 atomics
Diffstat (limited to 'src')
-rw-r--r-- | src/threadpool-atomics.h | 234 |
1 file changed, 117 insertions(+), 117 deletions(-)
diff --git a/src/threadpool-atomics.h b/src/threadpool-atomics.h index 2b46c9a..474d12b 100644 --- a/src/threadpool-atomics.h +++ b/src/threadpool-atomics.h @@ -240,6 +240,123 @@ _WriteBarrier(); _mm_sfence(); } +#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) + #include <stdatomic.h> + + typedef _Atomic(uint32_t) pthreadpool_atomic_uint32_t; + typedef _Atomic(size_t) pthreadpool_atomic_size_t; + typedef _Atomic(void*) pthreadpool_atomic_void_p; + + static inline uint32_t pthreadpool_load_relaxed_uint32_t( + pthreadpool_atomic_uint32_t* address) + { + return atomic_load_explicit(address, memory_order_relaxed); + } + + static inline size_t pthreadpool_load_relaxed_size_t( + pthreadpool_atomic_size_t* address) + { + return atomic_load_explicit(address, memory_order_relaxed); + } + + static inline void* pthreadpool_load_relaxed_void_p( + pthreadpool_atomic_void_p* address) + { + return atomic_load_explicit(address, memory_order_relaxed); + } + + static inline uint32_t pthreadpool_load_acquire_uint32_t( + pthreadpool_atomic_uint32_t* address) + { + return atomic_load_explicit(address, memory_order_acquire); + } + + static inline size_t pthreadpool_load_acquire_size_t( + pthreadpool_atomic_size_t* address) + { + return atomic_load_explicit(address, memory_order_acquire); + } + + static inline void pthreadpool_store_relaxed_uint32_t( + pthreadpool_atomic_uint32_t* address, + uint32_t value) + { + atomic_store_explicit(address, value, memory_order_relaxed); + } + + static inline void pthreadpool_store_relaxed_size_t( + pthreadpool_atomic_size_t* address, + size_t value) + { + atomic_store_explicit(address, value, memory_order_relaxed); + } + + static inline void pthreadpool_store_relaxed_void_p( + pthreadpool_atomic_void_p* address, + void* value) + { + atomic_store_explicit(address, value, memory_order_relaxed); + } + + static inline void pthreadpool_store_release_uint32_t( + pthreadpool_atomic_uint32_t* address, + uint32_t value) + { + 
atomic_store_explicit(address, value, memory_order_release); + } + + static inline void pthreadpool_store_release_size_t( + pthreadpool_atomic_size_t* address, + size_t value) + { + atomic_store_explicit(address, value, memory_order_release); + } + + static inline size_t pthreadpool_decrement_fetch_relaxed_size_t( + pthreadpool_atomic_size_t* address) + { + return atomic_fetch_sub_explicit(address, 1, memory_order_relaxed) - 1; + } + + static inline size_t pthreadpool_decrement_fetch_release_size_t( + pthreadpool_atomic_size_t* address) + { + return atomic_fetch_sub_explicit(address, 1, memory_order_release) - 1; + } + + static inline bool pthreadpool_try_decrement_relaxed_size_t( + pthreadpool_atomic_size_t* value) + { + #if defined(__clang__) && (defined(__arm__) || defined(__aarch64__)) + size_t actual_value; + do { + actual_value = __builtin_arm_ldrex((const volatile size_t*) value); + if (actual_value == 0) { + __builtin_arm_clrex(); + return false; + } + } while (__builtin_arm_strex(actual_value - 1, (volatile size_t*) value) != 0); + return true; + #else + size_t actual_value = pthreadpool_load_relaxed_size_t(value); + while (actual_value != 0) { + if (atomic_compare_exchange_weak_explicit( + value, &actual_value, actual_value - 1, memory_order_relaxed, memory_order_relaxed)) + { + return true; + } + } + return false; + #endif + } + + static inline void pthreadpool_fence_acquire() { + atomic_thread_fence(memory_order_acquire); + } + + static inline void pthreadpool_fence_release() { + atomic_thread_fence(memory_order_release); + } #elif defined(_MSC_VER) && defined(_M_IX86) typedef volatile uint32_t pthreadpool_atomic_uint32_t; typedef volatile size_t pthreadpool_atomic_size_t; @@ -581,123 +698,6 @@ _WriteBarrier(); __dmb(_ARM_BARRIER_ISH); } -#elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) - #include <stdatomic.h> - - typedef _Atomic(uint32_t) pthreadpool_atomic_uint32_t; - typedef _Atomic(size_t) pthreadpool_atomic_size_t; - typedef 
_Atomic(void*) pthreadpool_atomic_void_p; - - static inline uint32_t pthreadpool_load_relaxed_uint32_t( - pthreadpool_atomic_uint32_t* address) - { - return atomic_load_explicit(address, memory_order_relaxed); - } - - static inline size_t pthreadpool_load_relaxed_size_t( - pthreadpool_atomic_size_t* address) - { - return atomic_load_explicit(address, memory_order_relaxed); - } - - static inline void* pthreadpool_load_relaxed_void_p( - pthreadpool_atomic_void_p* address) - { - return atomic_load_explicit(address, memory_order_relaxed); - } - - static inline uint32_t pthreadpool_load_acquire_uint32_t( - pthreadpool_atomic_uint32_t* address) - { - return atomic_load_explicit(address, memory_order_acquire); - } - - static inline size_t pthreadpool_load_acquire_size_t( - pthreadpool_atomic_size_t* address) - { - return atomic_load_explicit(address, memory_order_acquire); - } - - static inline void pthreadpool_store_relaxed_uint32_t( - pthreadpool_atomic_uint32_t* address, - uint32_t value) - { - atomic_store_explicit(address, value, memory_order_relaxed); - } - - static inline void pthreadpool_store_relaxed_size_t( - pthreadpool_atomic_size_t* address, - size_t value) - { - atomic_store_explicit(address, value, memory_order_relaxed); - } - - static inline void pthreadpool_store_relaxed_void_p( - pthreadpool_atomic_void_p* address, - void* value) - { - atomic_store_explicit(address, value, memory_order_relaxed); - } - - static inline void pthreadpool_store_release_uint32_t( - pthreadpool_atomic_uint32_t* address, - uint32_t value) - { - atomic_store_explicit(address, value, memory_order_release); - } - - static inline void pthreadpool_store_release_size_t( - pthreadpool_atomic_size_t* address, - size_t value) - { - atomic_store_explicit(address, value, memory_order_release); - } - - static inline size_t pthreadpool_decrement_fetch_relaxed_size_t( - pthreadpool_atomic_size_t* address) - { - return atomic_fetch_sub_explicit(address, 1, memory_order_relaxed) - 1; - } - - 
static inline size_t pthreadpool_decrement_fetch_release_size_t( - pthreadpool_atomic_size_t* address) - { - return atomic_fetch_sub_explicit(address, 1, memory_order_release) - 1; - } - - static inline bool pthreadpool_try_decrement_relaxed_size_t( - pthreadpool_atomic_size_t* value) - { - #if defined(__clang__) && (defined(__arm__) || defined(__aarch64__)) - size_t actual_value; - do { - actual_value = __builtin_arm_ldrex((const volatile size_t*) value); - if (actual_value == 0) { - __builtin_arm_clrex(); - return false; - } - } while (__builtin_arm_strex(actual_value - 1, (volatile size_t*) value) != 0); - return true; - #else - size_t actual_value = pthreadpool_load_relaxed_size_t(value); - while (actual_value != 0) { - if (atomic_compare_exchange_weak_explicit( - value, &actual_value, actual_value - 1, memory_order_relaxed, memory_order_relaxed)) - { - return true; - } - } - return false; - #endif - } - - static inline void pthreadpool_fence_acquire() { - atomic_thread_fence(memory_order_acquire); - } - - static inline void pthreadpool_fence_release() { - atomic_thread_fence(memory_order_release); - } #else #error "Platform-specific implementation of threadpool-atomics.h required" #endif |