Diffstat (limited to 'include')
 include/atomic.h (-rw-r--r--) | 123
 1 file changed, 81 insertions(+), 42 deletions(-)
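The patch below makes every ATOMIC_* macro variadic so callers can pass an optional memory-order argument; when the order is omitted, the PARAMn helper macros append memory_order_seq_cst as the default. The following stand-alone sketch is not part of the patch (the LOAD name is purely illustrative); it shows the same default-argument trick in isolation, assuming a C11 compiler with <stdatomic.h> that, like the patch itself, accepts an empty __VA_ARGS__:

/* param_trick.c -- illustrative only; LOAD is a hypothetical name. */
#include <stdatomic.h>
#include <stdio.h>

/* The fixed-arity helper keeps only the first two arguments after the
 * function name and drops the rest, so a trailing default supplied by the
 * caller-facing macro is used only when no explicit order was given. */
#define PARAM2(f, a, b, ...) (f((a), (b)))

/* LOAD(ptr)        -> atomic_load_explicit(ptr, memory_order_seq_cst)
 * LOAD(ptr, order) -> atomic_load_explicit(ptr, order)                */
#define LOAD(...) PARAM2(atomic_load_explicit, __VA_ARGS__, memory_order_seq_cst)

int main(void)
{
    _Atomic int counter;
    atomic_init(&counter, 5);
    printf("%d\n", LOAD(&counter));                       /* default seq_cst */
    printf("%d\n", LOAD(&counter, memory_order_relaxed)); /* explicit order  */
    return 0;
}
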
diff --git a/include/atomic.h b/include/atomic.h
index 92401e29..8eb6820b 100644
--- a/include/atomic.h
+++ b/include/atomic.h
@@ -13,55 +13,76 @@ extern "C" {
#include <stdatomic.h>
-#define ATOMIC(T) struct { T _Atomic value; }
+#define almemory_order memory_order
+#define almemory_order_relaxed memory_order_relaxed
+#define almemory_order_consume memory_order_consume
+#define almemory_order_acquire memory_order_acquire
+#define almemory_order_release memory_order_release
+#define almemory_order_acq_rel memory_order_acq_rel
+#define almemory_order_seq_cst memory_order_seq_cst
-#define ATOMIC_INIT(_val, _newval) atomic_init(&(_val)->value, (_newval))
-#define ATOMIC_INIT_STATIC(_newval) {ATOMIC_VAR_INIT(_newval)}
+#define ATOMIC(T) T _Atomic
-#define ATOMIC_LOAD(_val) atomic_load(&(_val)->value)
-#define ATOMIC_STORE(_val, _newval) atomic_store(&(_val)->value, (_newval))
+#define ATOMIC_INIT(_val, _newval) atomic_init((_val), (_newval))
+#define ATOMIC_INIT_STATIC(_newval) ATOMIC_VAR_INIT(_newval)
-#define ATOMIC_ADD(T, _val, _incr) atomic_fetch_add(&(_val)->value, (_incr))
-#define ATOMIC_SUB(T, _val, _decr) atomic_fetch_sub(&(_val)->value, (_decr))
+#define PARAM2(f, a, b, ...) (f((a), (b)))
+#define PARAM3(f, a, b, c, ...) (f((a), (b), (c)))
+#define PARAM5(f, a, b, c, d, e, ...) (f((a), (b), (c), (d), (e)))
-#define ATOMIC_EXCHANGE(T, _val, _newval) atomic_exchange(&(_val)->value, (_newval))
-#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval) \
- atomic_compare_exchange_strong(&(_val)->value, (_oldval), (_newval))
-#define ATOMIC_COMPARE_EXCHANGE_WEAK(T, _val, _oldval, _newval) \
- atomic_compare_exchange_weak(&(_val)->value, (_oldval), (_newval))
+#define ATOMIC_LOAD(...) PARAM2(atomic_load_explicit, __VA_ARGS__, memory_order_seq_cst)
+#define ATOMIC_STORE(...) PARAM3(atomic_store_explicit, __VA_ARGS__, memory_order_seq_cst)
+
+#define ATOMIC_ADD(T, ...) PARAM3(atomic_fetch_add_explicit, __VA_ARGS__, memory_order_seq_cst)
+#define ATOMIC_SUB(T, ...) PARAM3(atomic_fetch_sub_explicit, __VA_ARGS__, memory_order_seq_cst)
+
+#define ATOMIC_EXCHANGE(T, ...) PARAM3(atomic_exchange_explicit, __VA_ARGS__, memory_order_seq_cst)
+#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, ...) \
+ PARAM5(atomic_compare_exchange_strong_explicit, __VA_ARGS__, memory_order_seq_cst, memory_order_seq_cst)
+#define ATOMIC_COMPARE_EXCHANGE_WEAK(T, ...) \
+ PARAM5(atomic_compare_exchange_weak_explicit, __VA_ARGS__, memory_order_seq_cst, memory_order_seq_cst)
/* Atomics using GCC intrinsics */
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) && !defined(__QNXNTO__)
+enum almemory_order {
+ almemory_order_relaxed,
+ almemory_order_consume,
+ almemory_order_acquire,
+ almemory_order_release,
+ almemory_order_acq_rel,
+ almemory_order_seq_cst
+};
+
#define ATOMIC(T) struct { T volatile value; }
#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}
-#define ATOMIC_LOAD(_val) __extension__({ \
+#define ATOMIC_LOAD(_val, ...) __extension__({ \
__typeof((_val)->value) _r = (_val)->value; \
__asm__ __volatile__("" ::: "memory"); \
_r; \
})
-#define ATOMIC_STORE(_val, _newval) do { \
- __asm__ __volatile__("" ::: "memory"); \
- (_val)->value = (_newval); \
+#define ATOMIC_STORE(_val, _newval, ...) do { \
+ __asm__ __volatile__("" ::: "memory"); \
+ (_val)->value = (_newval); \
} while(0)
-#define ATOMIC_ADD(T, _val, _incr) __extension__({ \
+#define ATOMIC_ADD(T, _val, _incr, ...) __extension__({ \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
__sync_fetch_and_add(&(_val)->value, (_incr)); \
})
-#define ATOMIC_SUB(T, _val, _decr) __extension__({ \
+#define ATOMIC_SUB(T, _val, _decr, ...) __extension__({ \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
__sync_fetch_and_sub(&(_val)->value, (_decr)); \
})
-#define ATOMIC_EXCHANGE(T, _val, _newval) __extension__({ \
+#define ATOMIC_EXCHANGE(T, _val, _newval, ...) __extension__({ \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
__sync_lock_test_and_set(&(_val)->value, (_newval)); \
})
-#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval) __extension__({ \
+#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval, ...) __extension__({ \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
T _o = *(_oldval); \
*(_oldval) = __sync_val_compare_and_swap(&(_val)->value, _o, (_newval)); \
@@ -98,29 +119,38 @@ extern "C" {
)
+enum almemory_order {
+ almemory_order_relaxed,
+ almemory_order_consume,
+ almemory_order_acquire,
+ almemory_order_release,
+ almemory_order_acq_rel,
+ almemory_order_seq_cst
+};
+
#define ATOMIC(T) struct { T volatile value; }
#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}
-#define ATOMIC_LOAD(_val) __extension__({ \
+#define ATOMIC_LOAD(_val, ...) __extension__({ \
__typeof((_val)->value) _r = (_val)->value; \
__asm__ __volatile__("" ::: "memory"); \
_r; \
})
-#define ATOMIC_STORE(_val, _newval) do { \
- __asm__ __volatile__("" ::: "memory"); \
- (_val)->value = (_newval); \
+#define ATOMIC_STORE(_val, _newval, ...) do { \
+ __asm__ __volatile__("" ::: "memory"); \
+ (_val)->value = (_newval); \
} while(0)
-#define ATOMIC_ADD(T, _val, _incr) __extension__({ \
+#define ATOMIC_ADD(T, _val, _incr, ...) __extension__({ \
static_assert(sizeof(T)==4, "Type "#T" has incorrect size!"); \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
T _r; \
WRAP_ADD(_r, &(_val)->value, (T)(_incr)); \
_r; \
})
-#define ATOMIC_SUB(T, _val, _decr) __extension__({ \
+#define ATOMIC_SUB(T, _val, _decr, ...) __extension__({ \
static_assert(sizeof(T)==4, "Type "#T" has incorrect size!"); \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
T _r; \
@@ -128,7 +158,7 @@ extern "C" {
_r; \
})
-#define ATOMIC_EXCHANGE(T, _val, _newval) __extension__({ \
+#define ATOMIC_EXCHANGE(T, _val, _newval, ...) __extension__({ \
static_assert(sizeof(T)==4 || sizeof(T)==8, "Type "#T" has incorrect size!"); \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
T _r; \
@@ -136,7 +166,7 @@ extern "C" {
else if(sizeof(T) == 8) WRAP_XCHG("q", _r, &(_val)->value, (T)(_newval)); \
_r; \
})
-#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval) __extension__({ \
+#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval, ...) __extension__({ \
static_assert(sizeof(T)==4 || sizeof(T)==8, "Type "#T" has incorrect size!"); \
static_assert(sizeof(T)==sizeof((_val)->value), "Type "#T" has incorrect size!"); \
T _old = *(_oldval); \
@@ -197,30 +227,39 @@ inline bool CompareAndSwap64(volatile LONGLONG *dest, LONGLONG newval, LONGLONG
#define WRAP_CMPXCHG(T, _func, _ptr, _newval, _oldval) ((bool(*)(T volatile*,T,T*))_func)((_ptr), (_newval), (_oldval))
+enum almemory_order {
+ almemory_order_relaxed,
+ almemory_order_consume,
+ almemory_order_acquire,
+ almemory_order_release,
+ almemory_order_acq_rel,
+ almemory_order_seq_cst
+};
+
#define ATOMIC(T) struct { T volatile value; }
#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}
-#define ATOMIC_LOAD(_val) ((_val)->value)
-#define ATOMIC_STORE(_val, _newval) do { \
- (_val)->value = (_newval); \
+#define ATOMIC_LOAD(_val, ...) ((_val)->value)
+#define ATOMIC_STORE(_val, _newval, ...) do { \
+ (_val)->value = (_newval); \
} while(0)
int _al_invalid_atomic_size(); /* not defined */
-#define ATOMIC_ADD(T, _val, _incr) \
+#define ATOMIC_ADD(T, _val, _incr, ...) \
((sizeof(T)==4) ? WRAP_ADDSUB(T, AtomicAdd32, &(_val)->value, (_incr)) : \
(T)_al_invalid_atomic_size())
-#define ATOMIC_SUB(T, _val, _decr) \
+#define ATOMIC_SUB(T, _val, _decr, ...) \
((sizeof(T)==4) ? WRAP_ADDSUB(T, AtomicSub32, &(_val)->value, (_decr)) : \
(T)_al_invalid_atomic_size())
-#define ATOMIC_EXCHANGE(T, _val, _newval) \
+#define ATOMIC_EXCHANGE(T, _val, _newval, ...) \
((sizeof(T)==4) ? WRAP_XCHG(T, AtomicSwap32, &(_val)->value, (_newval)) : \
(sizeof(T)==8) ? WRAP_XCHG(T, AtomicSwap64, &(_val)->value, (_newval)) : \
(T)_al_invalid_atomic_size())
-#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval) \
+#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval, ...) \
((sizeof(T)==4) ? WRAP_CMPXCHG(T, CompareAndSwap32, &(_val)->value, (_newval), (_oldval)) : \
(sizeof(T)==8) ? WRAP_CMPXCHG(T, CompareAndSwap64, &(_val)->value, (_newval), (_oldval)) : \
(bool)_al_invalid_atomic_size())
@@ -236,20 +275,20 @@ int _al_invalid_atomic_size(); /* not defined */
#define ATOMIC_LOAD_UNSAFE(_val) (0)
#define ATOMIC_STORE_UNSAFE(_val, _newval) ((void)0)
-#define ATOMIC_LOAD(_val) (0)
-#define ATOMIC_STORE(_val, _newval) ((void)0)
+#define ATOMIC_LOAD(_val, ...) (0)
+#define ATOMIC_STORE(_val, _newval, ...) ((void)0)
-#define ATOMIC_ADD(T, _val, _incr) (0)
-#define ATOMIC_SUB(T, _val, _decr) (0)
+#define ATOMIC_ADD(T, _val, _incr, ...) (0)
+#define ATOMIC_SUB(T, _val, _decr, ...) (0)
-#define ATOMIC_EXCHANGE(T, _val, _newval) (0)
-#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval) (0)
+#define ATOMIC_EXCHANGE(T, _val, _newval, ...) (0)
+#define ATOMIC_COMPARE_EXCHANGE_STRONG(T, _val, _oldval, _newval, ...) (0)
#endif
/* If no weak cmpxchg is provided (not all systems will have one), substitute a
* strong cmpxchg. */
#ifndef ATOMIC_COMPARE_EXCHANGE_WEAK
-#define ATOMIC_COMPARE_EXCHANGE_WEAK(a, b, c, d) ATOMIC_COMPARE_EXCHANGE_STRONG(a, b, c, d)
+#define ATOMIC_COMPARE_EXCHANGE_WEAK ATOMIC_COMPARE_EXCHANGE_STRONG
#endif
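
For reference, a hedged usage sketch of the updated interface (not from the repository; it assumes the patched include/atomic.h is on the include path and is built on the C11 <stdatomic.h> branch, though the fallback branches accept the same calls):

/* cas_demo.c -- usage sketch under the assumptions stated above. */
#include <stdio.h>
#include "atomic.h"

static ATOMIC(unsigned int) counter = ATOMIC_INIT_STATIC(0u);

/* Increment via a weak-CAS retry loop. On platforms without a native weak
 * compare-exchange, the header aliases ATOMIC_COMPARE_EXCHANGE_WEAK to the
 * strong form, so the same loop works (just without spurious failures). */
static unsigned int increment(void)
{
    unsigned int expected = ATOMIC_LOAD(&counter, almemory_order_relaxed);
    while(!ATOMIC_COMPARE_EXCHANGE_WEAK(unsigned int, &counter, &expected, expected+1))
    {
        /* 'expected' was updated with the value another thread stored; retry. */
    }
    return expected+1;
}

int main(void)
{
    printf("counter = %u\n", increment());            /* prints 1 */
    printf("counter = %u\n", ATOMIC_LOAD(&counter));  /* seq_cst load, prints 1 */
    return 0;
}

Passing an explicit almemory_order is optional everywhere: omitting it keeps the previous sequentially-consistent behaviour, so existing call sites compile unchanged, while on the non-C11 fallback branches the extra argument is accepted but ignored.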