From 93b9832faced7dc816ca1a9f3552488fc35037d1 Mon Sep 17 00:00:00 2001
From: Tom
Date: Mon, 1 Jun 2020 22:55:30 -0600
Subject: [PATCH] AK: Add atomic free functions

This allows for using atomic operations on any variables, not only those
wrapped in AK::Atomic
---
 AK/Atomic.h | 67 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 67 insertions(+)

diff --git a/AK/Atomic.h b/AK/Atomic.h
index f06eca1203c67d..b9edb817fa0bfa 100644
--- a/AK/Atomic.h
+++ b/AK/Atomic.h
@@ -39,6 +39,63 @@ enum MemoryOrder {
     memory_order_seq_cst = __ATOMIC_SEQ_CST
 };
 
+template<typename T>
+static inline T atomic_exchange(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_exchange_n(var, desired, order);
+}
+
+template<typename T>
+static inline bool atomic_compare_exchange_strong(volatile T* var, T& expected, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    if (order == memory_order_acq_rel || order == memory_order_release)
+        return __atomic_compare_exchange_n(var, &expected, desired, false, memory_order_release, memory_order_acquire);
+    else
+        return __atomic_compare_exchange_n(var, &expected, desired, false, order, order);
+}
+
+template<typename T>
+static inline T atomic_fetch_add(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_fetch_add(var, val, order);
+}
+
+template<typename T>
+static inline T atomic_fetch_sub(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_fetch_sub(var, val, order);
+}
+
+template<typename T>
+static inline T atomic_fetch_and(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_fetch_and(var, val, order);
+}
+
+template<typename T>
+static inline T atomic_fetch_or(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_fetch_or(var, val, order);
+}
+
+template<typename T>
+static inline T atomic_fetch_xor(volatile T* var, T val, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_fetch_xor(var, val, order);
+}
+
+template<typename T>
+static inline T atomic_load(volatile T* var, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    return __atomic_load_n(var, order);
+}
+
+template<typename T>
+static inline void atomic_store(volatile T* var, T desired, MemoryOrder order = memory_order_seq_cst) noexcept
+{
+    __atomic_store_n(var, desired, order);
+}
+
 template<typename T>
 class Atomic {
     T m_value { 0 };
@@ -53,6 +110,11 @@ class Atomic {
     {
     }
 
+    volatile T* ptr()
+    {
+        return &m_value;
+    }
+
     T exchange(T desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
     {
         return __atomic_exchange_n(&m_value, desired, order);
@@ -177,6 +239,11 @@ class Atomic<T*> {
    {
    }
 
+    volatile T** ptr()
+    {
+        return &m_value;
+    }
+
     T* exchange(T* desired, MemoryOrder order = memory_order_seq_cst) volatile noexcept
     {
         return __atomic_exchange_n(&m_value, desired, order);
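
The free functions added above mirror the member functions of AK::Atomic but take a raw pointer, so they can be applied to any suitably aligned variable, and the new ptr() accessor lets them operate on the storage behind an existing wrapper as well. The following usage sketch is not part of the patch: it assumes the functions land in namespace AK alongside the existing Atomic<T>, and it uses the u32 alias from AK/Types.h; names such as s_event_count and record_event are purely illustrative.

```cpp
// Usage sketch only: a plain shared counter manipulated with the new free
// functions, plus Atomic<T>::ptr() bridging an existing wrapper to them.
#include <AK/Atomic.h>
#include <AK/Types.h>

static u32 s_event_count; // plain variable, not wrapped in AK::Atomic

void record_event()
{
    // Read-modify-write on the raw u32; the order defaults to memory_order_seq_cst.
    AK::atomic_fetch_add(&s_event_count, 1u);
}

u32 current_event_count()
{
    // Acquire load that pairs with any release store made elsewhere.
    return AK::atomic_load(&s_event_count, AK::memory_order_acquire);
}

bool reset_if_unchanged(u32& expected)
{
    // Strong CAS on the raw variable; on failure 'expected' is updated
    // with the value that was actually observed.
    return AK::atomic_compare_exchange_strong(&s_event_count, expected, 0u);
}

void reset_wrapped_counter(AK::Atomic<u32>& counter)
{
    // The new ptr() accessor exposes the underlying storage, so the free
    // functions also work on a value that *is* wrapped in AK::Atomic.
    AK::atomic_store(counter.ptr(), 0u, AK::memory_order_release);
}
```

Since every free function defaults its order parameter to memory_order_seq_cst, just like the AK::Atomic member functions, call sites that do not opt into a weaker ordering keep the same semantics in both styles.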