#ifndef AL_ATOMIC_H
#define AL_ATOMIC_H

#include <atomic>


using RefCount = std::atomic<unsigned int>;

inline void InitRef(RefCount &ref, unsigned int value)
{ ref.store(value, std::memory_order_relaxed); }
inline unsigned int ReadRef(RefCount &ref)
{ return ref.load(std::memory_order_acquire); }
inline unsigned int IncrementRef(RefCount &ref)
{ return ref.fetch_add(1u, std::memory_order_acq_rel)+1u; }
inline unsigned int DecrementRef(RefCount &ref)
{ return ref.fetch_sub(1u, std::memory_order_acq_rel)-1u; }
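
/* Usage sketch (illustrative only, not part of the original header): the
 * RefCount helpers wrap the usual reference-counting pattern. IncrementRef
 * and DecrementRef return the post-operation count, so an object can be
 * destroyed when DecrementRef reports 0. The names `Object` and
 * `Object_delete` below are hypothetical.
 *
 *   struct Object { RefCount ref; };
 *
 *   void Object_addref(Object *obj) { IncrementRef(obj->ref); }
 *   void Object_release(Object *obj)
 *   {
 *       if(DecrementRef(obj->ref) == 0)
 *           Object_delete(obj);
 *   }
 */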


/* WARNING: A livelock is theoretically possible if another thread keeps
 * changing the head without giving this a chance to actually swap in the new
 * one (practically impossible with this little code, but...).
 */
template<typename T>
inline void AtomicReplaceHead(std::atomic<T> &head, T newhead)
{
    T first_ = head.load(std::memory_order_acquire);
    do {
        newhead->next.store(first_, std::memory_order_relaxed);
    } while(!head.compare_exchange_weak(first_, newhead,
            std::memory_order_acq_rel, std::memory_order_acquire));
}
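
/* Usage sketch (illustrative only): AtomicReplaceHead expects T to be a
 * pointer-like type whose pointee has an atomic `next` member of the same
 * type. It performs a lock-free push onto the list head via a CAS loop.
 * The `ListNode` and `FreeList` names below are hypothetical.
 *
 *   struct ListNode {
 *       std::atomic<ListNode*> next;
 *   };
 *   std::atomic<ListNode*> FreeList{nullptr};
 *
 *   ListNode *node = new ListNode;
 *   AtomicReplaceHead(FreeList, node);  // node becomes the new list head
 */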

#endif /* AL_ATOMIC_H */