1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #ifndef mozilla_WasiAtomic_h
8 #define mozilla_WasiAtomic_h
9 
#include <cstddef>
#include <cstdint>
11 
// WASI doesn't support <atomic>, and the WASI port is single-threaded for
// now. This header is a stub implementation of std atomics, sufficient to
// build the WASI port of SpiderMonkey (SM).
14 
15 namespace std {
16 enum memory_order {
17   relaxed,
18   consume,  // load-consume
19   acquire,  // load-acquire
20   release,  // store-release
21   acq_rel,  // store-release load-acquire
22   seq_cst   // store-release load-acquire
23 };
24 
25 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
26 inline constexpr auto memory_order_consume = memory_order::consume;
27 inline constexpr auto memory_order_acquire = memory_order::acquire;
28 inline constexpr auto memory_order_release = memory_order::release;
29 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
30 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
31 
32 template <class T>
33 struct atomic {
34   using value_type = T;
35   value_type value_;
36 
37   atomic() noexcept = default;
atomicatomic38   constexpr atomic(T desired) noexcept : value_{desired} {}
39 
40   atomic(const atomic&) = delete;
41   atomic& operator=(const atomic&) = delete;
42   atomic& operator=(const atomic&) volatile = delete;
43   ~atomic() noexcept = default;
44 
45   T load(memory_order m = memory_order_seq_cst) const volatile noexcept {
46     return value_;
47   }
48 
49   void store(T desired,
50              memory_order m = memory_order_seq_cst) volatile noexcept {
51     value_ = desired;
52   }
53 
54   T operator=(T desired) volatile noexcept { return value_ = desired; }
55 
56   T exchange(T desired,
57              memory_order m = memory_order_seq_cst) volatile noexcept {
58     T tmp = value_;
59     value_ = desired;
60     return tmp;
61   }
62 
compare_exchange_weakatomic63   bool compare_exchange_weak(T& expected, T desired, memory_order,
64                              memory_order) volatile noexcept {
65     expected = desired;
66     return true;
67   }
68 
69   bool compare_exchange_weak(
70       T& expected, T desired,
71       memory_order m = memory_order_seq_cst) volatile noexcept {
72     expected = desired;
73     return true;
74   }
75 
compare_exchange_strongatomic76   bool compare_exchange_strong(T& expected, T desired, memory_order,
77                                memory_order) volatile noexcept {
78     expected = desired;
79     return true;
80   }
81 
82   bool compare_exchange_strong(
83       T& expected, T desired,
84       memory_order m = memory_order_seq_cst) volatile noexcept {
85     expected = desired;
86     return true;
87   }
88 
89   T fetch_add(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
90     T previous = value_;
91     value_ = value_ + arg;
92     return previous;
93   }
94 
95   T fetch_sub(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
96     T previous = value_;
97     value_ = value_ - arg;
98     return previous;
99   }
100 
101   T fetch_or(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
102     T previous = value_;
103     value_ = value_ | arg;
104     return previous;
105   }
106 
107   T fetch_xor(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
108     T previous = value_;
109     value_ = value_ ^ arg;
110     return previous;
111   }
112 
113   T fetch_and(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
114     T previous = value_;
115     value_ = value_ & arg;
116     return previous;
117   }
118 };
119 
120 template <class T>
121 struct atomic<T*> {
122   using value_type = T*;
123   using difference_type = ptrdiff_t;
124 
125   value_type value_;
126 
127   atomic() noexcept = default;
128   constexpr atomic(T* desired) noexcept : value_{desired} {}
129   atomic(const atomic&) = delete;
130   atomic& operator=(const atomic&) = delete;
131   atomic& operator=(const atomic&) volatile = delete;
132 
133   T* load(memory_order m = memory_order_seq_cst) const volatile noexcept {
134     return value_;
135   }
136 
137   void store(T* desired,
138              memory_order m = memory_order_seq_cst) volatile noexcept {
139     value_ = desired;
140   }
141 
142   T* operator=(T* other) volatile noexcept { return value_ = other; }
143 
144   T* exchange(T* desired,
145               memory_order m = memory_order_seq_cst) volatile noexcept {
146     T* previous = value_;
147     value_ = desired;
148     return previous;
149   }
150 
151   bool compare_exchange_weak(T*& expected, T* desired, memory_order s,
152                              memory_order f) volatile noexcept {
153     expected = desired;
154     return true;
155   }
156 
157   bool compare_exchange_weak(
158       T*& expected, T* desired,
159       memory_order m = memory_order_seq_cst) volatile noexcept {
160     expected = desired;
161     return true;
162   }
163 
164   bool compare_exchange_strong(T*& expected, T* desired, memory_order s,
165                                memory_order f) volatile noexcept {
166     expected = desired;
167     return true;
168   }
169 
170   T* fetch_add(ptrdiff_t arg,
171                memory_order m = memory_order_seq_cst) volatile noexcept {
172     T* previous = value_;
173     value_ = value_ + arg;
174     return previous;
175   }
176 
177   T* fetch_sub(ptrdiff_t arg,
178                memory_order m = memory_order_seq_cst) volatile noexcept {
179     T* previous = value_;
180     value_ = value_ - arg;
181     return previous;
182   }
183 };
184 
185 using atomic_uint8_t = atomic<uint8_t>;
186 using atomic_uint16_t = atomic<uint16_t>;
187 using atomic_uint32_t = atomic<uint32_t>;
188 using atomic_uint64_t = atomic<uint64_t>;
189 
190 }  // namespace std
191 
192 #endif  // mozilla_WasiAtomic_h
193