1 /*
2      * This file is part of FFmpeg.
3      *
4      * FFmpeg is free software; you can redistribute it and/or
5      * modify it under the terms of the GNU Lesser General Public
6      * License as published by the Free Software Foundation; either
7      * version 2.1 of the License, or (at your option) any later version.
8      *
9      * FFmpeg is distributed in the hope that it will be useful,
10      * but WITHOUT ANY WARRANTY; without even the implied warranty of
11      * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12      * Lesser General Public License for more details.
13      *
14      * You should have received a copy of the GNU Lesser General Public
15      * License along with FFmpeg; if not, write to the Free Software
16      * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17      */
18 
19 #ifndef COMPAT_ATOMICS_WIN32_STDATOMIC_H
20 #define COMPAT_ATOMICS_WIN32_STDATOMIC_H
21 
22 #define WIN32_LEAN_AND_MEAN
23 #include <stddef.h>
24 #include <stdint.h>
25 #include <windows.h>
26 
27 #ifdef __TINYC__
28 #endif
29 
#define ATOMIC_FLAG_INIT 0

#define ATOMIC_VAR_INIT(value) (value)

/* Initialization is not required to be atomic: a plain store suffices. */
#define atomic_init(obj, value) \
    do                          \
    {                           \
        *(obj) = (value);       \
    } while (0)

#define kill_dependency(y) ((void)0)

/*
 * Full hardware barrier regardless of the requested memory order (this
 * shim approximates every ordering with a full barrier).
 * No trailing semicolon in the expansion: the caller supplies it, so the
 * macro stays safe inside un-braced if/else branches and does not expand
 * to a stray empty statement.
 */
#define atomic_thread_fence(order) \
    MemoryBarrier()

/* Compiler-only fence: nothing to do, the compiler barrier is implicit. */
#define atomic_signal_fence(order) \
    ((void)0)

/* Conservatively report "not lock-free" for every object. */
#define atomic_is_lock_free(obj) 0
49 
/*
 * Every C11 atomic type is represented by a pointer-sized signed integer.
 * This lets all generic operations funnel through the pointer-width
 * Interlocked* primitives below.  Note that even the sub-word types
 * (atomic_char, atomic_short, ...) occupy a full intptr_t here, so this
 * header is NOT layout-compatible with a real <stdatomic.h>.
 */
typedef intptr_t atomic_flag;
typedef intptr_t atomic_bool;
typedef intptr_t atomic_char;
typedef intptr_t atomic_schar;
typedef intptr_t atomic_uchar;
typedef intptr_t atomic_short;
typedef intptr_t atomic_ushort;
typedef intptr_t atomic_int;
typedef intptr_t atomic_uint;
typedef intptr_t atomic_long;
typedef intptr_t atomic_ulong;
typedef intptr_t atomic_llong;
typedef intptr_t atomic_ullong;
typedef intptr_t atomic_wchar_t;
typedef intptr_t atomic_int_least8_t;
typedef intptr_t atomic_uint_least8_t;
typedef intptr_t atomic_int_least16_t;
typedef intptr_t atomic_uint_least16_t;
typedef intptr_t atomic_int_least32_t;
typedef intptr_t atomic_uint_least32_t;
typedef intptr_t atomic_int_least64_t;
typedef intptr_t atomic_uint_least64_t;
typedef intptr_t atomic_int_fast8_t;
typedef intptr_t atomic_uint_fast8_t;
typedef intptr_t atomic_int_fast16_t;
typedef intptr_t atomic_uint_fast16_t;
typedef intptr_t atomic_int_fast32_t;
typedef intptr_t atomic_uint_fast32_t;
typedef intptr_t atomic_int_fast64_t;
typedef intptr_t atomic_uint_fast64_t;
typedef intptr_t atomic_intptr_t;
typedef intptr_t atomic_uintptr_t;
typedef intptr_t atomic_size_t;
typedef intptr_t atomic_ptrdiff_t;
typedef intptr_t atomic_intmax_t;
typedef intptr_t atomic_uintmax_t;
86 
#ifdef __TINYC__
/*
 * TCC's headers are missing several Interlocked intrinsics, so emulate
 * them with a compare-exchange loop: re-read the current value and try to
 * publish old + Value until no other thread has raced us in between.
 * Like the real intrinsics, each returns the value the addend held
 * *before* the addition.
 */
__CRT_INLINE LONGLONG _InterlockedExchangeAdd64(LONGLONG volatile *Addend, LONGLONG Value)
{
    LONGLONG Old;
    do
    {
        Old = *Addend;
    } while (InterlockedCompareExchange64(Addend, Old + Value, Old) != Old);
    return Old;
}

__CRT_INLINE LONG _InterlockedExchangeAdd(LONG volatile *Addend, LONG Value)
{
    LONG Old;
    do
    {
        Old = *Addend;
    } while (InterlockedCompareExchange(Addend, Old + Value, Old) != Old);
    return Old;
}

__CRT_INLINE SHORT _InterlockedExchangeAdd16(SHORT volatile *Addend, SHORT Value)
{
    SHORT Old;
    do
    {
        Old = *Addend;
    } while (InterlockedCompareExchange16(Addend, Old + Value, Old) != Old);
    return Old;
}

/*
 * The real InterlockedIncrement64 takes a single pointer argument and
 * returns the *incremented* value.  A bare alias to
 * _InterlockedExchangeAdd64 neither compiles for such call sites (wrong
 * arity) nor returns the right value (old vs. new), so provide a
 * function-like macro with the proper semantics.
 */
#define InterlockedIncrement64(Addend) (_InterlockedExchangeAdd64((Addend), 1) + 1)

/* Full store fence; mfence is available on every x64 CPU TCC targets here. */
__CRT_INLINE VOID __faststorefence(void)
{
    __asm__ __volatile__ ("mfence");
}

#endif
130 
/*
 * Store: plain write followed by a full hardware barrier.  Like the rest
 * of this shim, every memory order is approximated with a full barrier.
 */
#define atomic_store(object, desired) \
    do                                \
    {                                 \
        *(object) = (desired);        \
        MemoryBarrier();              \
    } while (0)

/* Explicit-order variant: the order argument is ignored. */
#define atomic_store_explicit(object, desired, order) \
    atomic_store(object, desired)

/* Load: full barrier, then a plain read; yields the read value. */
#define atomic_load(object) \
    (MemoryBarrier(), *(object))

/* Explicit-order variant: the order argument is ignored. */
#define atomic_load_explicit(object, order) \
    atomic_load(object)
146 
/*
 * Atomically swap in `desired` and yield the previous value (as PVOID,
 * as returned by InterlockedExchangePointer).  The casts match the
 * intrinsic's (PVOID volatile *, PVOID) signature, since atomic objects
 * in this shim are intptr_t; the bit representation is unchanged, so
 * behavior is identical to the uncast call, minus the
 * incompatible-pointer-type diagnostics.
 */
#define atomic_exchange(object, desired) \
    InterlockedExchangePointer((PVOID volatile *)(object), (PVOID)(desired))

/* Explicit-order variant: the order argument is ignored. */
#define atomic_exchange_explicit(object, desired, order) \
    atomic_exchange(object, desired)
152 
atomic_compare_exchange_strong(intptr_t * object,intptr_t * expected,intptr_t desired)153 static inline int atomic_compare_exchange_strong(intptr_t *object, intptr_t *expected,
154                                                  intptr_t desired)
155 {
156     intptr_t old = *expected;
157     *expected = (intptr_t)InterlockedCompareExchangePointer(
158         (PVOID *)object, (PVOID)desired, (PVOID)old);
159     return *expected == old;
160 }
161 
/* Explicit-order variant: both order arguments are ignored. */
#define atomic_compare_exchange_strong_explicit(object, expected, desired, success, failure) \
    atomic_compare_exchange_strong(object, expected, desired)

/* The strong CAS never fails spuriously, so it is a valid weak CAS too. */
#define atomic_compare_exchange_weak(object, expected, desired) \
    atomic_compare_exchange_strong(object, expected, desired)

/* Explicit-order variant: both order arguments are ignored. */
#define atomic_compare_exchange_weak_explicit(object, expected, desired, success, failure) \
    atomic_compare_exchange_weak(object, expected, desired)
170 
/*
 * Generic fetch-and-modify operations.  Atomic objects are intptr_t, so
 * the pointer-width Interlocked primitive is selected per target: the
 * 64-bit family on _WIN64, the 32-bit family otherwise.  Each returns the
 * value held *before* the modification, per C11 semantics.
 * NOTE(review): the object pointer is intptr_t * while the intrinsics
 * take LONG64/LONG volatile * -- this relies on implicit pointer
 * conversion; confirm the build accepts it or add casts as the _uNN
 * variants below do.
 */
#ifdef _WIN64

#define atomic_fetch_add(object, operand) \
    InterlockedExchangeAdd64(object, operand)

/* Subtraction is addition of the negated operand. */
#define atomic_fetch_sub(object, operand) \
    InterlockedExchangeAdd64(object, -(operand))

#define atomic_fetch_or(object, operand) \
    InterlockedOr64(object, operand)

#define atomic_fetch_xor(object, operand) \
    InterlockedXor64(object, operand)

#define atomic_fetch_and(object, operand) \
    InterlockedAnd64(object, operand)
#else
#define atomic_fetch_add(object, operand) \
    InterlockedExchangeAdd(object, operand)

/* Subtraction is addition of the negated operand. */
#define atomic_fetch_sub(object, operand) \
    InterlockedExchangeAdd(object, -(operand))

#define atomic_fetch_or(object, operand) \
    InterlockedOr(object, operand)

#define atomic_fetch_xor(object, operand) \
    InterlockedXor(object, operand)

#define atomic_fetch_and(object, operand) \
    InterlockedAnd(object, operand)
#endif /* _WIN64 */
203 
/* specialized versions with explicit object size */

/*
 * Pointer-sized variants: every atomic object in this shim is already
 * pointer-sized (intptr_t), so these are plain aliases of the generic
 * operations.
 */
#define atomic_load_ptr atomic_load
#define atomic_store_ptr atomic_store
#define atomic_compare_exchange_weak_ptr atomic_compare_exchange_weak
#define atomic_compare_exchange_strong_ptr atomic_compare_exchange_strong
#define atomic_exchange_ptr atomic_exchange
#define atomic_fetch_add_ptr atomic_fetch_add
#define atomic_fetch_sub_ptr atomic_fetch_sub
#define atomic_fetch_and_ptr atomic_fetch_and
#define atomic_fetch_or_ptr atomic_fetch_or
#define atomic_fetch_xor_ptr atomic_fetch_xor
216 
atomic_store_u64(unsigned long long * object,unsigned long long desired)217 static inline void atomic_store_u64(unsigned long long* object, unsigned long long desired) {
218     do {
219         *(object) = (desired);
220         MemoryBarrier();
221     } while (0);
222 }
223 
atomic_load_u64(unsigned long long * object)224 static inline unsigned long long atomic_load_u64(unsigned long long* object) {
225     return (MemoryBarrier(), *(object));
226 }
227 
228 #define atomic_exchange_u64(object, desired) \
229     InterlockedExchange64(object, desired)
230 
atomic_compare_exchange_strong_u64(unsigned long long * object,unsigned long long * expected,unsigned long long desired)231 static inline int atomic_compare_exchange_strong_u64(unsigned long long* object, unsigned long long* expected,
232                                                  unsigned long long desired)
233 {
234 	unsigned long long old = *expected;
235     *expected = InterlockedCompareExchange64(object, desired, old);
236     return *expected == old;
237 }
238 
239 #define atomic_compare_exchange_weak_u64(object, expected, desired) \
240     atomic_compare_exchange_strong_u64(object, expected, desired)
241 
242 #define atomic_fetch_add_u64(object, operand) \
243     InterlockedExchangeAdd64(object, operand)
244 
245 #define atomic_fetch_sub_u64(object, operand) \
246     InterlockedExchangeAdd64(object, -(operand))
247 
248 #define atomic_fetch_or_u64(object, operand) \
249     InterlockedOr64(object, operand)
250 
251 #define atomic_fetch_xor_u64(object, operand) \
252     InterlockedXor64(object, operand)
253 
254 #define atomic_fetch_and_u64(object, operand) \
255     InterlockedAnd64(object, operand)
256 
257 
258 
atomic_store_u32(unsigned * object,unsigned desired)259 static inline void atomic_store_u32(unsigned* object, unsigned desired) {
260     do {
261         *(object) = (desired);
262         MemoryBarrier();
263     } while (0);
264 }
265 
atomic_load_u32(unsigned * object)266 static inline unsigned atomic_load_u32(unsigned* object) {
267     return (MemoryBarrier(), *(object));
268 }
269 
270 #define atomic_exchange_u32(object, desired) \
271     InterlockedExchange(object, desired)
272 
atomic_compare_exchange_strong_u32(unsigned * object,unsigned * expected,unsigned desired)273 static inline int atomic_compare_exchange_strong_u32(unsigned* object, unsigned* expected,
274                                                  unsigned desired)
275 {
276 	unsigned old = *expected;
277     *expected = InterlockedCompareExchange(object, desired, old);
278     return *expected == old;
279 }
280 
281 #define atomic_compare_exchange_weak_u32(object, expected, desired) \
282     atomic_compare_exchange_strong_u32(object, expected, desired)
283 
284 #define atomic_fetch_add_u32(object, operand) \
285     InterlockedExchangeAdd(object, operand)
286 
287 #define atomic_fetch_sub_u32(object, operand) \
288     InterlockedExchangeAdd(object, -(operand))
289 
290 #define atomic_fetch_or_u32(object, operand) \
291     InterlockedOr(object, operand)
292 
293 #define atomic_fetch_xor_u32(object, operand) \
294     InterlockedXor(object, operand)
295 
296 #define atomic_fetch_and_u32(object, operand) \
297     InterlockedAnd(object, operand)
298 
299 
300 
atomic_store_u16(unsigned short * object,unsigned short desired)301 static inline void atomic_store_u16(unsigned short* object, unsigned short desired) {
302     do {
303         *(object) = (desired);
304         MemoryBarrier();
305     } while (0);
306 }
307 
atomic_load_u16(unsigned short * object)308 static inline unsigned short atomic_load_u16(unsigned short* object) {
309     return (MemoryBarrier(), *(object));
310 }
311 
312 #define atomic_exchange_u16(object, desired) \
313     InterlockedExchange16(object, desired)
314 
atomic_compare_exchange_strong_u16(unsigned short * object,unsigned short * expected,unsigned short desired)315 static inline int atomic_compare_exchange_strong_u16(unsigned short* object, unsigned short* expected,
316                                                  unsigned short desired)
317 {
318 	unsigned short old = *expected;
319     *expected = InterlockedCompareExchange16(object, desired, old);
320     return *expected == old;
321 }
322 
323 #define atomic_compare_exchange_weak_u16(object, expected, desired) \
324     atomic_compare_exchange_strong_u16(object, expected, desired)
325 
326 #define atomic_fetch_add_u16(object, operand) \
327     InterlockedExchangeAdd16(object, operand)
328 
329 #define atomic_fetch_sub_u16(object, operand) \
330     InterlockedExchangeAdd16(object, -(operand))
331 
332 #define atomic_fetch_or_u16(object, operand) \
333     InterlockedOr16(object, operand)
334 
335 #define atomic_fetch_xor_u16(object, operand) \
336     InterlockedXor16(object, operand)
337 
338 #define atomic_fetch_and_u16(object, operand) \
339     InterlockedAnd16(object, operand)
340 
341 
342 
/*
 * Explicit-order variants of the fetch operations: the memory-order
 * argument is ignored (every operation in this shim is a full barrier).
 */
#define atomic_fetch_add_explicit(object, operand, order) \
    atomic_fetch_add(object, operand)

#define atomic_fetch_sub_explicit(object, operand, order) \
    atomic_fetch_sub(object, operand)

#define atomic_fetch_or_explicit(object, operand, order) \
    atomic_fetch_or(object, operand)

#define atomic_fetch_xor_explicit(object, operand, order) \
    atomic_fetch_xor(object, operand)

#define atomic_fetch_and_explicit(object, operand, order) \
    atomic_fetch_and(object, operand)

/* Flag ops: set is an exchange with 1 (yields the old value), clear a store of 0. */
#define atomic_flag_test_and_set(object) \
    atomic_exchange(object, 1)

#define atomic_flag_test_and_set_explicit(object, order) \
    atomic_flag_test_and_set(object)

#define atomic_flag_clear(object) \
    atomic_store(object, 0)

#define atomic_flag_clear_explicit(object, order) \
    atomic_flag_clear(object)
369 
370 #endif /* COMPAT_ATOMICS_WIN32_STDATOMIC_H */
371