1 /****************************************************************************
2 **
3 ** Copyright (C) 2016 The Qt Company Ltd.
4 ** Copyright (C) 2016 Intel Corporation.
5 ** Contact: https://www.qt.io/licensing/
6 **
7 ** This file is part of the QtCore module of the Qt Toolkit.
8 **
9 ** $QT_BEGIN_LICENSE:LGPL$
10 ** Commercial License Usage
11 ** Licensees holding valid commercial Qt licenses may use this file in
12 ** accordance with the commercial license agreement provided with the
13 ** Software or, alternatively, in accordance with the terms contained in
14 ** a written agreement between you and The Qt Company. For licensing terms
15 ** and conditions see https://www.qt.io/terms-conditions. For further
16 ** information use the contact form at https://www.qt.io/contact-us.
17 **
18 ** GNU Lesser General Public License Usage
19 ** Alternatively, this file may be used under the terms of the GNU Lesser
20 ** General Public License version 3 as published by the Free Software
21 ** Foundation and appearing in the file LICENSE.LGPL3 included in the
22 ** packaging of this file. Please review the following information to
23 ** ensure the GNU Lesser General Public License version 3 requirements
24 ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
25 **
26 ** GNU General Public License Usage
27 ** Alternatively, this file may be used under the terms of the GNU
28 ** General Public License version 2.0 or (at your option) the GNU General
29 ** Public license version 3 or any later version approved by the KDE Free
30 ** Qt Foundation. The licenses are as published by the Free Software
31 ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
32 ** included in the packaging of this file. Please review the following
33 ** information to ensure the GNU General Public License requirements will
34 ** be met: https://www.gnu.org/licenses/gpl-2.0.html and
35 ** https://www.gnu.org/licenses/gpl-3.0.html.
36 **
37 ** $QT_END_LICENSE$
38 **
39 ****************************************************************************/
40 
#ifndef QATOMIC_MSVC_H
#define QATOMIC_MSVC_H

#include <QtCore/qgenericatomic.h>

////////////////////////////////////////////////////////////////////////////////////////////////////

// use compiler intrinsics for all atomic functions
// QT_INTERLOCKED_PREFIX is pasted in front of "Interlocked..." below to form
// the MSVC intrinsic names (e.g. _InterlockedIncrement).
# define QT_INTERLOCKED_PREFIX _
# define QT_INTERLOCKED_PROTOTYPE
# define QT_INTERLOCKED_DECLARE_PROTOTYPES
# define QT_INTERLOCKED_INTRINSIC
# define Q_ATOMIC_INT16_IS_SUPPORTED

// 64-bit atomic intrinsics are only guaranteed when targeting 64-bit Windows.
# ifdef _WIN64
#  define Q_ATOMIC_INT64_IS_SUPPORTED
# endif
58 
////////////////////////////////////////////////////////////////////////////////////////////////////
// Prototype declaration

// Token-pasting helpers: the two-level indirection forces QT_INTERLOCKED_PREFIX
// to be macro-expanded before it is pasted onto "Interlocked".
#define QT_INTERLOCKED_CONCAT_I(prefix, suffix) \
    prefix ## suffix
#define QT_INTERLOCKED_CONCAT(prefix, suffix) \
    QT_INTERLOCKED_CONCAT_I(prefix, suffix)

// MSVC intrinsics prefix function names with an underscore. Also, if platform
// SDK headers have been included, the Interlocked names may be defined as
// macros.
// To avoid double underscores, we paste the prefix with Interlocked first and
// then the remainder of the function name.
#define QT_INTERLOCKED_FUNCTION(name) \
    QT_INTERLOCKED_CONCAT( \
        QT_INTERLOCKED_CONCAT(QT_INTERLOCKED_PREFIX, Interlocked), name)

// Fallbacks for configurations that did not define these in the section above.
#ifndef QT_INTERLOCKED_VOLATILE
# define QT_INTERLOCKED_VOLATILE volatile
#endif

#ifndef QT_INTERLOCKED_PREFIX
#define QT_INTERLOCKED_PREFIX
#endif

#ifndef QT_INTERLOCKED_PROTOTYPE
#define QT_INTERLOCKED_PROTOTYPE
#endif
87 
// Declare the Interlocked functions ourselves so this header does not have to
// drag in <windows.h> or <intrin.h>. The one-shot QT_INTERLOCKED_DECLARE_PROTOTYPES
// flag is consumed (undefined) here.
#ifdef QT_INTERLOCKED_DECLARE_PROTOTYPES
#undef QT_INTERLOCKED_DECLARE_PROTOTYPES

extern "C" {

    // 32-bit operations ("long" is 32 bits on all Windows ABIs).
    long QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Increment )(long QT_INTERLOCKED_VOLATILE *);
    long QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Decrement )(long QT_INTERLOCKED_VOLATILE *);
    long QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( CompareExchange )(long QT_INTERLOCKED_VOLATILE *, long, long);
    long QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Exchange )(long QT_INTERLOCKED_VOLATILE *, long);
    long QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( ExchangeAdd )(long QT_INTERLOCKED_VOLATILE *, long);

    // Pointer-sized operations only exist as separate intrinsics on non-x86
    // targets; on 32-bit x86 the plain 32-bit functions are used instead
    // (see the replacement macros further down).
# if !defined(__i386__) && !defined(_M_IX86)
    void * QT_INTERLOCKED_FUNCTION( CompareExchangePointer )(void * QT_INTERLOCKED_VOLATILE *, void *, void *);
    void * QT_INTERLOCKED_FUNCTION( ExchangePointer )(void * QT_INTERLOCKED_VOLATILE *, void *);
    __int64 QT_INTERLOCKED_FUNCTION( ExchangeAdd64 )(__int64 QT_INTERLOCKED_VOLATILE *, __int64);
# endif

    // 16-bit operations.
# ifdef Q_ATOMIC_INT16_IS_SUPPORTED
    short QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Increment16 )(short QT_INTERLOCKED_VOLATILE *);
    short QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Decrement16 )(short QT_INTERLOCKED_VOLATILE *);
    short QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( CompareExchange16 )(short QT_INTERLOCKED_VOLATILE *, short, short);
    short QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Exchange16 )(short QT_INTERLOCKED_VOLATILE *, short);
    short QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( ExchangeAdd16 )(short QT_INTERLOCKED_VOLATILE *, short);
# endif
    // 64-bit operations (ExchangeAdd64 is declared in the pointer block above).
# ifdef Q_ATOMIC_INT64_IS_SUPPORTED
    __int64 QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Increment64 )(__int64 QT_INTERLOCKED_VOLATILE *);
    __int64 QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Decrement64 )(__int64 QT_INTERLOCKED_VOLATILE *);
    __int64 QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( CompareExchange64 )(__int64 QT_INTERLOCKED_VOLATILE *, __int64, __int64);
    __int64 QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( Exchange64 )(__int64 QT_INTERLOCKED_VOLATILE *, __int64);
    //above already: qint64 QT_INTERLOCKED_PROTOTYPE QT_INTERLOCKED_FUNCTION( ExchangeAdd64 )(qint64 QT_INTERLOCKED_VOLATILE *, qint64);
# endif
}

#endif // QT_INTERLOCKED_DECLARE_PROTOTYPES

#undef QT_INTERLOCKED_PROTOTYPE
124 
////////////////////////////////////////////////////////////////////////////////////////////////////

// Ask the compiler to expand the functions declared above as intrinsics
// instead of emitting calls. The one-shot QT_INTERLOCKED_INTRINSIC flag is
// consumed (undefined) here.
#ifdef QT_INTERLOCKED_INTRINSIC
#undef QT_INTERLOCKED_INTRINSIC

# pragma intrinsic (_InterlockedIncrement)
# pragma intrinsic (_InterlockedDecrement)
# pragma intrinsic (_InterlockedExchange)
# pragma intrinsic (_InterlockedCompareExchange)
# pragma intrinsic (_InterlockedExchangeAdd)

// The pointer/64-bit forms are not available as intrinsics on 32-bit x86.
# if !defined(_M_IX86)
#  pragma intrinsic (_InterlockedCompareExchangePointer)
#  pragma intrinsic (_InterlockedExchangePointer)
#  pragma intrinsic (_InterlockedExchangeAdd64)
# endif

#endif // QT_INTERLOCKED_INTRINSIC
143 
////////////////////////////////////////////////////////////////////////////////////////////////////
// Interlocked* replacement macros

// Pointer-sized operation wrappers. On 32-bit x86 there are no separate
// *Pointer intrinsics, so the 32-bit integer functions are used with casts;
// on other targets the dedicated pointer/64-bit intrinsics are used.
#if defined(__i386__) || defined(_M_IX86)

# define QT_INTERLOCKED_COMPARE_EXCHANGE_POINTER(value, newValue, expectedValue) \
    reinterpret_cast<void *>( \
        QT_INTERLOCKED_FUNCTION(CompareExchange)( \
                reinterpret_cast<long QT_INTERLOCKED_VOLATILE *>(value), \
                long(newValue), \
                long(expectedValue)))

// NOTE: on this branch the macro yields "long", not "void *"; callers
// reinterpret_cast the result back to the pointer type.
# define QT_INTERLOCKED_EXCHANGE_POINTER(value, newValue) \
    QT_INTERLOCKED_FUNCTION(Exchange)( \
            reinterpret_cast<long QT_INTERLOCKED_VOLATILE *>(value), \
            long(newValue))

# define QT_INTERLOCKED_EXCHANGE_ADD_POINTER(value, valueToAdd) \
    QT_INTERLOCKED_FUNCTION(ExchangeAdd)( \
            reinterpret_cast<long QT_INTERLOCKED_VOLATILE *>(value), \
            (valueToAdd))

#else // !defined(__i386__) && !defined(_M_IX86)

# define QT_INTERLOCKED_COMPARE_EXCHANGE_POINTER(value, newValue, expectedValue) \
    QT_INTERLOCKED_FUNCTION(CompareExchangePointer)( \
            (void * QT_INTERLOCKED_VOLATILE *)(value), \
            (void *) (newValue), \
            (void *) (expectedValue))

# define QT_INTERLOCKED_EXCHANGE_POINTER(value, newValue) \
    QT_INTERLOCKED_FUNCTION(ExchangePointer)( \
            (void * QT_INTERLOCKED_VOLATILE *)(value), \
            (void *) (newValue))

# define QT_INTERLOCKED_EXCHANGE_ADD_POINTER(value, valueToAdd) \
    QT_INTERLOCKED_FUNCTION(ExchangeAdd64)( \
            reinterpret_cast<qint64 QT_INTERLOCKED_VOLATILE *>(value), \
            (valueToAdd))

#endif // !defined(__i386__) && !defined(_M_IX86)
185 
////////////////////////////////////////////////////////////////////////////////////////////////////

QT_BEGIN_NAMESPACE

#if 0
// silence syncqt warnings
QT_END_NAMESPACE
#pragma qt_sync_skip_header_check
#pragma qt_sync_stop_processing
#endif

// Feature macros consumed by Qt's generic atomic machinery: every operation
// family implemented in this header is both native and wait-free, since each
// one maps to a single Interlocked intrinsic.
#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_WAIT_FREE

#define Q_ATOMIC_INT_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_TEST_AND_SET_IS_WAIT_FREE

#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_WAIT_FREE

#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_WAIT_FREE

// 32-bit operations: always available.
#define Q_ATOMIC_INT32_IS_SUPPORTED

#define Q_ATOMIC_INT32_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT32_REFERENCE_COUNTING_IS_WAIT_FREE

#define Q_ATOMIC_INT32_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT32_TEST_AND_SET_IS_WAIT_FREE

#define Q_ATOMIC_INT32_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT32_FETCH_AND_STORE_IS_WAIT_FREE

#define Q_ATOMIC_INT32_FETCH_AND_ADD_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT32_FETCH_AND_ADD_IS_WAIT_FREE

// Pointer operations: always available.
#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_WAIT_FREE

#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_WAIT_FREE

#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_WAIT_FREE

// 16-bit operations: gated on the flag set at the top of this header.
#ifdef Q_ATOMIC_INT16_IS_SUPPORTED
# define Q_ATOMIC_INT16_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT16_REFERENCE_COUNTING_IS_WAIT_FREE

# define Q_ATOMIC_INT16_TEST_AND_SET_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT16_TEST_AND_SET_IS_WAIT_FREE

# define Q_ATOMIC_INT16_FETCH_AND_STORE_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT16_FETCH_AND_STORE_IS_WAIT_FREE

# define Q_ATOMIC_INT16_FETCH_AND_ADD_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT16_FETCH_AND_ADD_IS_WAIT_FREE

// Tell QAtomicInteger that 2-byte types are usable with this backend.
template<> struct QAtomicOpsSupport<2> { enum { IsSupported = 1 }; };
#endif

// 64-bit operations: only when targeting 64-bit Windows (see _WIN64 above).
#ifdef Q_ATOMIC_INT64_IS_SUPPORTED
# define Q_ATOMIC_INT64_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT64_REFERENCE_COUNTING_IS_WAIT_FREE

# define Q_ATOMIC_INT64_TEST_AND_SET_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT64_TEST_AND_SET_IS_WAIT_FREE

# define Q_ATOMIC_INT64_FETCH_AND_STORE_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT64_FETCH_AND_STORE_IS_WAIT_FREE

# define Q_ATOMIC_INT64_FETCH_AND_ADD_IS_ALWAYS_NATIVE
# define Q_ATOMIC_INT64_FETCH_AND_ADD_IS_WAIT_FREE

// Tell QAtomicInteger that 8-byte types are usable with this backend.
template<> struct QAtomicOpsSupport<8> { enum { IsSupported = 1 }; };
#endif
263 
////////////////////////////////////////////////////////////////////////////////////////////////////

// Maps an operand size N to the C++ type passed to the Interlocked functions.
// The 4-byte case must be "long" (not int/qint32) to match the intrinsic
// prototypes declared above; other sizes use the signed Qt integer of width N.
template <int N> struct QAtomicWindowsType { typedef typename QIntegerForSize<N>::Signed Type; };
template <> struct QAtomicWindowsType<4> { typedef long Type; };
268 
269 
// Implements the relaxed atomic operations for operand size N by delegating to
// the Interlocked intrinsic of the matching width (see the specializations of
// the member templates below). Everything not declared here is inherited from
// QGenericAtomicOps.
template <int N> struct QAtomicOpsBySize : QGenericAtomicOps<QAtomicOpsBySize<N> >
{
    // Each operation family is a single intrinsic: native and wait-free.
    static inline Q_DECL_CONSTEXPR bool isReferenceCountingNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isReferenceCountingWaitFree() noexcept { return true; }
    // ref/deref return false exactly when the counter reaches zero.
    template <typename T> static bool ref(T &_q_value) noexcept;
    template <typename T> static bool deref(T &_q_value) noexcept;

    static inline Q_DECL_CONSTEXPR bool isTestAndSetNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isTestAndSetWaitFree() noexcept { return true; }
    template <typename T> static bool testAndSetRelaxed(T &_q_value, T expectedValue, T newValue) noexcept;
    // Overload that additionally reports the value observed during the attempt.
    template <typename T>
    static bool testAndSetRelaxed(T &_q_value, T expectedValue, T newValue, T *currentValue) noexcept;

    static inline Q_DECL_CONSTEXPR bool isFetchAndStoreNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isFetchAndStoreWaitFree() noexcept { return true; }
    template <typename T> static T fetchAndStoreRelaxed(T &_q_value, T newValue) noexcept;

    static inline Q_DECL_CONSTEXPR bool isFetchAndAddNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isFetchAndAddWaitFree() noexcept { return true; }
    template <typename T> static T fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd) noexcept;

private:
    // The Windows type of the same size, as required by the intrinsics.
    typedef typename QAtomicWindowsType<N>::Type Type;
    // Reinterprets a T* as a pointer to the same-sized Windows type.
    template <typename T> static inline Type *atomic(T *t)
    { Q_STATIC_ASSERT(sizeof(T) == sizeof(Type)); return reinterpret_cast<Type *>(t); }
    // Converts a T value to the same-sized Windows type.
    template <typename T> static inline Type value(T t)
    { Q_STATIC_ASSERT(sizeof(T) == sizeof(Type)); return Type(t); }
};
298 
// Generic entry point used by QAtomicInteger<T>: dispatch purely on the size
// of T so that, e.g., int and long (both 4 bytes on Windows) share one
// implementation.
template <typename T>
struct QAtomicOps : QAtomicOpsBySize<sizeof(T)>
{
    typedef T Type;
};
304 
305 template<> template<typename T>
306 inline bool QAtomicOpsBySize<4>::ref(T &_q_value) noexcept
307 {
308     return QT_INTERLOCKED_FUNCTION(Increment)(atomic(&_q_value)) != 0;
309 }
310 
311 template<> template<typename T>
312 inline bool QAtomicOpsBySize<4>::deref(T &_q_value) noexcept
313 {
314     return QT_INTERLOCKED_FUNCTION(Decrement)(atomic(&_q_value)) != 0;
315 }
316 
317 template<> template<typename T>
318 inline bool QAtomicOpsBySize<4>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue) noexcept
319 {
320     return QT_INTERLOCKED_FUNCTION(CompareExchange)(atomic(&_q_value), value(newValue), value(expectedValue)) == value(expectedValue);
321 }
322 
323 template<> template <typename T>
324 inline bool QAtomicOpsBySize<4>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue, T *currentValue) noexcept
325 {
326     *currentValue = T(QT_INTERLOCKED_FUNCTION(CompareExchange)(atomic(&_q_value), newValue, expectedValue));
327     return *currentValue == expectedValue;
328 }
329 
330 template<> template<typename T>
331 inline T QAtomicOpsBySize<4>::fetchAndStoreRelaxed(T &_q_value, T newValue) noexcept
332 {
333     return QT_INTERLOCKED_FUNCTION(Exchange)(atomic(&_q_value), value(newValue));
334 }
335 
336 template<> template<typename T>
337 inline T QAtomicOpsBySize<4>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd) noexcept
338 {
339     return QT_INTERLOCKED_FUNCTION(ExchangeAdd)(atomic(&_q_value), value<T>(valueToAdd * QAtomicAdditiveType<T>::AddScale));
340 }
341 
#ifdef Q_ATOMIC_INT16_IS_SUPPORTED
// 16-bit specializations: forward to the "...16" Interlocked intrinsics,
// which operate on "short".

template<> template<typename T>
inline bool QAtomicOpsBySize<2>::ref(T &_q_value) noexcept
{
    // A non-zero result means the reference count is still in use.
    const Type result = QT_INTERLOCKED_FUNCTION(Increment16)(atomic(&_q_value));
    return result != 0;
}

template<> template<typename T>
inline bool QAtomicOpsBySize<2>::deref(T &_q_value) noexcept
{
    // Returns false exactly when the counter has dropped to zero.
    const Type result = QT_INTERLOCKED_FUNCTION(Decrement16)(atomic(&_q_value));
    return result != 0;
}

template<> template<typename T>
inline bool QAtomicOpsBySize<2>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue) noexcept
{
    // CompareExchange16 yields the previous contents; the swap took place
    // if and only if that equals the expected value.
    const Type previous = QT_INTERLOCKED_FUNCTION(CompareExchange16)(
            atomic(&_q_value), value(newValue), value(expectedValue));
    return previous == value(expectedValue);
}

template<> template <typename T>
inline bool QAtomicOpsBySize<2>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue, T *currentValue) noexcept
{
    // Same as above, but also reports the value observed during the attempt.
    const Type previous = QT_INTERLOCKED_FUNCTION(CompareExchange16)(
            atomic(&_q_value), newValue, expectedValue);
    *currentValue = T(previous);
    return *currentValue == expectedValue;
}

template<> template<typename T>
inline T QAtomicOpsBySize<2>::fetchAndStoreRelaxed(T &_q_value, T newValue) noexcept
{
    // Unconditionally installs newValue, handing back the previous contents.
    const Type previous = QT_INTERLOCKED_FUNCTION(Exchange16)(atomic(&_q_value), value(newValue));
    return T(previous);
}

template<> template<typename T>
inline T QAtomicOpsBySize<2>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd) noexcept
{
    // AddScale converts the additive operand to the representation's scale.
    const Type addend = value<T>(valueToAdd * QAtomicAdditiveType<T>::AddScale);
    const Type previous = QT_INTERLOCKED_FUNCTION(ExchangeAdd16)(atomic(&_q_value), addend);
    return T(previous);
}
#endif
380 
#ifdef Q_ATOMIC_INT64_IS_SUPPORTED
// 64-bit specializations: forward to the "...64" Interlocked intrinsics,
// which operate on "__int64".

template<> template<typename T>
inline bool QAtomicOpsBySize<8>::ref(T &_q_value) noexcept
{
    // A non-zero result means the reference count is still in use.
    const Type result = QT_INTERLOCKED_FUNCTION(Increment64)(atomic(&_q_value));
    return result != 0;
}

template<> template<typename T>
inline bool QAtomicOpsBySize<8>::deref(T &_q_value) noexcept
{
    // Returns false exactly when the counter has dropped to zero.
    const Type result = QT_INTERLOCKED_FUNCTION(Decrement64)(atomic(&_q_value));
    return result != 0;
}

template<> template<typename T>
inline bool QAtomicOpsBySize<8>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue) noexcept
{
    // CompareExchange64 yields the previous contents; the swap took place
    // if and only if that equals the expected value.
    const Type previous = QT_INTERLOCKED_FUNCTION(CompareExchange64)(
            atomic(&_q_value), value(newValue), value(expectedValue));
    return previous == value(expectedValue);
}

template<> template <typename T>
inline bool QAtomicOpsBySize<8>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue, T *currentValue) noexcept
{
    // Same as above, but also reports the value observed during the attempt.
    const Type previous = QT_INTERLOCKED_FUNCTION(CompareExchange64)(
            atomic(&_q_value), newValue, expectedValue);
    *currentValue = T(previous);
    return *currentValue == expectedValue;
}

template<> template<typename T>
inline T QAtomicOpsBySize<8>::fetchAndStoreRelaxed(T &_q_value, T newValue) noexcept
{
    // Unconditionally installs newValue, handing back the previous contents.
    const Type previous = QT_INTERLOCKED_FUNCTION(Exchange64)(atomic(&_q_value), value(newValue));
    return T(previous);
}

template<> template<typename T>
inline T QAtomicOpsBySize<8>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd) noexcept
{
    // AddScale converts the additive operand to the representation's scale.
    const Type addend = value<T>(valueToAdd * QAtomicAdditiveType<T>::AddScale);
    const Type previous = QT_INTERLOCKED_FUNCTION(ExchangeAdd64)(atomic(&_q_value), addend);
    return T(previous);
}
#endif
419 
// Specialization for pointer types, since we have Interlocked*Pointer() variants in some configurations
// (on 32-bit x86 the QT_INTERLOCKED_*_POINTER macros fall back to the 32-bit
// integer intrinsics instead). Reference counting does not apply to pointers,
// so only test-and-set, fetch-and-store and fetch-and-add are declared here;
// the rest is inherited from QGenericAtomicOps.
template <typename T>
struct QAtomicOps<T *> : QGenericAtomicOps<QAtomicOps<T *> >
{
    typedef T *Type;

    static inline Q_DECL_CONSTEXPR bool isTestAndSetNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isTestAndSetWaitFree() noexcept { return true; }
    static bool testAndSetRelaxed(T *&_q_value, T *expectedValue, T *newValue) noexcept;
    // Overload that additionally reports the pointer observed during the attempt.
    static bool testAndSetRelaxed(T *&_q_value, T *expectedValue, T *newValue, T **currentValue) noexcept;

    static inline Q_DECL_CONSTEXPR bool isFetchAndStoreNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isFetchAndStoreWaitFree() noexcept { return true; }
    static T *fetchAndStoreRelaxed(T *&_q_value, T *newValue) noexcept;

    static inline Q_DECL_CONSTEXPR bool isFetchAndAddNative() noexcept { return true; }
    static inline Q_DECL_CONSTEXPR bool isFetchAndAddWaitFree() noexcept { return true; }
    // valueToAdd is in units of T elements, like ordinary pointer arithmetic.
    static T *fetchAndAddRelaxed(T *&_q_value, qptrdiff valueToAdd) noexcept;
};
439 
440 template <typename T>
441 inline bool QAtomicOps<T *>::testAndSetRelaxed(T *&_q_value, T *expectedValue, T *newValue) noexcept
442 {
443     return QT_INTERLOCKED_COMPARE_EXCHANGE_POINTER(&_q_value, newValue, expectedValue) == expectedValue;
444 }
445 
446 template <typename T>
447 inline bool QAtomicOps<T *>::testAndSetRelaxed(T *&_q_value, T *expectedValue, T *newValue, T **currentValue) noexcept
448 {
449     *currentValue = reinterpret_cast<T *>(QT_INTERLOCKED_COMPARE_EXCHANGE_POINTER(&_q_value, newValue, expectedValue));
450     return *currentValue == expectedValue;
451 }
452 
453 template <typename T>
454 inline T *QAtomicOps<T *>::fetchAndStoreRelaxed(T *&_q_value, T *newValue) noexcept
455 {
456     return reinterpret_cast<T *>(QT_INTERLOCKED_EXCHANGE_POINTER(&_q_value, newValue));
457 }
458 
459 template <typename T>
460 inline T *QAtomicOps<T *>::fetchAndAddRelaxed(T *&_q_value, qptrdiff valueToAdd) noexcept
461 {
462     return reinterpret_cast<T *>(QT_INTERLOCKED_EXCHANGE_ADD_POINTER(&_q_value, valueToAdd * sizeof(T)));
463 }
464 
////////////////////////////////////////////////////////////////////////////////////////////////////
// Cleanup

// Undefine the helper macros: they are implementation details of this header
// and must not leak into code that includes it.
#undef QT_INTERLOCKED_CONCAT_I
#undef QT_INTERLOCKED_CONCAT
#undef QT_INTERLOCKED_FUNCTION
#undef QT_INTERLOCKED_PREFIX

#undef QT_INTERLOCKED_VOLATILE

#undef QT_INTERLOCKED_INCREMENT
#undef QT_INTERLOCKED_DECREMENT
#undef QT_INTERLOCKED_COMPARE_EXCHANGE
#undef QT_INTERLOCKED_EXCHANGE
#undef QT_INTERLOCKED_EXCHANGE_ADD
#undef QT_INTERLOCKED_COMPARE_EXCHANGE_POINTER
#undef QT_INTERLOCKED_EXCHANGE_POINTER
#undef QT_INTERLOCKED_EXCHANGE_ADD_POINTER

QT_END_NAMESPACE
#endif // QATOMIC_MSVC_H
486