#ifndef ATOMIC_MSC_INCLUDED
#define ATOMIC_MSC_INCLUDED

/* Copyright (c) 2006, 2014, Oracle and/or its affiliates. All rights reserved.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; version 2 of the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1335  USA */

#include <windows.h>

static inline int my_atomic_cas32(int32 volatile *a, int32 *cmp, int32 set)
{
  int32 initial_cmp= *cmp;
  int32 initial_a= InterlockedCompareExchange((volatile LONG*)a,
                                              set, initial_cmp);
  int ret= (initial_a == initial_cmp);
  if (!ret)
    *cmp= initial_a;
  return ret;
}

static inline int my_atomic_cas64(int64 volatile *a, int64 *cmp, int64 set)
{
  int64 initial_cmp= *cmp;
  int64 initial_a= InterlockedCompareExchange64((volatile LONGLONG*)a,
                                                (LONGLONG)set,
                                                (LONGLONG)initial_cmp);
  int ret= (initial_a == initial_cmp);
  if (!ret)
    *cmp= initial_a;
  return ret;
}

static inline int my_atomic_casptr(void * volatile *a, void **cmp, void *set)
{
  void *initial_cmp= *cmp;
  void *initial_a= InterlockedCompareExchangePointer(a, set, initial_cmp);
  int ret= (initial_a == initial_cmp);
  if (!ret)
    *cmp= initial_a;
  return ret;
}

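/*
  Usage sketch (editor's illustration, not part of the original header):
  all three my_atomic_cas*() functions return non-zero on success and, on
  failure, copy the value actually found in *a into *cmp, so a retry loop
  does not need to re-read the variable itself.  The names "counter" and
  "add_if_below" are hypothetical.

    static int32 volatile counter= 0;

    static int add_if_below(int32 limit)
    {
      int32 old= my_atomic_load32(&counter);
      do
      {
        if (old >= limit)
          return 0;                                 // give up, limit reached
      } while (!my_atomic_cas32(&counter, &old, old + 1));
      return 1;                                     // we incremented it
    }
*/
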
static inline int32 my_atomic_add32(int32 volatile *a, int32 v)
{
  return (int32)InterlockedExchangeAdd((volatile LONG*)a, v);
}

static inline int64 my_atomic_add64(int64 volatile *a, int64 v)
{
  return (int64)InterlockedExchangeAdd64((volatile LONGLONG*)a, (LONGLONG)v);
}

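/*
  Illustration only: like the underlying InterlockedExchangeAdd(),
  my_atomic_add32()/my_atomic_add64() return the value the variable held
  *before* the addition (fetch-and-add).  A hypothetical ticket counter:

    static int32 volatile next_ticket= 0;

    static int32 take_ticket(void)
    {
      return my_atomic_add32(&next_ticket, 1);      // first caller gets 0
    }
*/
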
/*
  According to MSDN:

  Simple reads and writes to properly-aligned 32-bit variables are atomic
  operations.
  ...
  Simple reads and writes to properly aligned 64-bit variables are atomic on
  64-bit Windows. Reads and writes to 64-bit values are not guaranteed to be
  atomic on 32-bit Windows.

  https://msdn.microsoft.com/en-us/library/windows/desktop/ms684122(v=vs.85).aspx
*/

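/*
  Consequence for the code below (editor's sketch of the reasoning, not MSDN
  text): on 32-bit Windows a plain "int64 value= *a;" may be performed as two
  32-bit loads and can observe a torn value.  Outside of _M_X64,
  my_atomic_load64() therefore reads through
  InterlockedCompareExchange64(a, 0, 0), which returns the current 64-bit
  value atomically and never changes it (it would only swap in 0 if the value
  already were 0), and my_atomic_store64() writes through
  InterlockedExchange64().
*/
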
static inline int32 my_atomic_load32(int32 volatile *a)
{
  int32 value= *a;
  MemoryBarrier();
  return value;
}

static inline int64 my_atomic_load64(int64 volatile *a)
{
#ifdef _M_X64
  int64 value= *a;
  MemoryBarrier();
  return value;
#else
  return (int64) InterlockedCompareExchange64((volatile LONGLONG *) a, 0, 0);
#endif
}

static inline void* my_atomic_loadptr(void * volatile *a)
{
  void *value= *a;
  MemoryBarrier();
  return value;
}

static inline int32 my_atomic_fas32(int32 volatile *a, int32 v)
{
  return (int32)InterlockedExchange((volatile LONG*)a, v);
}

static inline int64 my_atomic_fas64(int64 volatile *a, int64 v)
{
  return (int64)InterlockedExchange64((volatile LONGLONG*)a, v);
}

static inline void * my_atomic_fasptr(void * volatile *a, void * v)
{
  return InterlockedExchangePointer(a, v);
}

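/*
  Illustration only (hypothetical helpers, not used by this header):
  my_atomic_fas*() is an atomic exchange ("fetch and store") that returns the
  previous value, which is enough for a basic test-and-set spinlock:

    static int32 volatile lock_word= 0;

    static void spin_lock(void)
    {
      while (my_atomic_fas32(&lock_word, 1))       // previous value was 1:
        ;                                          // someone else holds it
    }

    static void spin_unlock(void)
    {
      my_atomic_store32(&lock_word, 0);
    }
*/
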
static inline void my_atomic_store32(int32 volatile *a, int32 v)
{
  MemoryBarrier();
  *a= v;
}

static inline void my_atomic_store64(int64 volatile *a, int64 v)
{
#ifdef _M_X64
  MemoryBarrier();
  *a= v;
#else
  (void) InterlockedExchange64((volatile LONGLONG *) a, v);
#endif
}

static inline void my_atomic_storeptr(void * volatile *a, void *v)
{
  MemoryBarrier();
  *a= v;
}

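/*
  Illustration only: the store functions issue a full MemoryBarrier() before
  the plain write and the load functions issue one after the plain read, so
  the usual flag-publication pattern works: a writer that fills in data and
  then sets a flag with my_atomic_store32() cannot have the flag become
  visible before the data, and a reader that observes the flag through
  my_atomic_load32() will also observe the data.  "payload" and "ready" are
  hypothetical names.

    static int payload;
    static int32 volatile ready= 0;

    static void producer(void)
    {
      payload= 42;
      my_atomic_store32(&ready, 1);    // barrier, then flag write
    }

    static int consumer(void)
    {
      if (my_atomic_load32(&ready))    // flag read, then barrier
        return payload;
      return -1;
    }
*/
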
#endif /* ATOMIC_MSC_INCLUDED */