/*=========================================================================

  Program:   Visualization Toolkit
  Module:    vtkAtomic.cxx

  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
  All rights reserved.
  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.

     This software is distributed WITHOUT ANY WARRANTY; without even
     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
     PURPOSE.  See the above copyright notice for more information.

=========================================================================*/
15 
16 #include "vtkAtomic.h"
17 
18 
19 #if !defined(VTK_GCC_ATOMICS_32) && !defined(VTK_APPLE_ATOMICS_32) &&\
20     !defined(VTK_WINDOWS_ATOMICS_32)
21 # define VTK_LOCK_BASED_ATOMICS_32
22 #endif
23 
24 #if !defined(VTK_GCC_ATOMICS_64) && !defined(VTK_APPLE_ATOMICS_64) &&\
25     !defined(VTK_WINDOWS_ATOMICS_64)
26 # define VTK_LOCK_BASED_ATOMICS_64
27 #endif
28 
29 
30 #if defined(VTK_WINDOWS_ATOMICS_32) || defined(VTK_WINDOWS_ATOMICS_64)
31 # include "vtkWindows.h"
32 #endif
33 
34 #if defined(VTK_LOCK_BASED_ATOMICS_32) || defined(VTK_LOCK_BASED_ATOMICS_64)
35 
36 #include "vtkSimpleCriticalSection.h"
37 
38 class CriticalSectionGuard
39     {
40 public:
CriticalSectionGuard(vtkSimpleCriticalSection & cs)41   CriticalSectionGuard(vtkSimpleCriticalSection &cs) : CriticalSection(cs)
42   {
43     this->CriticalSection.Lock();
44   }
45 
~CriticalSectionGuard()46   ~CriticalSectionGuard()
47   {
48     this->CriticalSection.Unlock();
49   }
50 
51 private:
52   // not copyable
53   CriticalSectionGuard(const CriticalSectionGuard&);
54   void operator=(const CriticalSectionGuard&);
55 
56   vtkSimpleCriticalSection &CriticalSection;
57     };
58 
59 #if defined(VTK_LOCK_BASED_ATOMICS_64)
atomic_type(vtkTypeInt64 init)60 detail::AtomicOps<8>::atomic_type::atomic_type(vtkTypeInt64 init)
61   : var(init)
62 {
63   this->csec = new vtkSimpleCriticalSection;
64 }
65 
~atomic_type()66 detail::AtomicOps<8>::atomic_type::~atomic_type()
67 {
68   delete this->csec;
69 }
70 #endif
71 
72 #if defined(VTK_LOCK_BASED_ATOMICS_32)
atomic_type(vtkTypeInt32 init)73 detail::AtomicOps<4>::atomic_type::atomic_type(vtkTypeInt32 init)
74   : var(init)
75 {
76   this->csec = new vtkSimpleCriticalSection;
77 }
78 
~atomic_type()79 detail::AtomicOps<4>::atomic_type::~atomic_type()
80 {
81   delete this->csec;
82 }
83 #endif
84 
85 #endif // VTK_LOCK_BASED_ATOMICS
86 
87 
88 namespace detail
89 {
90 
91 #if defined(VTK_WINDOWS_ATOMICS_64) || defined(VTK_LOCK_BASED_ATOMICS_64)
92 
AddAndFetch(atomic_type * ref,vtkTypeInt64 val)93 vtkTypeInt64 AtomicOps<8>::AddAndFetch(atomic_type *ref, vtkTypeInt64 val)
94 {
95 #if defined(VTK_WINDOWS_ATOMICS_64)
96 # if defined(VTK_HAS_INTERLOCKEDADD)
97   return InterlockedAdd64(ref, val);
98 # else
99   return InterlockedExchangeAdd64(ref, val) + val;
100 # endif
101 #else
102   CriticalSectionGuard csg(*ref->csec);
103   return ref->var += val;
104 #endif
105 }
106 
SubAndFetch(atomic_type * ref,vtkTypeInt64 val)107 vtkTypeInt64 AtomicOps<8>::SubAndFetch(atomic_type *ref, vtkTypeInt64 val)
108 {
109 #if defined(VTK_WINDOWS_ATOMICS_64)
110 # if defined(VTK_HAS_INTERLOCKEDADD)
111   return InterlockedAdd64(ref, -val);
112 # else
113   return InterlockedExchangeAdd64(ref, -val) - val;
114 # endif
115 #else
116   CriticalSectionGuard csg(*ref->csec);
117   return ref->var -= val;
118 #endif
119 }
120 
PreIncrement(atomic_type * ref)121 vtkTypeInt64 AtomicOps<8>::PreIncrement(atomic_type *ref)
122 {
123 #if defined(VTK_WINDOWS_ATOMICS_64)
124   return InterlockedIncrement64(ref);
125 #else
126   CriticalSectionGuard csg(*ref->csec);
127   return ++(ref->var);
128 #endif
129 }
130 
PreDecrement(atomic_type * ref)131 vtkTypeInt64 AtomicOps<8>::PreDecrement(atomic_type *ref)
132 {
133 #if defined(VTK_WINDOWS_ATOMICS_64)
134   return InterlockedDecrement64(ref);
135 #else
136   CriticalSectionGuard csg(*ref->csec);
137   return --(ref->var);
138 #endif
139 }
140 
PostIncrement(atomic_type * ref)141 vtkTypeInt64 AtomicOps<8>::PostIncrement(atomic_type *ref)
142 {
143 #if defined(VTK_WINDOWS_ATOMICS_64)
144   vtkTypeInt64 val = InterlockedIncrement64(ref);
145   return --val;
146 #else
147   CriticalSectionGuard csg(*ref->csec);
148   return (ref->var)++;
149 #endif
150 }
151 
PostDecrement(atomic_type * ref)152 vtkTypeInt64 AtomicOps<8>::PostDecrement(atomic_type *ref)
153 {
154 #if defined(VTK_WINDOWS_ATOMICS_64)
155   vtkTypeInt64 val = InterlockedDecrement64(ref);
156   return ++val;
157 #else
158   CriticalSectionGuard csg(*ref->csec);
159   return (ref->var)--;
160 #endif
161 }
162 
Load(const atomic_type * ref)163 vtkTypeInt64 AtomicOps<8>::Load(const atomic_type *ref)
164 {
165 #if defined(VTK_WINDOWS_ATOMICS_64)
166   vtkTypeInt64 val;
167   InterlockedExchange64(&val, *ref);
168   return val;
169 #else
170   CriticalSectionGuard csg(*ref->csec);
171   return ref->var;
172 #endif
173 }
174 
Store(atomic_type * ref,vtkTypeInt64 val)175 void AtomicOps<8>::Store(atomic_type *ref, vtkTypeInt64 val)
176 {
177 #if defined(VTK_WINDOWS_ATOMICS_64)
178   InterlockedExchange64(ref, val);
179 #else
180   CriticalSectionGuard csg(*ref->csec);
181   ref->var = val;
182 #endif
183 }
184 
185 #endif // defined(VTK_WINDOWS_ATOMICS_64) || defined(VTK_LOCK_BASED_ATOMICS_64)
186 
187 
188 #if defined(VTK_WINDOWS_ATOMICS_32) || defined(VTK_LOCK_BASED_ATOMICS_32)
189 
AddAndFetch(atomic_type * ref,vtkTypeInt32 val)190 vtkTypeInt32 AtomicOps<4>::AddAndFetch(atomic_type *ref, vtkTypeInt32 val)
191 {
192 #if defined(VTK_WINDOWS_ATOMICS_32)
193 # if defined(VTK_HAS_INTERLOCKEDADD)
194   return InterlockedAdd(reinterpret_cast<long*>(ref), val);
195 # else
196   return InterlockedExchangeAdd(reinterpret_cast<long*>(ref), val) + val;
197 # endif
198 #else
199   CriticalSectionGuard csg(*ref->csec);
200   return ref->var += val;
201 #endif
202 }
203 
SubAndFetch(atomic_type * ref,vtkTypeInt32 val)204 vtkTypeInt32 AtomicOps<4>::SubAndFetch(atomic_type *ref, vtkTypeInt32 val)
205 {
206 #if defined(VTK_WINDOWS_ATOMICS_32)
207 # if defined(VTK_HAS_INTERLOCKEDADD)
208   return InterlockedAdd(reinterpret_cast<long*>(ref), -val);
209 # else
210   return InterlockedExchangeAdd(reinterpret_cast<long*>(ref), -val) - val;
211 # endif
212 #else
213   CriticalSectionGuard csg(*ref->csec);
214   return ref->var -= val;
215 #endif
216 }
217 
PreIncrement(atomic_type * ref)218 vtkTypeInt32 AtomicOps<4>::PreIncrement(atomic_type *ref)
219 {
220 #if defined(VTK_WINDOWS_ATOMICS_32)
221   return InterlockedIncrement(reinterpret_cast<long*>(ref));
222 #else
223   CriticalSectionGuard csg(*ref->csec);
224   return ++(ref->var);
225 #endif
226 }
227 
PreDecrement(atomic_type * ref)228 vtkTypeInt32 AtomicOps<4>::PreDecrement(atomic_type *ref)
229 {
230 #if defined(VTK_WINDOWS_ATOMICS_32)
231   return InterlockedDecrement(reinterpret_cast<long*>(ref));
232 #else
233   CriticalSectionGuard csg(*ref->csec);
234   return --(ref->var);
235 #endif
236 }
237 
PostIncrement(atomic_type * ref)238 vtkTypeInt32 AtomicOps<4>::PostIncrement(atomic_type *ref)
239 {
240 #if defined(VTK_WINDOWS_ATOMICS_32)
241   vtkTypeInt32 val = InterlockedIncrement(reinterpret_cast<long*>(ref));
242   return --val;
243 #else
244   CriticalSectionGuard csg(*ref->csec);
245   return (ref->var)++;
246 #endif
247 }
248 
PostDecrement(atomic_type * ref)249 vtkTypeInt32 AtomicOps<4>::PostDecrement(atomic_type *ref)
250 {
251 #if defined(VTK_WINDOWS_ATOMICS_32)
252   vtkTypeInt32 val = InterlockedDecrement(reinterpret_cast<long*>(ref));
253   return ++val;
254 #else
255   CriticalSectionGuard csg(*ref->csec);
256   return (ref->var)--;
257 #endif
258 }
259 
Load(const atomic_type * ref)260 vtkTypeInt32 AtomicOps<4>::Load(const atomic_type *ref)
261 {
262 #if defined(VTK_WINDOWS_ATOMICS_32)
263   long val;
264   InterlockedExchange(&val, *ref);
265   return val;
266 #else
267   CriticalSectionGuard csg(*ref->csec);
268   return ref->var;
269 #endif
270 }
271 
Store(atomic_type * ref,vtkTypeInt32 val)272 void AtomicOps<4>::Store(atomic_type *ref, vtkTypeInt32 val)
273 {
274 #if defined(VTK_WINDOWS_ATOMICS_32)
275   InterlockedExchange(reinterpret_cast<long*>(ref), val);
276 #else
277   CriticalSectionGuard csg(*ref->csec);
278   ref->var = val;
279 #endif
280 }
281 
282 #endif // defined(VTK_WINDOWS_ATOMICS_32) || defined(VTK_LOCK_BASED_ATOMICS_32)
283 
284 } // namespace detail
285