1 /* Copyright (c) 2008, 2021, Oracle and/or its affiliates.
2 
3    This program is free software; you can redistribute it and/or modify
4    it under the terms of the GNU General Public License, version 2.0,
5    as published by the Free Software Foundation.
6 
7    This program is also distributed with certain software (including
8    but not limited to OpenSSL) that is licensed under separate terms,
9    as designated in a particular file or component or in included license
10    documentation.  The authors of MySQL hereby grant you an additional
11    permission to link the program and your derivative works with the
12    separately licensed software that they have included with MySQL.
13 
14    This program is distributed in the hope that it will be useful,
15    but WITHOUT ANY WARRANTY; without even the implied warranty of
16    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
17    GNU General Public License, version 2.0, for more details.
18 
19    You should have received a copy of the GNU General Public License
20    along with this program; if not, write to the Free Software
21    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA */
22 
23 /**
24  * Only memory barriers *must* be ported
 * if XCNG (x86-semantics) is provided, spinlocks will be enabled
26  */
27 #ifndef NDB_MT_ASM_H
28 #define NDB_MT_ASM_H
29 
30 /**
31  * Remove comment on NDB_USE_SPINLOCK if it is desired to use spinlocks
32  * instead of the normal mutex calls. This will not work when configuring
33  * with realtime and is thus disabled by default, but can be activated for
34  * special builds.
35  */
36 //#define NDB_USE_SPINLOCK
37 
#if defined(__GNUC__)
/********************
 * GCC
 *******************/
#if defined(__x86_64__) || defined (__i386__) /* 64 or 32 bit x86 */

#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS
#define NDB_HAVE_XCNG
#define NDB_HAVE_CPU_PAUSE

/* Memory barriers, these definitions are for x86_64. */
#define mb()    asm volatile("mfence":::"memory")
/* According to Intel docs, it does not reorder loads. */
/* #define rmb() asm volatile("lfence":::"memory") */
/* x86 does not reorder loads with loads, nor stores with stores, so
   rmb()/wmb() only need to prevent compiler reordering (empty asm with
   a "memory" clobber). */
#define rmb()   asm volatile("" ::: "memory")
#define wmb()   asm volatile("" ::: "memory")
#define read_barrier_depends()  do {} while(0)
58 
/**
 * Atomically exchange the 32-bit word at *addr with val and return the
 * previous value.  On x86 the xchg instruction with a memory operand
 * carries an implicit LOCK prefix, so the swap is atomic.
 *
 * NOTE(review): only *addr ("+m") is declared as touched memory -- there
 * is no full "memory" clobber, so the compiler may reorder accesses to
 * other locations around this swap; presumably callers combine it with
 * mb()/rmb()/wmb() where that matters.  Verify against call sites.
 */
static
inline
int
xcng(volatile unsigned * addr, int val)
{
  /* "+r" (val): val is both the value stored and the value returned. */
  asm volatile ("xchg %0, %1;" : "+r" (val) , "+m" (*addr));
  return val;
}
67 
/**
 * Spin-wait hint for busy-wait loops.  "rep;nop" is the legacy encoding
 * of the x86 PAUSE instruction: it reduces power consumption and avoids
 * the memory-order mis-speculation penalty when leaving a spin loop,
 * and decodes as a plain NOP on very old CPUs.
 */
static
inline
void
cpu_pause()
{
  asm volatile ("rep;nop");
}
75 
#elif defined(__sparc__)

#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS

/* SPARC membar with explicit ordering masks for each barrier flavour. */
#define mb()    asm volatile("membar #LoadLoad | #LoadStore | #StoreLoad | #StoreStore":::"memory")
#define rmb()   asm volatile("membar #LoadLoad" ::: "memory")
#define wmb()   asm volatile("membar #StoreStore" ::: "memory")
#define read_barrier_depends()  do {} while(0)

/* atomic_swap_32() is provided by Solaris <atomic.h> when available. */
#ifdef HAVE_ATOMIC_H
#include <atomic.h>
#endif
91 
#ifdef HAVE_ATOMIC_SWAP_32
/**
 * Atomic exchange built on Solaris atomic_swap_32(), bracketed by
 * membars so the swap gets acquire/release-like ordering suitable for
 * lock implementations: prior loads/stores cannot sink below the swap,
 * later stores/loads cannot rise above it.
 */
static inline
int
xcng(volatile unsigned * addr, int val)
{
  asm volatile("membar #StoreLoad | #LoadLoad");
  int ret = atomic_swap_32(addr, val);
  asm volatile("membar #StoreLoad | #StoreStore");
  return ret;
}
/* cpu_pause() is a no-op on SPARC. */
#define cpu_pause()
#define NDB_HAVE_XCNG
#define NDB_HAVE_CPU_PAUSE
#else
/* link error if used incorrectly (i.e wo/ having NDB_HAVE_XCNG) */
extern  int xcng(volatile unsigned * addr, int val);
extern void cpu_pause();
#endif
110 
#elif defined(__powerpc__)
#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS
#define NDB_HAVE_XCNG

/*
 * NOTE(review): lwsync orders load-load, load-store and store-store,
 * but NOT store-load; a full barrier on Power is normally "sync".
 * Presumably no caller relies on store-load ordering from mb() here --
 * confirm before reusing these definitions elsewhere.
 */
#define mb() asm volatile("lwsync;" ::: "memory")
#define rmb() asm volatile("lwsync;" ::: "memory")
#define wmb() asm volatile("lwsync;" ::: "memory")
#define read_barrier_depends() do {} while(0)
122 
/**
 * Atomic exchange via a lwarx/stwcx. load-reserve / store-conditional
 * retry loop.  The leading lwsync orders prior accesses before the swap
 * (release); the trailing isync prevents subsequent instructions from
 * executing before the swap completes (acquire).
 */
static
inline
int
xcng(volatile unsigned * addr, int val)
{
  int prev;

  asm volatile ( "lwsync;\n"
		 "1: lwarx   %0,0,%2;"   /* load-reserve old value into prev */
		 "   stwcx.  %3,0,%2;"   /* conditionally store val */
		 "   bne-    1b;"        /* reservation lost: retry */
		 "isync;"
		 : "=&r" (prev), "+m" (*(volatile unsigned int *)addr)
		 : "r" (addr), "r" (val)
		 : "cc", "memory");

  return prev;
}
141 
142 #else
143 #define NDB_NO_ASM "Unsupported architecture (gcc)"
144 #endif
145 
#elif defined(__sun)
/********************
 * SUN STUDIO
 *******************/

/**
 * TODO check that asm ("") implies a compiler barrier
 *      i.e that it clobbers memory
 */
#if defined(__x86_64) || defined (__i386) /* 64 or 32 bit x86 */
#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS

#define mb()    asm ("mfence")
/* According to Intel docs, it does not reorder loads. */
/* #define rmb() asm ("lfence") */
/* x86 keeps load-load and store-store order; only compiler reordering
   must be suppressed (see TODO above). */
#define rmb()   asm ("")
#define wmb()   asm ("")
#define read_barrier_depends()  do {} while(0)

#elif defined(__sparc)
#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS

/* SPARC membar with explicit ordering masks for each barrier flavour. */
#define mb() asm ("membar #LoadLoad | #LoadStore | #StoreLoad | #StoreStore")
#define rmb() asm ("membar #LoadLoad")
#define wmb() asm ("membar #StoreStore")
#define read_barrier_depends()  do {} while(0)
#else
#define NDB_NO_ASM "Unsupported architecture (sun studio)"
#error "Unsupported architecture (sun studio)"
#endif
182 
#if defined(__x86_64) || defined (__i386) || defined(__sparc)
/**
 * we should probably use assembler for x86 as well...
 *   but i'm not really sure how you do this in sun-studio :-(
 */
#ifdef HAVE_ATOMIC_H
#include <atomic.h>
#endif

#ifdef HAVE_ATOMIC_SWAP_32
#define NDB_HAVE_XCNG
#define NDB_HAVE_CPU_PAUSE
#if defined(__sparc)
/**
 * Atomic exchange via Solaris atomic_swap_32(), bracketed by membars
 * (mirrors the gcc/SPARC version earlier in this file).
 */
static inline
int
xcng(volatile unsigned * addr, int val)
{
  asm ("membar #StoreLoad | #LoadLoad");
  int ret = atomic_swap_32(addr, val);
  asm ("membar #StoreLoad | #StoreStore");
  return ret;
}
/* cpu_pause() is a no-op on SPARC. */
#define cpu_pause()
#elif defined(__x86_64) || defined (__i386)
/**
 * Atomic exchange via Solaris atomic_swap_32(); no explicit membars on
 * x86 (see TODO below about whether the intrinsic implies them).
 */
static inline
int
xcng(volatile unsigned * addr, int val)
{
  /**
   * TODO check that atomic_swap_32 on x86-64 with sun-studio implies
   *  proper barriers
   */
  int ret = atomic_swap_32(addr, val);
  return ret;
}
/* Spin-wait hint: "rep;nop" is the encoding of the x86 PAUSE instruction. */
static
inline
void
cpu_pause()
{
  asm volatile ("rep;nop");
}
#endif
#else
/* link error if used incorrectly (i.e wo/ having NDB_HAVE_XCNG) */
extern  int xcng(volatile unsigned * addr, int val);
extern void cpu_pause();
#endif
#endif
#elif defined (_MSC_VER)

#define NDB_HAVE_MB
#define NDB_HAVE_RMB
#define NDB_HAVE_WMB
#define NDB_HAVE_READ_BARRIER_DEPENDS

#include <windows.h>
#define mb()    MemoryBarrier()
#define read_barrier_depends()  do {} while(0)
#ifdef _DEBUG
/* NOTE(review): debug builds make rmb()/wmb() plain no-ops --
   presumably because the unoptimized compiler does not reorder memory
   accesses; confirm this assumption. */
#define rmb()   do {} while(0)
#define wmb()   do {} while(0)
#else
#include <intrin.h>
/********************
 * Microsoft
 *******************/
/* Using intrinsics available on all architectures */
#define rmb()   _ReadBarrier()
#define wmb()   _WriteBarrier()
#endif

#define NDB_HAVE_XCNG
#define NDB_HAVE_CPU_PAUSE
257 
258 static inline
259 int
xcng(volatile unsigned * addr,int val)260 xcng(volatile unsigned * addr, int val)
261 {
262   return InterlockedExchange((volatile LONG*)addr, val);
263 }
264 
/**
 * Spin-wait hint for busy-wait loops; YieldProcessor() expands to the
 * architecture's pause/yield instruction on Windows.
 */
static inline
void
cpu_pause()
{
  YieldProcessor();
}
272 #else
273 #define NDB_NO_ASM "Unsupported compiler"
274 #endif
275 
276 #endif
277