1 /*
2  * %CopyrightBegin%
3  *
4  * Copyright Ericsson AB 2005-2016. All Rights Reserved.
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License");
7  * you may not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  *     http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS,
14  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  *
18  * %CopyrightEnd%
19  */
20 
21 /*
22  * Native ethread atomics on x86/x86-64.
23  * Author: Mikael Pettersson.
24  *
25  * This code requires a 486 or newer processor.
26  */
27 
28 #undef ETHR_INCLUDE_ATOMIC_IMPL__
29 #if !defined(ETHR_X86_ATOMIC32_H__) \
30     && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
31 #  define ETHR_X86_ATOMIC32_H__
32 #  define ETHR_INCLUDE_ATOMIC_IMPL__ 4
33 #  undef ETHR_ATOMIC_WANT_32BIT_IMPL__
34 #elif !defined(ETHR_X86_ATOMIC64_H__) \
35       && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
36 #  define ETHR_X86_ATOMIC64_H__
37 #  define ETHR_INCLUDE_ATOMIC_IMPL__ 8
38 #  undef ETHR_ATOMIC_WANT_64BIT_IMPL__
39 #endif
40 
41 #ifdef ETHR_INCLUDE_ATOMIC_IMPL__
42 
43 #  ifndef ETHR_X86_ATOMIC_COMMON__
44 #    define ETHR_X86_ATOMIC_COMMON__
45 #    define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1
46 #  endif /* ETHR_X86_ATOMIC_COMMON__ */
47 
48 #  if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
49 #    define ETHR_HAVE_NATIVE_ATOMIC32 1
50 #    define ETHR_NATIVE_ATOMIC32_IMPL "ethread"
51 #    define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
52 #    define ETHR_ATMC_T__ ethr_native_atomic32_t
53 #    define ETHR_AINT_T__ ethr_sint32_t
54 #    define ETHR_AINT_SUFFIX__ "l"
55 #  elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
56 #    define ETHR_HAVE_NATIVE_ATOMIC64 1
57 #    define ETHR_NATIVE_ATOMIC64_IMPL "ethread"
58 #    define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
59 #    define ETHR_ATMC_T__ ethr_native_atomic64_t
60 #    define ETHR_AINT_T__ ethr_sint64_t
61 #    define ETHR_AINT_SUFFIX__ "q"
62 #  else
63 #    error "Unsupported integer size"
64 #  endif
65 
/* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations.
 * The counter is declared volatile so that plain set()/read() below
 * compile to real loads/stores of the shared location.
 */
typedef struct {
    volatile ETHR_AINT_T__ counter;
} ETHR_ATMC_T__;
71 
72 #if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
73 
74 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
75 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADDR 1
76 #else
77 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADDR 1
78 #endif
79 
80 static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)81 ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
82 {
83     return (ETHR_AINT_T__ *) &var->counter;
84 }
85 
86 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
87 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_CMPXCHG_MB 1
88 #else
89 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_CMPXCHG_MB 1
90 #endif
91 
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(cmpxchg_mb)(ETHR_ATMC_T__ *var,
			      ETHR_AINT_T__ new_value,
			      ETHR_AINT_T__ old_value)
{
    /* Atomic compare-and-swap with full memory barrier semantics
     * (the lock prefix makes the operation fully ordered on x86).
     *
     * cmpxchg compares the accumulator (constraint "a"/"0", seeded with
     * old_value) against var->counter; on match it stores new_value,
     * otherwise it loads the current value into the accumulator.  Either
     * way old_value ends up holding the value the memory location had
     * before the operation, which is what we return: equal to the caller's
     * old_value iff the swap succeeded. */
    __asm__ __volatile__(
      "lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3"
      : "=a"(old_value), "=m"(var->counter)
      : "r"(new_value), "m"(var->counter), "0"(old_value)
      : "cc", "memory"); /* full memory clobber to make this a compiler barrier */
    return old_value;
}
104 
105 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
106 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_XCHG_MB 1
107 #else
108 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_XCHG_MB 1
109 #endif
110 
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(xchg_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
{
    /* Atomic exchange with full barrier semantics.  xchg with a memory
     * operand is implicitly locked on x86, so no explicit lock prefix is
     * needed; the "memory" clobber additionally makes it a compiler
     * barrier. */
    ETHR_AINT_T__ tmp = val;
    __asm__ __volatile__(
	"xchg" ETHR_AINT_SUFFIX__ " %0, %1"
	: "=r"(tmp)
	: "m"(var->counter), "0"(tmp)
	: "memory");
    /* now tmp is the atomic's previous value */
    return tmp;
}
123 
124 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
125 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET 1
126 #else
127 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET 1
128 #endif
129 
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
    /* Relaxed store: a plain write to the volatile counter, with no
     * ordering guarantees beyond those of x86 itself.  Callers needing
     * release or full-barrier semantics use set_relb()/set_mb(). */
    var->counter = i;
}
135 
136 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
137 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_RELB 1
138 #else
139 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_RELB 1
140 #endif
141 
static ETHR_INLINE void
ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
{
    /* Store with release-barrier semantics: no prior load or store may be
     * reordered past this store. */
#if defined(_M_IX86)
    /* On 32-bit x86 builds that may run on pre-SSE2 hardware (runtime
     * detected), fall back to a fully-ordered locked exchange. */
    if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
	(void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
    else
#endif /* _M_IX86 */
    {
	/* LoadStore|StoreStore before the store gives release ordering;
	 * on x86 this is typically just a compiler barrier. */
	ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
	var->counter = i;
    }
}
155 
156 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
157 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_SET_MB 1
158 #else
159 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_SET_MB 1
160 #endif
161 
162 static ETHR_INLINE void
ETHR_NATMC_FUNC__(set_mb)163 ETHR_NATMC_FUNC__(set_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
164 {
165     (void) ETHR_NATMC_FUNC__(xchg_mb)(var, i);
166 }
167 
168 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
169 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_READ 1
170 #else
171 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_READ 1
172 #endif
173 
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
{
    /* Relaxed read: a plain load of the volatile counter, with no
     * ordering guarantees beyond those of x86 itself. */
    return var->counter;
}
179 
180 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
181 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_MB 1
182 #else
183 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_MB 1
184 #endif
185 
static ETHR_INLINE void
ETHR_NATMC_FUNC__(add_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
    /* Atomic add with full barrier semantics; the result value is not
     * returned (use add_return_mb() for that).  "ir" lets the compiler
     * encode small increments as immediates. */
    __asm__ __volatile__(
       "lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
       : "=m"(var->counter)
       : "ir"(incr), "m"(var->counter)
       : "memory");
}
195 
196 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
197 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_INC_MB 1
198 #else
199 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_INC_MB 1
200 #endif
201 
static ETHR_INLINE void
ETHR_NATMC_FUNC__(inc_mb)(ETHR_ATMC_T__ *var)
{
    /* Atomic increment by one with full barrier semantics, using the
     * dedicated inc instruction (see ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS
     * above). */
    __asm__ __volatile__(
	"lock; inc" ETHR_AINT_SUFFIX__ " %0"
	: "=m"(var->counter)
	: "m"(var->counter)
	: "memory");
}
211 
212 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
213 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_DEC_MB 1
214 #else
215 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_DEC_MB 1
216 #endif
217 
static ETHR_INLINE void
ETHR_NATMC_FUNC__(dec_mb)(ETHR_ATMC_T__ *var)
{
    /* Atomic decrement by one with full barrier semantics, using the
     * dedicated dec instruction. */
    __asm__ __volatile__(
	"lock; dec" ETHR_AINT_SUFFIX__ " %0"
	: "=m"(var->counter)
	: "m"(var->counter)
	: "memory");
}
227 
228 #if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
229 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC32_ADD_RETURN_MB 1
230 #else
231 #  define ETHR_HAVE_ETHR_NATIVE_ATOMIC64_ADD_RETURN_MB 1
232 #endif
233 
static ETHR_INLINE ETHR_AINT_T__
ETHR_NATMC_FUNC__(add_return_mb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
{
    /* Atomic fetch-and-add with full barrier semantics, returning the
     * NEW value (previous value + incr).  xadd leaves the previous memory
     * contents in the register operand. */
    ETHR_AINT_T__ tmp;

    tmp = incr;
    __asm__ __volatile__(
	"lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
	: "=r"(tmp)
	: "m"(var->counter), "0"(tmp)
	: "memory");
    /* now tmp is the atomic's previous value */
    return tmp + incr;
}
248 
249 #endif /* ETHR_TRY_INLINE_FUNCS */
250 
251 #undef ETHR_NATMC_FUNC__
252 #undef ETHR_ATMC_T__
253 #undef ETHR_AINT_T__
254 #undef ETHR_AINT_SUFFIX__
255 
256 #endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */
257