/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2011-2020. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Description: Native double word atomics using gcc's __atomic
 *              and __sync builtins
 * Author: Rickard Green
 *
 * Note: The C11 memory model implemented by gcc's __atomic
 *       builtins does not match the ethread API very well.
 *
 *       Due to this we cannot use the __ATOMIC_SEQ_CST
 *       memory model. For more information see the comment
 *       in the beginning of ethr_membar.h in this directory.
 */

#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
#if !defined(ETHR_GCC_ATOMIC_DW_ATOMIC_H__)				\
    && ((ETHR_HAVE___sync_val_compare_and_swap & (2*ETHR_SIZEOF_PTR))	\
	|| (ETHR_HAVE___atomic_compare_exchange_n & (2*ETHR_SIZEOF_PTR)))
#  define ETHR_GCC_ATOMIC_DW_ATOMIC_H__
#  define ETHR_INCLUDE_DW_ATOMIC_IMPL__
#endif
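
/*
 * Added note (not from the original source): the ETHR_HAVE___* macros
 * are assumed to be bitmasks of the operand sizes, in bytes, that the
 * corresponding builtin supports. Testing the 2*ETHR_SIZEOF_PTR bit
 * therefore asks for double word support, i.e. 8 byte operands on
 * 32-bit systems and 16 byte operands on 64-bit systems.
 */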

#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__
#  define ETHR_HAVE_NATIVE_SU_DW_ATOMIC

#if ((ETHR_HAVE___sync_val_compare_and_swap & (2*ETHR_SIZEOF_PTR))	\
     && (ETHR_HAVE___atomic_compare_exchange_n & (2*ETHR_SIZEOF_PTR)))
#  define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc_atomic_and_sync_builtins"
#elif (ETHR_HAVE___atomic_compare_exchange_n & (2*ETHR_SIZEOF_PTR))
#  define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc_atomic_builtins"
#elif (ETHR_HAVE___sync_val_compare_and_swap & (2*ETHR_SIZEOF_PTR))
#  define ETHR_NATIVE_DW_ATOMIC_IMPL "gcc_sync_builtins"
#else
#  error "Inconsistent feature tests; no double word builtin found"
#endif

#  if ETHR_SIZEOF_PTR == 4
#    define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
#    define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
#  elif ETHR_SIZEOF_PTR == 8
#    define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
#    define ETHR_NATIVE_SU_DW_SINT_T ethr_sint128_t
#  endif

typedef volatile ETHR_NATIVE_SU_DW_SINT_T * ethr_native_dw_ptr_t;

/*
 * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
 * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
 * not common, and at least some glibc malloc implementations
 * align only to 4 bytes in 32-bit mode.
 *
 * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
 * aligned memory in 32-bit mode. A malloc implementation that does
 * not adhere to these alignment requirements is seriously broken,
 * and we won't bother trying to work around it.
 *
 * Since memory alignment may be off by one word we need to align at
 * runtime, and therefore need one extra word allocated.
 */
#define ETHR_DW_NATMC_MEM__(VAR) \
   (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
typedef union {
    volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
    volatile ethr_sint_t sint[3];
    volatile char c[ETHR_SIZEOF_PTR*3];
} ethr_native_dw_atomic_t;
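
/*
 * Added worked example (illustration only, not from the original
 * source): on a 64-bit system c[] is 24 bytes (ETHR_SIZEOF_PTR*3)
 * and the double word needs 16 byte alignment. Since malloc is
 * assumed to return at least 8 byte aligned memory, &c[0] is
 * congruent to either 0 or 8 modulo 16. In the first case the masked
 * offset computed by ETHR_DW_NATMC_MEM__() is 0; in the second it is
 * 8, and &c[8] is 16 byte aligned. Either way the 16 byte double
 * word fits within the 24 byte buffer.
 */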

#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)

#  ifdef ETHR_DEBUG
#    define ETHR_DW_DBG_ALIGNED__(PTR) \
       ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
#  else
#    define ETHR_DW_DBG_ALIGNED__(PTR)
#  endif

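/*
 * Added note on naming (not from the original source): per the
 * ethread convention, the _relb, _acqb and _mb suffixes below are
 * assumed to denote release barrier, acquire barrier and full memory
 * barrier variants of an operation; unsuffixed variants are relaxed.
 */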

#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR 1

static ETHR_INLINE ethr_sint_t *
ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
{
    return (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
}
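
/*
 * Added note (not from the original source): the address returned
 * above is the aligned address of the double word storage itself,
 * which generally differs from &var; callers accessing the value
 * directly must use this address.
 */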

#if (ETHR_HAVE___atomic_store_n & (2*ETHR_SIZEOF_PTR))

#if (ETHR_GCC_RELAXED_VERSIONS__ & (2*ETHR_SIZEOF_PTR))

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET 1

static ETHR_INLINE void
ethr_native_su_dw_atomic_set(ethr_native_dw_atomic_t *var,
			     ETHR_NATIVE_SU_DW_SINT_T value)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    __atomic_store_n(p, value, __ATOMIC_RELAXED);
}

#endif /* ETHR_GCC_RELAXED_VERSIONS__ */

#if (ETHR_GCC_RELB_VERSIONS__ & (2*ETHR_SIZEOF_PTR))

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_SET_RELB 1

static ETHR_INLINE void
ethr_native_su_dw_atomic_set_relb(ethr_native_dw_atomic_t *var,
				  ETHR_NATIVE_SU_DW_SINT_T value)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    __atomic_store_n(p, value, __ATOMIC_RELEASE);
}

#endif /* ETHR_GCC_RELB_VERSIONS__ */

#endif /* ETHR_HAVE___atomic_store_n */

#if (ETHR_HAVE___atomic_load_n & (2*ETHR_SIZEOF_PTR))

#if (ETHR_GCC_RELAXED_VERSIONS__ & (2*ETHR_SIZEOF_PTR))

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ 1

static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
ethr_native_su_dw_atomic_read(ethr_native_dw_atomic_t *var)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return __atomic_load_n(p, __ATOMIC_RELAXED);
}

#endif /* ETHR_GCC_RELAXED_VERSIONS__ */

#if ((ETHR_GCC_ACQB_VERSIONS__ & (2*ETHR_SIZEOF_PTR))	\
     & ~ETHR___atomic_load_ACQUIRE_barrier_bug)
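/*
 * Added note (not from the original source):
 * ETHR___atomic_load_ACQUIRE_barrier_bug is assumed to mask out gcc
 * versions where __atomic_load_n(..., __ATOMIC_ACQUIRE) does not
 * provide the expected barrier; see ethr_membar.h for details.
 */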

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_READ_ACQB 1

static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
ethr_native_su_dw_atomic_read_acqb(ethr_native_dw_atomic_t *var)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return __atomic_load_n(p, __ATOMIC_ACQUIRE);
}

#endif /* ETHR_GCC_ACQB_VERSIONS__ */

#endif /* ETHR_HAVE___atomic_load_n */

#if (ETHR_HAVE___atomic_compare_exchange_n & (2*ETHR_SIZEOF_PTR))

#if (ETHR_GCC_RELAXED_MOD_VERSIONS__ & (2*ETHR_SIZEOF_PTR))

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG 1

static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
ethr_native_su_dw_atomic_cmpxchg(ethr_native_dw_atomic_t *var,
				 ETHR_NATIVE_SU_DW_SINT_T new,
				 ETHR_NATIVE_SU_DW_SINT_T exp)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_NATIVE_SU_DW_SINT_T xchg = exp;
    ETHR_DW_DBG_ALIGNED__(p);
    if (__atomic_compare_exchange_n(p,
				    &xchg,
				    new,
				    0,
				    __ATOMIC_RELAXED,
				    __ATOMIC_RELAXED))
	return exp;
    return xchg;
}
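
/*
 * Added usage sketch (illustration only, not from the original
 * source): the cmpxchg variants return `exp` on success and the
 * value actually read on failure, so a typical retry loop looks
 * like:
 *
 *     ETHR_NATIVE_SU_DW_SINT_T exp;
 *     ETHR_NATIVE_SU_DW_SINT_T act = ethr_native_su_dw_atomic_read(var);
 *     do {
 *         exp = act;
 *         act = ethr_native_su_dw_atomic_cmpxchg(var, compute_new(exp), exp);
 *     } while (act != exp);
 *
 * where compute_new() is a hypothetical helper deriving the new
 * value from the observed one.
 */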

#endif /* ETHR_GCC_RELAXED_MOD_VERSIONS__ */

#if (ETHR_GCC_ACQB_MOD_VERSIONS__ & (2*ETHR_SIZEOF_PTR))

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_ACQB 1

static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
ethr_native_su_dw_atomic_cmpxchg_acqb(ethr_native_dw_atomic_t *var,
				      ETHR_NATIVE_SU_DW_SINT_T new,
				      ETHR_NATIVE_SU_DW_SINT_T exp)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_NATIVE_SU_DW_SINT_T xchg = exp;
    ETHR_DW_DBG_ALIGNED__(p);
    if (__atomic_compare_exchange_n(p,
				    &xchg,
				    new,
				    0,
				    __ATOMIC_ACQUIRE,
				    __ATOMIC_ACQUIRE))
	return exp;
    return xchg;
}

#endif /* ETHR_GCC_ACQB_MOD_VERSIONS__ */

#endif /* ETHR_HAVE___atomic_compare_exchange_n */

#if ((ETHR_HAVE___sync_val_compare_and_swap & (2*ETHR_SIZEOF_PTR)) \
     & ETHR_GCC_MB_MOD_VERSIONS__)

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB 1

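/*
 * Added note (not from the original source): gcc documents the
 * __sync builtins, including __sync_val_compare_and_swap, as
 * implying a full memory barrier, which is exactly what this _mb
 * variant requires.
 */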
static ETHR_INLINE ETHR_NATIVE_SU_DW_SINT_T
ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
				    ETHR_NATIVE_SU_DW_SINT_T new,
				    ETHR_NATIVE_SU_DW_SINT_T old)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return __sync_val_compare_and_swap(p, old, new);
}

#endif /* ETHR_HAVE___sync_val_compare_and_swap */

#endif /* ETHR_TRY_INLINE_FUNCS */

#endif /* ETHR_INCLUDE_DW_ATOMIC_IMPL__ */