/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2011-2020. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Description: Native double word atomics for Windows
 * Author: Rickard Green
 */

#undef ETHR_INCLUDE_DW_ATOMIC_IMPL__
#ifndef ETHR_X86_DW_ATOMIC_H__
#  define ETHR_X86_DW_ATOMIC_H__
#  if ((ETHR_SIZEOF_PTR == 4 \
        && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE64)) \
       || (ETHR_SIZEOF_PTR == 8 \
           && defined(ETHR_HAVE__INTERLOCKEDCOMPAREEXCHANGE128)))
#    define ETHR_INCLUDE_DW_ATOMIC_IMPL__
#  endif
#endif

#ifdef ETHR_INCLUDE_DW_ATOMIC_IMPL__

#  if ETHR_SIZEOF_PTR == 4
#    define ETHR_HAVE_NATIVE_SU_DW_ATOMIC
#  else
#    define ETHR_HAVE_NATIVE_DW_ATOMIC
#  endif
#  define ETHR_NATIVE_DW_ATOMIC_IMPL "windows-interlocked"

#  if defined(_M_IX86) || defined(_M_AMD64)
/*
 * If ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, it is evaluated
 * at runtime in order to determine whether the native or the fallback
 * implementation should be used.
 */
#    define ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ \
       ETHR_X86_RUNTIME_CONF_HAVE_DW_CMPXCHG__
#  endif
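
/*
 * Illustrative sketch (assumed caller code, not part of this header):
 * when ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__ is defined, higher level
 * code is expected to branch on it at runtime, roughly like this:
 *
 *     if (ETHR_RTCHK_USE_NATIVE_DW_ATOMIC_IMPL__) {
 *         ... use the native implementation defined below ...
 *     }
 *     else {
 *         ... use the generic fallback implementation ...
 *     }
 *
 * The actual dispatch code lives outside this file; the above only
 * shows how the macro is intended to be used.
 */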

#  include <intrin.h>
#  if ETHR_SIZEOF_PTR == 4
#    pragma intrinsic(_InterlockedCompareExchange64)
#    define ETHR_DW_NATMC_ALIGN_MASK__ 0x7
#    define ETHR_NATIVE_SU_DW_SINT_T ethr_sint64_t
#  else
#    pragma intrinsic(_InterlockedCompareExchange128)
#    define ETHR_DW_NATMC_ALIGN_MASK__ 0xf
#  endif

typedef volatile __int64 * ethr_native_dw_ptr_t;

/*
 * We need 16 byte aligned memory in 64-bit mode, and 8 byte aligned
 * memory in 32-bit mode. 16 byte aligned malloc in 64-bit mode is
 * not common, and at least some glibc malloc implementations only
 * guarantee 4 byte alignment in 32-bit mode.
 *
 * This code assumes 8 byte aligned memory in 64-bit mode, and 4 byte
 * aligned memory in 32-bit mode. A malloc implementation that does
 * not adhere to these alignment requirements is seriously broken,
 * and we won't bother trying to work around it.
 *
 * Since memory alignment may be off by one word we need to align at
 * runtime. We, therefore, need an extra word allocated.
 */
#define ETHR_DW_NATMC_MEM__(VAR) \
   (&(VAR)->c[(int) ((ethr_uint_t) &(VAR)->c[0]) & ETHR_DW_NATMC_ALIGN_MASK__])
typedef union {
#ifdef ETHR_NATIVE_SU_DW_SINT_T
    volatile ETHR_NATIVE_SU_DW_SINT_T dw_sint;
#endif
    volatile ethr_sint_t sint[3];
    volatile char c[ETHR_SIZEOF_PTR*3];
} ethr_native_dw_atomic_t;
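
/*
 * Worked example of the alignment fix-up above (illustrative only).
 * Assume a 64-bit build (ETHR_DW_NATMC_ALIGN_MASK__ == 0xf) and that
 * the allocator returned an 8 byte, but not 16 byte, aligned variable,
 * e.g. &(VAR)->c[0] == 0x1008:
 *
 *     0x1008 & 0xf == 0x8
 *     ETHR_DW_NATMC_MEM__(VAR) == &(VAR)->c[0x8] == (char *) 0x1010
 *
 * which is 16 byte aligned. If the variable already is 16 byte aligned
 * the offset is 0 and the address is unchanged. This is why the union
 * reserves three words: up to one extra word may be consumed by the
 * fix-up.
 */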

#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)

#ifdef ETHR_DEBUG
#  define ETHR_DW_DBG_ALIGNED__(PTR) \
     ETHR_ASSERT((((ethr_uint_t) (PTR)) & ETHR_DW_NATMC_ALIGN_MASK__) == 0);
#else
#  define ETHR_DW_DBG_ALIGNED__(PTR)
#endif

#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_ADDR

static ETHR_INLINE ethr_sint_t *
ethr_native_dw_atomic_addr(ethr_native_dw_atomic_t *var)
{
    ethr_sint_t *p = (ethr_sint_t *) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return p;
}


#if ETHR_SIZEOF_PTR == 4

#define ETHR_HAVE_ETHR_NATIVE_SU_DW_ATOMIC_CMPXCHG_MB

static ETHR_INLINE ethr_sint64_t
ethr_native_su_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
                                    ethr_sint64_t new,
                                    ethr_sint64_t exp)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return (ethr_sint64_t) _InterlockedCompareExchange64(p, new, exp);
}
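
/*
 * Illustrative usage sketch (assumed caller code, not part of this
 * header): the operation above returns the value found in memory, so a
 * typical read-modify-write retry loop looks roughly like this (the
 * helper name my_dw_inc is hypothetical):
 *
 *     static void my_dw_inc(ethr_native_dw_atomic_t *var)
 *     {
 *         ethr_sint64_t act = 0, exp, new;
 *         do {
 *             exp = act;
 *             new = exp + 1;
 *             act = ethr_native_su_dw_atomic_cmpxchg_mb(var, new, exp);
 *         } while (act != exp);
 *     }
 */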

#elif ETHR_SIZEOF_PTR == 8

#define ETHR_HAVE_ETHR_NATIVE_DW_ATOMIC_CMPXCHG_MB

#ifdef ETHR_BIGENDIAN
#  define ETHR_WIN_LOW_WORD__ 1
#  define ETHR_WIN_HIGH_WORD__ 0
#else
#  define ETHR_WIN_LOW_WORD__ 0
#  define ETHR_WIN_HIGH_WORD__ 1
#endif

static ETHR_INLINE int
ethr_native_dw_atomic_cmpxchg_mb(ethr_native_dw_atomic_t *var,
                                 ethr_sint_t *new,
                                 ethr_sint_t *xchg)
{
    ethr_native_dw_ptr_t p = (ethr_native_dw_ptr_t) ETHR_DW_NATMC_MEM__(var);
    ETHR_DW_DBG_ALIGNED__(p);
    return (int) _InterlockedCompareExchange128(p,
                                                new[ETHR_WIN_HIGH_WORD__],
                                                new[ETHR_WIN_LOW_WORD__],
                                                xchg);
}
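
/*
 * Note on semantics (as documented for the _InterlockedCompareExchange128()
 * intrinsic): the return value is 1 if the 128-bit value at *p equaled the
 * comparand and was replaced by new[], and 0 otherwise. The comparand array
 * (xchg[] above) is overwritten with the value found at *p, so after a
 * failed call it holds the currently stored value and can be used directly
 * in a retry. Illustrative caller-side sketch (assumed code, not part of
 * this header; carry into the high word ignored for brevity):
 *
 *     ethr_sint_t exp[2] = {0, 0}, new[2];
 *     do {
 *         new[ETHR_WIN_LOW_WORD__]  = exp[ETHR_WIN_LOW_WORD__] + 1;
 *         new[ETHR_WIN_HIGH_WORD__] = exp[ETHR_WIN_HIGH_WORD__];
 *     } while (!ethr_native_dw_atomic_cmpxchg_mb(var, new, exp));
 */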

#endif

#endif /* ETHR_TRY_INLINE_FUNCS */

#endif /* ETHR_INCLUDE_DW_ATOMIC_IMPL__ */