/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2011-2016. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Description: Memory barriers for x86/x86-64
 * Author: Rickard Green
 */

#ifndef ETHR_X86_MEMBAR_H__
#define ETHR_X86_MEMBAR_H__

#define ETHR_LoadLoad	(1 << 0)
#define ETHR_LoadStore	(1 << 1)
#define ETHR_StoreLoad	(1 << 2)
#define ETHR_StoreStore	(1 << 3)
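
/*
 * The constants above are bit flags and may be or:ed together when
 * requesting a barrier. A hedged usage sketch (ETHR_MEMBAR() is
 * defined near the bottom of this file):
 *
 *     ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore); // acquire-style ordering
 *     ETHR_MEMBAR(ETHR_StoreLoad);               // strongest barrier on x86
 */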

#define ETHR_NO_SSE2_MEMORY_BARRIER__			\
do {							\
    volatile ethr_sint32_t x__ = 0;			\
    __asm__ __volatile__ ("lock; orl $0x0, %0\n\t"	\
			  : "=m"(x__)			\
			  : "m"(x__)			\
			  : "memory");			\
} while (0)
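
/*
 * A note on the fallback above: it targets 32-bit processors without
 * SSE2, which lack the mfence/sfence/lfence instructions. Any locked
 * instruction, such as the "lock; orl" on a dummy stack variable,
 * orders all prior loads and stores before all later ones, i.e. it
 * acts as a full memory barrier on x86.
 */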

static __inline__ void
ethr_cfence__(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}
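
/*
 * ethr_cfence__() is a compiler barrier only: it emits no instruction,
 * but prevents the compiler from reordering memory accesses across it.
 * On x86 this suffices for all orderings except StoreLoad, since the
 * hardware itself keeps ordinary loads and stores LoadLoad, LoadStore,
 * and StoreStore ordered.
 */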

static __inline__ void
ethr_mfence__(void)
{
#if ETHR_SIZEOF_PTR == 4
    if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
	ETHR_NO_SSE2_MEMORY_BARRIER__;
    else
#endif
	__asm__ __volatile__ ("mfence\n\t" : : : "memory");
}

static __inline__ void
ethr_sfence__(void)
{
#if ETHR_SIZEOF_PTR == 4
    if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
	ETHR_NO_SSE2_MEMORY_BARRIER__;
    else
#endif
	__asm__ __volatile__ ("sfence\n\t" : : : "memory");
}

static __inline__ void
ethr_lfence__(void)
{
#if ETHR_SIZEOF_PTR == 4
    if (ETHR_X86_RUNTIME_CONF_HAVE_NO_SSE2__)
	ETHR_NO_SSE2_MEMORY_BARRIER__;
    else
#endif
	__asm__ __volatile__ ("lfence\n\t" : : : "memory");
}
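
/*
 * The runtime SSE2 test in the functions above is only needed for
 * 32-bit builds (ETHR_SIZEOF_PTR == 4); the x86-64 architecture
 * mandates SSE2, so 64-bit builds always use the fence instructions
 * directly.
 */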

#define ETHR_X86_OUT_OF_ORDER_MEMBAR(B)				\
  ETHR_CHOOSE_EXPR((B) == ETHR_StoreStore,			\
		   ethr_sfence__(),				\
		   ETHR_CHOOSE_EXPR((B) == ETHR_LoadLoad,	\
				    ethr_lfence__(),		\
				    ethr_mfence__()))
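
/*
 * ETHR_X86_OUT_OF_ORDER_MEMBAR() always emits a fence instruction:
 * sfence for a pure StoreStore barrier, lfence for a pure LoadLoad
 * barrier, and mfence for any other combination. ETHR_CHOOSE_EXPR()
 * (defined elsewhere in ethread) is expected to select a branch at
 * compile time when the barrier argument is a constant.
 */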

#ifdef ETHR_X86_OUT_OF_ORDER

#define ETHR_MEMBAR(B) \
  ETHR_X86_OUT_OF_ORDER_MEMBAR((B))

#else /* !ETHR_X86_OUT_OF_ORDER (the default) */

/*
 * We assume that only stores before loads may be reordered. That is,
 * we assume that *no* instructions like these are used:
 * - CLFLUSH,
 * - streaming stores executed with non-temporal move,
 * - string operations, or
 * - other instructions which aren't LoadLoad, LoadStore, and StoreStore
 *   ordered by themselves.
 * If such instructions are used, either insert memory barriers
 * using ETHR_X86_OUT_OF_ORDER_MEMBAR() at appropriate places, or
 * define ETHR_X86_OUT_OF_ORDER. For more info see the Intel 64 and IA-32
 * Architectures Software Developer's Manual; Vol 3A; Chapter 8.2.2.
 */
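
/*
 * A hedged sketch of the kind of code the note above warns about. The
 * variables p, v, and flag are hypothetical, and _mm_stream_si32()
 * (from <emmintrin.h>, SSE2) performs a non-temporal store, which is
 * not StoreStore ordered with respect to ordinary stores:
 *
 *     _mm_stream_si32((int *) p, v);                 // streaming store
 *     ETHR_X86_OUT_OF_ORDER_MEMBAR(ETHR_StoreStore); // emits sfence
 *     flag = 1;                                      // ordered after *p
 */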

#define ETHR_MEMBAR(B) \
  ETHR_CHOOSE_EXPR((B) & ETHR_StoreLoad, ethr_mfence__(), ethr_cfence__())
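
/*
 * In this default configuration only barriers including StoreLoad cost
 * a real instruction; all others reduce to a compiler barrier. A hedged
 * sketch (x, y, and r are hypothetical ethr_sint32_t variables):
 *
 *     x = 1;
 *     ETHR_MEMBAR(ETHR_StoreLoad);  // emits mfence
 *     r = y;                        // load cannot pass the store to x
 *
 *     x = 1;
 *     ETHR_MEMBAR(ETHR_StoreStore); // compiler barrier only
 *     y = 1;
 */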

#endif /* !ETHR_X86_OUT_OF_ORDER */

#endif /* ETHR_X86_MEMBAR_H__ */