1 /*
2 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP
26 #define OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP
27
28 #include "runtime/atomic.hpp"
29 #include "runtime/os.hpp"
30 #include "vm_version_x86.hpp"
31
32 // Implementation of class atomic
33
store(jbyte store_value,jbyte * dest)34 inline void Atomic::store (jbyte store_value, jbyte* dest) { *dest = store_value; }
store(jshort store_value,jshort * dest)35 inline void Atomic::store (jshort store_value, jshort* dest) { *dest = store_value; }
store(jint store_value,jint * dest)36 inline void Atomic::store (jint store_value, jint* dest) { *dest = store_value; }
store_ptr(intptr_t store_value,intptr_t * dest)37 inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
store_ptr(void * store_value,void * dest)38 inline void Atomic::store_ptr(void* store_value, void* dest) { *(void**)dest = store_value; }
39
store(jbyte store_value,volatile jbyte * dest)40 inline void Atomic::store (jbyte store_value, volatile jbyte* dest) { *dest = store_value; }
store(jshort store_value,volatile jshort * dest)41 inline void Atomic::store (jshort store_value, volatile jshort* dest) { *dest = store_value; }
store(jint store_value,volatile jint * dest)42 inline void Atomic::store (jint store_value, volatile jint* dest) { *dest = store_value; }
store_ptr(intptr_t store_value,volatile intptr_t * dest)43 inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
store_ptr(void * store_value,volatile void * dest)44 inline void Atomic::store_ptr(void* store_value, volatile void* dest) { *(void* volatile *)dest = store_value; }
45
46
// Adding a lock prefix to an instruction on an MP machine.
// The mp operand is tested at runtime: when it is zero (uniprocessor)
// the "lock" prefix is jumped over via the local label 1:, avoiding
// its cost; on MP machines the prefix is executed.
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
49
add(jint add_value,volatile jint * dest)50 inline jint Atomic::add (jint add_value, volatile jint* dest) {
51 jint addend = add_value;
52 int mp = os::is_MP();
53 __asm__ volatile ( LOCK_IF_MP(%3) "xaddl %0,(%2)"
54 : "=r" (addend)
55 : "0" (addend), "r" (dest), "r" (mp)
56 : "cc", "memory");
57 return addend + add_value;
58 }
59
inc(volatile jint * dest)60 inline void Atomic::inc (volatile jint* dest) {
61 int mp = os::is_MP();
62 __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" :
63 : "r" (dest), "r" (mp) : "cc", "memory");
64 }
65
inc_ptr(volatile void * dest)66 inline void Atomic::inc_ptr(volatile void* dest) {
67 inc_ptr((volatile intptr_t*)dest);
68 }
69
dec(volatile jint * dest)70 inline void Atomic::dec (volatile jint* dest) {
71 int mp = os::is_MP();
72 __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" :
73 : "r" (dest), "r" (mp) : "cc", "memory");
74 }
75
dec_ptr(volatile void * dest)76 inline void Atomic::dec_ptr(volatile void* dest) {
77 dec_ptr((volatile intptr_t*)dest);
78 }
79
xchg(jint exchange_value,volatile jint * dest)80 inline jint Atomic::xchg (jint exchange_value, volatile jint* dest) {
81 __asm__ volatile ( "xchgl (%2),%0"
82 : "=r" (exchange_value)
83 : "0" (exchange_value), "r" (dest)
84 : "memory");
85 return exchange_value;
86 }
87
xchg_ptr(void * exchange_value,volatile void * dest)88 inline void* Atomic::xchg_ptr(void* exchange_value, volatile void* dest) {
89 return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
90 }
91
92
cmpxchg(jint exchange_value,volatile jint * dest,jint compare_value)93 inline jint Atomic::cmpxchg (jint exchange_value, volatile jint* dest, jint compare_value) {
94 int mp = os::is_MP();
95 __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
96 : "=a" (exchange_value)
97 : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
98 : "cc", "memory");
99 return exchange_value;
100 }
101
102 #ifdef AMD64
store(jlong store_value,jlong * dest)103 inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
store(jlong store_value,volatile jlong * dest)104 inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
105
add_ptr(intptr_t add_value,volatile intptr_t * dest)106 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
107 intptr_t addend = add_value;
108 bool mp = os::is_MP();
109 __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
110 : "=r" (addend)
111 : "0" (addend), "r" (dest), "r" (mp)
112 : "cc", "memory");
113 return addend + add_value;
114 }
115
add_ptr(intptr_t add_value,volatile void * dest)116 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
117 return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
118 }
119
inc_ptr(volatile intptr_t * dest)120 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
121 bool mp = os::is_MP();
122 __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
123 :
124 : "r" (dest), "r" (mp)
125 : "cc", "memory");
126 }
127
dec_ptr(volatile intptr_t * dest)128 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
129 bool mp = os::is_MP();
130 __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)"
131 :
132 : "r" (dest), "r" (mp)
133 : "cc", "memory");
134 }
135
xchg_ptr(intptr_t exchange_value,volatile intptr_t * dest)136 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
137 __asm__ __volatile__ ("xchgq (%2),%0"
138 : "=r" (exchange_value)
139 : "0" (exchange_value), "r" (dest)
140 : "memory");
141 return exchange_value;
142 }
143
cmpxchg(jlong exchange_value,volatile jlong * dest,jlong compare_value)144 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
145 bool mp = os::is_MP();
146 __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
147 : "=a" (exchange_value)
148 : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
149 : "cc", "memory");
150 return exchange_value;
151 }
152
cmpxchg_ptr(intptr_t exchange_value,volatile intptr_t * dest,intptr_t compare_value)153 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
154 return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
155 }
156
cmpxchg_ptr(void * exchange_value,volatile void * dest,void * compare_value)157 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
158 return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
159 }
160
load(volatile jlong * src)161 inline jlong Atomic::load(volatile jlong* src) { return *src; }
162
163 #else // !AMD64
164
add_ptr(intptr_t add_value,volatile intptr_t * dest)165 inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
166 return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
167 }
168
add_ptr(intptr_t add_value,volatile void * dest)169 inline void* Atomic::add_ptr(intptr_t add_value, volatile void* dest) {
170 return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
171 }
172
173
inc_ptr(volatile intptr_t * dest)174 inline void Atomic::inc_ptr(volatile intptr_t* dest) {
175 inc((volatile jint*)dest);
176 }
177
dec_ptr(volatile intptr_t * dest)178 inline void Atomic::dec_ptr(volatile intptr_t* dest) {
179 dec((volatile jint*)dest);
180 }
181
xchg_ptr(intptr_t exchange_value,volatile intptr_t * dest)182 inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
183 return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
184 }
185
// 64-bit atomic helpers for 32-bit x86, declared here and implemented
// in assembly (bsd_x86.s) since 64-bit atomicity cannot be expressed
// in plain 32-bit C++ code.
extern "C" {
  // defined in bsd_x86.s
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
  void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
}
191
cmpxchg(jlong exchange_value,volatile jlong * dest,jlong compare_value)192 inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
193 return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
194 }
195
cmpxchg_ptr(intptr_t exchange_value,volatile intptr_t * dest,intptr_t compare_value)196 inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value) {
197 return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
198 }
199
cmpxchg_ptr(void * exchange_value,volatile void * dest,void * compare_value)200 inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
201 return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
202 }
203
load(volatile jlong * src)204 inline jlong Atomic::load(volatile jlong* src) {
205 volatile jlong dest;
206 _Atomic_move_long(src, &dest);
207 return dest;
208 }
209
store(jlong store_value,jlong * dest)210 inline void Atomic::store(jlong store_value, jlong* dest) {
211 _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
212 }
213
store(jlong store_value,volatile jlong * dest)214 inline void Atomic::store(jlong store_value, volatile jlong* dest) {
215 _Atomic_move_long((volatile jlong*)&store_value, dest);
216 }
217
218 #endif // AMD64
219
220 #endif // OS_CPU_BSD_X86_VM_ATOMIC_BSD_X86_INLINE_HPP
221