/*
 * Copyright (c) 2007 Pawel Jakub Dawidek <pjd@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _OPENSOLARIS_SYS_ATOMIC_H_
#define _OPENSOLARIS_SYS_ATOMIC_H_

#ifndef _STANDALONE

#include <sys/types.h>
#include <machine/atomic.h>

#define atomic_sub_64   atomic_subtract_64

#if defined(__i386__) && (defined(_KERNEL) || defined(KLD_MODULE))
#define I386_HAVE_ATOMIC64
#endif

#if defined(__i386__) || defined(__amd64__) || defined(__arm__)
/* No spurious failures from fcmpset. */
#define STRONG_FCMPSET
#endif

/*
 * On 32-bit platforms without native (or emulated) 64-bit atomics, the
 * 64-bit operations are provided out of line.
 */
#if !defined(__LP64__) && !defined(__mips_n32) && \
    !defined(ARM_HAVE_ATOMIC64) && !defined(I386_HAVE_ATOMIC64) && \
    !defined(HAS_EMULATED_ATOMIC64)
extern void atomic_add_64(volatile uint64_t *target, int64_t delta);
extern void atomic_dec_64(volatile uint64_t *target);
extern uint64_t atomic_swap_64(volatile uint64_t *a, uint64_t value);
extern uint64_t atomic_load_64(volatile uint64_t *a);
extern uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta);
extern uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
    uint64_t newval);
#endif

#define membar_producer atomic_thread_fence_rel

static __inline uint32_t
atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
{
        return (atomic_fetchadd_32(target, delta) + delta);
}

static __inline uint_t
atomic_add_int_nv(volatile uint_t *target, int delta)
{
        return (atomic_add_32_nv(target, delta));
}

static __inline void
atomic_inc_32(volatile uint32_t *target)
{
        atomic_add_32(target, 1);
}

static __inline uint32_t
atomic_inc_32_nv(volatile uint32_t *target)
{
        return (atomic_add_32_nv(target, 1));
}

static __inline void
atomic_dec_32(volatile uint32_t *target)
{
        atomic_subtract_32(target, 1);
}

static __inline uint32_t
atomic_dec_32_nv(volatile uint32_t *target)
{
        return (atomic_add_32_nv(target, -1));
}

#ifndef __sparc64__
static inline uint32_t
atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t newval)
{
#ifdef STRONG_FCMPSET
        (void) atomic_fcmpset_32(target, &cmp, newval);
#else
        uint32_t expected = cmp;

        /*
         * fcmpset may fail spuriously here; retry as long as the value
         * observed on failure still equals the original comparand.
         */
        do {
                if (atomic_fcmpset_32(target, &cmp, newval))
                        break;
        } while (cmp == expected);
#endif
        return (cmp);
}
#endif

#if defined(__LP64__) || defined(__mips_n32) || \
    defined(ARM_HAVE_ATOMIC64) || defined(I386_HAVE_ATOMIC64) || \
    defined(HAS_EMULATED_ATOMIC64)
static __inline void
atomic_dec_64(volatile uint64_t *target)
{
        atomic_subtract_64(target, 1);
}

static inline uint64_t
atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
{
        return (atomic_fetchadd_64(target, delta) + delta);
}

#ifndef __sparc64__
static inline uint64_t
atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t newval)
{
#ifdef STRONG_FCMPSET
        (void) atomic_fcmpset_64(target, &cmp, newval);
#else
        uint64_t expected = cmp;

        /* Same spurious-failure retry as atomic_cas_32(). */
        do {
                if (atomic_fcmpset_64(target, &cmp, newval))
                        break;
        } while (cmp == expected);
#endif
        return (cmp);
}
#endif
#endif

static __inline void
atomic_inc_64(volatile uint64_t *target)
{
        atomic_add_64(target, 1);
}

static __inline uint64_t
atomic_inc_64_nv(volatile uint64_t *target)
{
        return (atomic_add_64_nv(target, 1));
}

static __inline uint64_t
atomic_dec_64_nv(volatile uint64_t *target)
{
        return (atomic_add_64_nv(target, -1));
}

#if !defined(COMPAT_32BIT) && defined(__LP64__)
static __inline void *
atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
{
        return ((void *)atomic_cas_64((volatile uint64_t *)target,
            (uint64_t)cmp, (uint64_t)newval));
}
#else
static __inline void *
atomic_cas_ptr(volatile void *target, void *cmp, void *newval)
{
        return ((void *)atomic_cas_32((volatile uint32_t *)target,
            (uint32_t)cmp, (uint32_t)newval));
}
#endif  /* !defined(COMPAT_32BIT) && defined(__LP64__) */

#else /* _STANDALONE */
/*
 * Sometimes atomic_add_64 is defined, sometimes not, but the
 * following is always right for the boot loader.
 */
#undef atomic_add_64
#define atomic_add_64(ptr, val) *(ptr) += val
#undef atomic_sub_64
#define atomic_sub_64(ptr, val) *(ptr) -= val
#endif /* !_STANDALONE */

#endif /* !_OPENSOLARIS_SYS_ATOMIC_H_ */
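/*
 * Usage sketch (illustrative only, not part of the compat API): the
 * Solaris-style atomic_cas_*() shims above return the value observed
 * before the operation, so a successful exchange is detected by comparing
 * that return value against the comparand.  The helper name below,
 * example_fetch_max_32(), is hypothetical; the block is disabled so it
 * has no effect on code that includes this header.
 */
#if 0
static __inline uint32_t
example_fetch_max_32(volatile uint32_t *target, uint32_t value)
{
        uint32_t old;

        do {
                old = *target;
                if (old >= value)
                        break;          /* Current value already large enough. */
        } while (atomic_cas_32(target, old, value) != old);

        return (old);
}
#endif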