/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2016 François Tigeot
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef	_LINUX_ATOMIC_H_
#define	_LINUX_ATOMIC_H_

#include <asm/atomic.h>

typedef struct {
	volatile u_int counter;
} atomic_t;

typedef struct {
	volatile u_long counter;
} atomic64_t;

#define	atomic_add(i, v)		atomic_add_return((i), (v))
#define	atomic_sub(i, v)		atomic_sub_return((i), (v))
#define	atomic_inc_return(v)		atomic_add_return(1, (v))
#define	atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define	atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define	atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define	atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define	atomic_dec_return(v)		atomic_sub_return(1, (v))

#define	atomic64_add(i, v)		atomic_add_return_long((i), (v))
#define	atomic64_sub(i, v)		atomic_sub_return_long((i), (v))

#define	atomic_xchg(p, v)		atomic_swap_int(&((p)->counter), v)
#define	atomic64_xchg(p, v)		atomic_swap_long(&((p)->counter), v)

#define	atomic_cmpset(p, o, n)		atomic_cmpset_32(&((p)->counter), o, n)

static inline int
atomic_add_return(int i, atomic_t *v)
{
	return i + atomic_fetchadd_int(&v->counter, i);
}

static inline int64_t
atomic_add_return_long(int64_t i, atomic64_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -i) - i;
}

static inline int64_t
atomic_sub_return_long(int64_t i, atomic64_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

static inline void
atomic_set(atomic_t *v, int i)
{
	atomic_store_rel_int(&v->counter, i);
}

static inline void
atomic64_set(atomic64_t *v, long i)
{
	atomic_store_rel_long(&v->counter, i);
}

static inline int
atomic_read(atomic_t *v)
{
	return atomic_load_acq_int(&v->counter);
}

static inline int64_t
atomic64_read(atomic64_t *v)
{
	return atomic_load_acq_long(&v->counter);
}
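
/*
 * Usage sketch (illustrative only, not part of this header): a minimal
 * reference counter built on the accessors above.  All names below
 * (my_obj, my_obj_init, my_obj_hold, my_obj_release) are hypothetical;
 * atomic_set(), atomic_inc() and atomic_dec_and_test() are the real
 * primitives defined in this file.  my_obj_init() gives the creator one
 * reference, my_obj_hold() takes an additional reference (the caller
 * must already own one), and my_obj_release() returns non-zero once the
 * last reference is dropped, at which point the object may be freed.
 *
 *	struct my_obj {
 *		atomic_t	refcnt;
 *	};
 *
 *	static inline void
 *	my_obj_init(struct my_obj *o)
 *	{
 *		atomic_set(&o->refcnt, 1);
 *	}
 *
 *	static inline void
 *	my_obj_hold(struct my_obj *o)
 *	{
 *		atomic_inc(&o->refcnt);
 *	}
 *
 *	static inline int
 *	my_obj_release(struct my_obj *o)
 *	{
 *		return atomic_dec_and_test(&o->refcnt);
 *	}
 */
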
static inline int
atomic_inc(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, 1) + 1;
}

static inline int
atomic_dec(atomic_t *v)
{
	return atomic_fetchadd_int(&v->counter, -1) - 1;
}

static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return atomic_cmpxchg_int(&v->counter, old, new);
}

/*
 * Atomically add 'add' to *v unless *v == unless.
 * Returns non-zero if the addition was performed.
 */
static inline int
atomic_add_unless(atomic_t *v, int add, int unless)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == unless))
			break;
		old = atomic_cmpxchg_int(&v->counter, c, c + add);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != unless;
}

#define	atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)

/* atomic_clear_mask: atomically clear the bits set in mask from *addr */
#define	atomic_clear_mask(mask, addr)					\
	/* atomic *addr &= ~mask; */					\
	__asm __volatile("lock andl %1, %0"				\
	: "+m" (*(addr))						\
	: "r" (~(mask))							\
	: "memory")

#define	smp_mb__before_atomic()		cpu_ccfence()
#define	smp_mb__after_atomic()		cpu_ccfence()

#endif	/* _LINUX_ATOMIC_H_ */
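
/*
 * Usage sketch (illustrative only): the lookup-with-refcount pattern that
 * atomic_inc_not_zero() above is meant for.  An object found in a shared
 * table may be concurrently dropping its last reference; taking a new
 * reference is only safe while the count is still non-zero.  The names
 * my_obj and my_table_find() are hypothetical.
 *
 *	struct my_obj *
 *	my_obj_lookup(int key)
 *	{
 *		struct my_obj *o = my_table_find(key);
 *
 *		if (o != NULL && !atomic_inc_not_zero(&o->refcnt))
 *			o = NULL;
 *		return o;
 *	}
 */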