#ifndef ATOMIC_SOLARIS_INCLUDED
#define ATOMIC_SOLARIS_INCLUDED

/* Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; version 2 of the License.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1335  USA */

#include <atomic.h>

#if defined(__GNUC__)
#define atomic_typeof(T,V)      __typeof__(V)
#else
#define atomic_typeof(T,V)      T
#endif

/*
  Compare-and-swap: if *a equals *cmp, atomically store set into *a and
  return non-zero; otherwise copy the current value of *a into *cmp and
  return zero.  The 64-bit and pointer variants below follow the same
  convention.
*/
static inline int my_atomic_cas32(int32 volatile *a, int32 *cmp, int32 set)
{
  int ret;
  atomic_typeof(uint32_t, *cmp) sav;
  sav= atomic_cas_32((volatile uint32_t *)a, (uint32_t)*cmp, (uint32_t)set);
  ret= (sav == *cmp);
  if (!ret)
    *cmp= sav;
  return ret;
}

/* 64-bit variant of my_atomic_cas32(). */
static inline int my_atomic_cas64(int64 volatile *a, int64 *cmp, int64 set)
{
  int ret;
  atomic_typeof(uint64_t, *cmp) sav;
  sav= atomic_cas_64((volatile uint64_t *)a, (uint64_t)*cmp, (uint64_t)set);
  ret= (sav == *cmp);
  if (!ret)
    *cmp= sav;
  return ret;
}

/* Pointer variant of my_atomic_cas32(). */
static inline int my_atomic_casptr(void * volatile *a, void **cmp, void *set)
{
  int ret;
  atomic_typeof(void *, *cmp) sav;
  sav= atomic_cas_ptr((volatile void **)a, (void *)*cmp, (void *)set);
  ret= (sav == *cmp);
  if (!ret)
    *cmp= sav;
  return ret;
}
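
/*
  Illustrative sketch of the usual retry loop built on the CAS wrappers
  above ("counter" is a hypothetical int32 variable, not something declared
  in this header):

    int32 old= my_atomic_load32(&counter);
    int32 newval;
    do
    {
      newval= old * 2;
    } while (!my_atomic_cas32(&counter, &old, newval));

  On failure my_atomic_cas32() stores the current value of the target into
  "old", so each retry recomputes newval from the value actually observed.
*/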

/*
  Fetch-and-add: atomically add v to *a and return the value *a held before
  the addition (atomic_add_32_nv() returns the new value, hence nv - v).
*/
static inline int32 my_atomic_add32(int32 volatile *a, int32 v)
{
  int32 nv= atomic_add_32_nv((volatile uint32_t *)a, v);
  return nv - v;
}

/* 64-bit variant of my_atomic_add32(). */
static inline int64 my_atomic_add64(int64 volatile *a, int64 v)
{
  int64 nv= atomic_add_64_nv((volatile uint64_t *)a, v);
  return nv - v;
}

/* Fetch-and-store: atomically store v into *a, return the previous value. */
static inline int32 my_atomic_fas32(int32 volatile *a, int32 v)
{
  return atomic_swap_32((volatile uint32_t *)a, (uint32_t)v);
}

/* 64-bit variant of my_atomic_fas32(). */
static inline int64 my_atomic_fas64(int64 volatile *a, int64 v)
{
  return atomic_swap_64((volatile uint64_t *)a, (uint64_t)v);
}

/* Pointer variant of my_atomic_fas32(). */
static inline void * my_atomic_fasptr(void * volatile *a, void * v)
{
  return atomic_swap_ptr(a, v);
}

/*
  Atomic load: OR-ing with 0 leaves *a unchanged, and atomic_or_32_nv()
  returns the resulting (i.e. current) value as a single atomic operation.
*/
static inline int32 my_atomic_load32(int32 volatile *a)
{
  return atomic_or_32_nv((volatile uint32_t *)a, 0);
}

/* 64-bit variant of my_atomic_load32(). */
static inline int64 my_atomic_load64(int64 volatile *a)
{
  return atomic_or_64_nv((volatile uint64_t *)a, 0);
}

/* Pointer load: adding 0 atomically returns the current value of *a. */
static inline void* my_atomic_loadptr(void * volatile *a)
{
  return atomic_add_ptr_nv(a, 0);
}

/* Atomic store, implemented as a swap whose old value is discarded. */
static inline void my_atomic_store32(int32 volatile *a, int32 v)
{
  (void) atomic_swap_32((volatile uint32_t *)a, (uint32_t)v);
}

/* 64-bit variant of my_atomic_store32(). */
static inline void my_atomic_store64(int64 volatile *a, int64 v)
{
  (void) atomic_swap_64((volatile uint64_t *)a, (uint64_t)v);
}

/* Pointer variant of my_atomic_store32(). */
static inline void my_atomic_storeptr(void * volatile *a, void *v)
{
  (void) atomic_swap_ptr((volatile void **)a, (void *)v);
}

#endif /* ATOMIC_SOLARIS_INCLUDED */