#ifndef _atomic_types_h_
#define _atomic_types_h_

#if	(__GNUC__ > 4) || \
	(__GNUC__ == 4 && __GNUC_MINOR__ >= 1) && \
		( \
		  (defined(__APPLE__) && \
		    ( \
		      defined(__ppc__) || \
		      defined(__i386__) || \
		      defined(__x86_64__) \
		    ) \
		  ) || \
		  (defined(__linux__) && \
		    ( \
		      (defined(__i386__) && (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4))) || \
		      defined(__ia64__) || \
		      defined(__x86_64__) || \
		      (defined(__powerpc__) && !defined(__powerpc64__)) || \
		      defined(__alpha) \
		    ) \
		  ) \
		)
#define HAVE_ATOMIC_CAS 1
#else
// #warning Compare and Swap is not supported on this architecture
#define HAVE_ATOMIC_CAS 0
#endif
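
// HAVE_ATOMIC_CAS selects between the two implementations below: when it is 1
// the classes use GCC's __sync_* builtins, otherwise they fall back to
// serializing every update with an AmMutex lock.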

#include <assert.h>
#include "log.h"

#if !HAVE_ATOMIC_CAS
#include "AmThread.h"
#endif

// 32 bit unsigned integer
class atomic_int
#if !HAVE_ATOMIC_CAS
  : protected AmMutex
#endif
{
  volatile unsigned int i;

public:
  atomic_int() : i(0) {}

  void set(unsigned int val) {
    i = val;
  }

  unsigned int get() const {
    return i;
  }
#if HAVE_ATOMIC_CAS
  // returns ++i;
  unsigned int inc(unsigned int add=1) {
    return __sync_add_and_fetch(&i,add);
  }

  // returns --i;
  unsigned int dec(unsigned int sub=1) {
    return __sync_sub_and_fetch(&i,sub);
  }
#else // if HAVE_ATOMIC_CAS
  // returns ++i;
  unsigned int inc(unsigned int add=1) {
    unsigned int res;
    lock();
    res = (i += add);
    unlock();
    return res;
  }

  // returns --i;
  unsigned int dec(unsigned int sub=1) {
    unsigned int res;
    lock();
    res = (i -= sub);
    unlock();
    return res;
  }
#endif

  // returns --i == 0;
  bool dec_and_test() {
    return dec() == 0;
  }
};
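
// Usage sketch (illustrative, not part of the header): a counter shared
// between threads; inc()/dec() return the new value and dec_and_test()
// reports whether the counter just reached zero.
//
//   atomic_int pending;
//   pending.inc();                  // -> 1
//   pending.inc();                  // -> 2
//   pending.dec();                  // -> 1
//   if (pending.dec_and_test()) {
//     // counter dropped to 0
//   }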

// 64 bit unsigned integer
class atomic_int64
#if !HAVE_ATOMIC_CAS
  : protected AmMutex
#endif
{
  volatile unsigned long long ll;

public:
  atomic_int64(): ll(0) {}

#if HAVE_ATOMIC_CAS
  void set(unsigned long long val) {
#if !defined(__LP64__) || !__LP64__
    // On 32-bit builds a 64 bit store is not a single atomic operation,
    // so install the new value with a compare-and-swap loop.
    unsigned long long tmp_ll;
    do {
      tmp_ll = ll;
    }
    while(!__sync_bool_compare_and_swap(&ll, tmp_ll, val));
#else
    ll = val;
#endif
  }

  unsigned long long get() {
#if !defined(__LP64__) || !__LP64__
    // Read atomically by swapping the current value with itself.
    unsigned long long tmp_ll;
    do {
      tmp_ll = ll;
    }
    while(!__sync_bool_compare_and_swap(&ll, tmp_ll, tmp_ll));

    return tmp_ll;
#else
    return ll;
#endif
  }

  // returns ++ll;
  unsigned long long inc(unsigned long long add=1) {
    return __sync_add_and_fetch(&ll,add);
  }

  // returns --ll;
  unsigned long long dec(unsigned long long sub=1) {
    return __sync_sub_and_fetch(&ll,sub);
  }

#else // if HAVE_ATOMIC_CAS

  void set(unsigned long long val) {
#if !defined(__LP64__) || !__LP64__
    lock();
    ll = val;
    unlock();
#else
    ll = val;
#endif
  }

  unsigned long long get() {
#if !defined(__LP64__) || !__LP64__
    unsigned long long tmp_ll;
    lock();
    tmp_ll = ll;
    unlock();
    return tmp_ll;
#else
    return ll;
#endif
  }

  // returns ++ll;
  unsigned long long inc(unsigned long long add=1) {
    unsigned long long res;
    lock();
    res = (ll += add);
    unlock();
    return res;
  }

  // returns --ll;
  unsigned long long dec(unsigned long long sub=1) {
    unsigned long long res;
    lock();
    res = (ll -= sub);
    unlock();
    return res;
  }
#endif

  // returns --ll == 0;
  bool dec_and_test() {
    return dec() == 0;
  }
};
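
// Usage sketch (illustrative, not part of the header): a 64 bit byte counter.
// On 32-bit builds set()/get() go through the CAS loop or the mutex above, so
// readers never observe a half-written value.
//
//   atomic_int64 rx_bytes;
//   rx_bytes.set(0);
//   rx_bytes.inc(1500);                         // -> 1500
//   unsigned long long total = rx_bytes.get();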

class atomic_ref_cnt;
void inc_ref(atomic_ref_cnt* rc);
void dec_ref(atomic_ref_cnt* rc);

class atomic_ref_cnt
{
  atomic_int ref_cnt;

protected:
  atomic_ref_cnt() {}

  void _inc_ref() { ref_cnt.inc(); }
  bool _dec_ref() { return ref_cnt.dec_and_test(); }

  virtual ~atomic_ref_cnt() {}
  virtual void on_destroy() {}

  friend void inc_ref(atomic_ref_cnt* rc);
  friend void dec_ref(atomic_ref_cnt* rc);
};

inline void inc_ref(atomic_ref_cnt* rc)
{
  assert(rc);
  rc->_inc_ref();
}

inline void dec_ref(atomic_ref_cnt* rc)
{
  assert(rc);
  if(rc->_dec_ref()){
    rc->on_destroy();
    delete rc;
  }
}
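
// Usage sketch (illustrative; MyObject is a hypothetical subclass): objects
// derived from atomic_ref_cnt start with a reference count of 0, so the
// creator takes the first reference explicitly with inc_ref().
//
//   class MyObject : public atomic_ref_cnt {
//     void on_destroy() { /* runs right before delete */ }
//   };
//
//   MyObject* obj = new MyObject();
//   inc_ref(obj);   // count: 1
//   dec_ref(obj);   // count reaches 0: on_destroy() is called, then delete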

#endif