/*
 * Copyright (c) 2003 by Hewlett-Packard Company.  All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/* The following is useful primarily for debugging and documentation.   */
/* We define various atomic operations by acquiring a global pthread    */
/* lock.  The resulting implementation will perform poorly, but should  */
/* be correct unless it is used from signal handlers.                   */
/* We assume that all pthread operations act like full memory barriers. */
/* (We believe that is the intent of the specification.)                */

#include <assert.h>     /* for the sanity check in AO_test_and_set_full */
#include <pthread.h>

#include "test_and_set_t_is_ao_t.h"
        /* This is not necessarily compatible with the native           */
        /* implementation.  But those can't be safely mixed anyway.     */

/* We define only the full barrier variants, and count on the           */
/* generalization section below to fill in the rest.                    */
extern pthread_mutex_t AO_pt_lock;
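/*
 * The lock is only declared here; presumably it is defined exactly once
 * by the library implementation (in libatomic_ops this is done in
 * atomic_ops.c), along the lines of:
 *
 *   pthread_mutex_t AO_pt_lock = PTHREAD_MUTEX_INITIALIZER;
 */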

AO_INLINE void
AO_nop_full(void)
{
  pthread_mutex_lock(&AO_pt_lock);
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_nop_full

AO_INLINE AO_t
AO_load_full(const volatile AO_t *addr)
{
  AO_t result;
  pthread_mutex_lock(&AO_pt_lock);
  result = *addr;
  pthread_mutex_unlock(&AO_pt_lock);
  return result;
}
#define AO_HAVE_load_full

AO_INLINE void
AO_store_full(volatile AO_t *addr, AO_t val)
{
  pthread_mutex_lock(&AO_pt_lock);
  *addr = val;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_store_full

AO_INLINE unsigned char
AO_char_load_full(const volatile unsigned char *addr)
{
  unsigned char result;
  pthread_mutex_lock(&AO_pt_lock);
  result = *addr;
  pthread_mutex_unlock(&AO_pt_lock);
  return result;
}
#define AO_HAVE_char_load_full

AO_INLINE void
AO_char_store_full(volatile unsigned char *addr, unsigned char val)
{
  pthread_mutex_lock(&AO_pt_lock);
  *addr = val;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_char_store_full

AO_INLINE unsigned short
AO_short_load_full(const volatile unsigned short *addr)
{
  unsigned short result;
  pthread_mutex_lock(&AO_pt_lock);
  result = *addr;
  pthread_mutex_unlock(&AO_pt_lock);
  return result;
}
#define AO_HAVE_short_load_full

AO_INLINE void
AO_short_store_full(volatile unsigned short *addr, unsigned short val)
{
  pthread_mutex_lock(&AO_pt_lock);
  *addr = val;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_short_store_full

AO_INLINE unsigned int
AO_int_load_full(const volatile unsigned int *addr)
{
  unsigned int result;
  pthread_mutex_lock(&AO_pt_lock);
  result = *addr;
  pthread_mutex_unlock(&AO_pt_lock);
  return result;
}
#define AO_HAVE_int_load_full

AO_INLINE void
AO_int_store_full(volatile unsigned int *addr, unsigned int val)
{
  pthread_mutex_lock(&AO_pt_lock);
  *addr = val;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_int_store_full

AO_INLINE AO_TS_VAL_t
AO_test_and_set_full(volatile AO_TS_t *addr)
{
  AO_TS_VAL_t result;
  pthread_mutex_lock(&AO_pt_lock);
  result = (AO_TS_VAL_t)(*addr);
  *addr = AO_TS_SET;
  pthread_mutex_unlock(&AO_pt_lock);
  assert(result == AO_TS_SET || result == AO_TS_CLEAR);
  return result;
}
#define AO_HAVE_test_and_set_full
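
/*
 * A typical client use of test-and-set is a simple spin lock.  A minimal
 * sketch, assuming the usual AO_TS_INITIALIZER and AO_CLEAR definitions
 * from the main atomic_ops.h header (my_lock and the two helpers are
 * illustrative names, not part of this interface):
 *
 *   static AO_TS_t my_lock = AO_TS_INITIALIZER;
 *
 *   static void my_lock_acquire(void)
 *   {
 *     while (AO_test_and_set_full(&my_lock) == AO_TS_SET)
 *       ;
 *   }
 *
 *   static void my_lock_release(void)
 *   {
 *     AO_CLEAR(&my_lock);
 *   }
 */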

AO_INLINE AO_t
AO_fetch_and_add_full(volatile AO_t *p, AO_t incr)
{
  AO_t old_val;

  pthread_mutex_lock(&AO_pt_lock);
  old_val = *p;
  *p = old_val + incr;
  pthread_mutex_unlock(&AO_pt_lock);
  return old_val;
}
#define AO_HAVE_fetch_and_add_full

AO_INLINE unsigned char
AO_char_fetch_and_add_full(volatile unsigned char *p, unsigned char incr)
{
  unsigned char old_val;

  pthread_mutex_lock(&AO_pt_lock);
  old_val = *p;
  *p = old_val + incr;
  pthread_mutex_unlock(&AO_pt_lock);
  return old_val;
}
#define AO_HAVE_char_fetch_and_add_full

AO_INLINE unsigned short
AO_short_fetch_and_add_full(volatile unsigned short *p, unsigned short incr)
{
  unsigned short old_val;

  pthread_mutex_lock(&AO_pt_lock);
  old_val = *p;
  *p = old_val + incr;
  pthread_mutex_unlock(&AO_pt_lock);
  return old_val;
}
#define AO_HAVE_short_fetch_and_add_full

AO_INLINE unsigned int
AO_int_fetch_and_add_full(volatile unsigned int *p, unsigned int incr)
{
  unsigned int old_val;

  pthread_mutex_lock(&AO_pt_lock);
  old_val = *p;
  *p = old_val + incr;
  pthread_mutex_unlock(&AO_pt_lock);
  return old_val;
}
#define AO_HAVE_int_fetch_and_add_full

AO_INLINE void
AO_and_full(volatile AO_t *p, AO_t value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p &= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_and_full

AO_INLINE void
AO_or_full(volatile AO_t *p, AO_t value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p |= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_or_full

AO_INLINE void
AO_xor_full(volatile AO_t *p, AO_t value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p ^= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_xor_full

AO_INLINE void
AO_char_and_full(volatile unsigned char *p, unsigned char value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p &= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_char_and_full

AO_INLINE void
AO_char_or_full(volatile unsigned char *p, unsigned char value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p |= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_char_or_full

AO_INLINE void
AO_char_xor_full(volatile unsigned char *p, unsigned char value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p ^= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_char_xor_full

AO_INLINE void
AO_short_and_full(volatile unsigned short *p, unsigned short value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p &= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_short_and_full

AO_INLINE void
AO_short_or_full(volatile unsigned short *p, unsigned short value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p |= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_short_or_full

AO_INLINE void
AO_short_xor_full(volatile unsigned short *p, unsigned short value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p ^= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_short_xor_full

AO_INLINE void
AO_int_and_full(volatile unsigned *p, unsigned value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p &= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_int_and_full

AO_INLINE void
AO_int_or_full(volatile unsigned *p, unsigned value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p |= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_int_or_full

AO_INLINE void
AO_int_xor_full(volatile unsigned *p, unsigned value)
{
  pthread_mutex_lock(&AO_pt_lock);
  *p ^= value;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_int_xor_full

AO_INLINE AO_t
AO_fetch_compare_and_swap_full(volatile AO_t *addr, AO_t old_val,
                               AO_t new_val)
{
  AO_t fetched_val;

  pthread_mutex_lock(&AO_pt_lock);
  fetched_val = *addr;
  if (fetched_val == old_val)
    *addr = new_val;
  pthread_mutex_unlock(&AO_pt_lock);
  return fetched_val;
}
#define AO_HAVE_fetch_compare_and_swap_full
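
/*
 * Clients typically build further read-modify-write operations out of a
 * compare-and-swap loop.  A minimal sketch of an atomic maximum update
 * (atomic_max_sketch is an illustrative name, not part of this interface):
 *
 *   static void atomic_max_sketch(volatile AO_t *p, AO_t v)
 *   {
 *     AO_t old;
 *
 *     do {
 *       old = AO_load_full(p);
 *       if (old >= v) return;
 *     } while (AO_fetch_compare_and_swap_full(p, old, v) != old);
 *   }
 */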

AO_INLINE unsigned char
AO_char_fetch_compare_and_swap_full(volatile unsigned char *addr,
                                    unsigned char old_val,
                                    unsigned char new_val)
{
  unsigned char fetched_val;

  pthread_mutex_lock(&AO_pt_lock);
  fetched_val = *addr;
  if (fetched_val == old_val)
    *addr = new_val;
  pthread_mutex_unlock(&AO_pt_lock);
  return fetched_val;
}
#define AO_HAVE_char_fetch_compare_and_swap_full

AO_INLINE unsigned short
AO_short_fetch_compare_and_swap_full(volatile unsigned short *addr,
                                     unsigned short old_val,
                                     unsigned short new_val)
{
  unsigned short fetched_val;

  pthread_mutex_lock(&AO_pt_lock);
  fetched_val = *addr;
  if (fetched_val == old_val)
    *addr = new_val;
  pthread_mutex_unlock(&AO_pt_lock);
  return fetched_val;
}
#define AO_HAVE_short_fetch_compare_and_swap_full

AO_INLINE unsigned
AO_int_fetch_compare_and_swap_full(volatile unsigned *addr, unsigned old_val,
                                   unsigned new_val)
{
  unsigned fetched_val;

  pthread_mutex_lock(&AO_pt_lock);
  fetched_val = *addr;
  if (fetched_val == old_val)
    *addr = new_val;
  pthread_mutex_unlock(&AO_pt_lock);
  return fetched_val;
}
#define AO_HAVE_int_fetch_compare_and_swap_full

/* Unlike real architectures, we define both double-width CAS variants. */

typedef struct {
        AO_t AO_val1;
        AO_t AO_val2;
} AO_double_t;
#define AO_HAVE_double_t

#define AO_DOUBLE_T_INITIALIZER { (AO_t)0, (AO_t)0 }

AO_INLINE AO_double_t
AO_double_load_full(const volatile AO_double_t *addr)
{
  AO_double_t result;

  pthread_mutex_lock(&AO_pt_lock);
  result.AO_val1 = addr->AO_val1;
  result.AO_val2 = addr->AO_val2;
  pthread_mutex_unlock(&AO_pt_lock);
  return result;
}
#define AO_HAVE_double_load_full

AO_INLINE void
AO_double_store_full(volatile AO_double_t *addr, AO_double_t value)
{
  pthread_mutex_lock(&AO_pt_lock);
  addr->AO_val1 = value.AO_val1;
  addr->AO_val2 = value.AO_val2;
  pthread_mutex_unlock(&AO_pt_lock);
}
#define AO_HAVE_double_store_full

AO_INLINE int
AO_compare_double_and_swap_double_full(volatile AO_double_t *addr,
                                       AO_t old1, AO_t old2,
                                       AO_t new1, AO_t new2)
{
  pthread_mutex_lock(&AO_pt_lock);
  if (addr -> AO_val1 == old1 && addr -> AO_val2 == old2)
    {
      addr -> AO_val1 = new1;
      addr -> AO_val2 = new2;
      pthread_mutex_unlock(&AO_pt_lock);
      return 1;
    }
  else
    pthread_mutex_unlock(&AO_pt_lock);
  return 0;
}
#define AO_HAVE_compare_double_and_swap_double_full

AO_INLINE int
AO_compare_and_swap_double_full(volatile AO_double_t *addr,
                                AO_t old1, AO_t new1, AO_t new2)
{
  pthread_mutex_lock(&AO_pt_lock);
  if (addr -> AO_val1 == old1)
    {
      addr -> AO_val1 = new1;
      addr -> AO_val2 = new2;
      pthread_mutex_unlock(&AO_pt_lock);
      return 1;
    }
  else
    pthread_mutex_unlock(&AO_pt_lock);
  return 0;
}
#define AO_HAVE_compare_and_swap_double_full
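
/*
 * Double-width CAS is typically used to update a pointer together with a
 * version counter, so that reuse of the same pointer value can be
 * detected (the ABA problem).  A minimal sketch, with AO_val1 holding
 * the pointer and AO_val2 the version (ptr_with_ver and set_ptr_sketch
 * are illustrative names, not part of this interface):
 *
 *   static volatile AO_double_t ptr_with_ver = AO_DOUBLE_T_INITIALIZER;
 *
 *   static void set_ptr_sketch(AO_t new_ptr)
 *   {
 *     AO_double_t old;
 *
 *     do {
 *       old = AO_double_load_full(&ptr_with_ver);
 *     } while (!AO_compare_double_and_swap_double_full(&ptr_with_ver,
 *                old.AO_val1, old.AO_val2, new_ptr, old.AO_val2 + 1));
 *   }
 */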

/* We can't use hardware loads and stores, since they don't     */
/* interact correctly with atomic updates.                      */