#ifndef GCC_ATOMIC_INCLUDED
#define GCC_ATOMIC_INCLUDED

/* Copyright (c) 2016, 2021, Oracle and/or its affiliates.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License, version 2.0,
   as published by the Free Software Foundation.

   This program is also distributed with certain software (including
   but not limited to OpenSSL) that is licensed under separate terms,
   as designated in a particular file or component or in included license
   documentation.  The authors of MySQL hereby grant you an additional
   permission to link the program and your derivative works with the
   separately licensed software that they have included with MySQL.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License, version 2.0, for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA */
/* Wrappers around the __atomic builtins introduced in GCC 4.7.
   All operations use sequentially consistent ordering (__ATOMIC_SEQ_CST). */

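/*
  Atomic compare-and-swap.

  If *a equals *cmp, atomically store 'set' into *a and return nonzero.
  Otherwise copy the current value of *a into *cmp and return zero.
  Passing 0 as the 'weak' argument selects the strong form of
  __atomic_compare_exchange_n, which never fails spuriously.
*/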
static inline int my_atomic_cas32(int32 volatile *a, int32 *cmp, int32 set)
{
  return __atomic_compare_exchange_n(a, cmp, set, 0,
                                     __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

static inline int my_atomic_cas64(int64 volatile *a, int64 *cmp, int64 set)
{
  return __atomic_compare_exchange_n(a, cmp, set, 0,
                                     __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

static inline int my_atomic_casptr(void * volatile *a, void **cmp, void *set)
{
  return __atomic_compare_exchange_n(a, cmp, set, 0,
                                     __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

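/*
  Illustrative sketch (not part of the original header): a typical
  compare-and-swap retry loop built on these wrappers, here setting
  *a to the maximum of its current value and 'v'. When the CAS fails,
  'old' has already been refreshed with the value found in *a, so the
  loop simply re-tests:

    static inline void set_max32(int32 volatile *a, int32 v)
    {
      int32 old= my_atomic_load32(a);
      while (old < v && !my_atomic_cas32(a, &old, v))
        ;
    }
*/

/*
  Atomic fetch-and-add.

  Atomically add 'v' to *a and return the value *a held before the
  addition.
*/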
static inline int32 my_atomic_add32(int32 volatile *a, int32 v)
{
  return __atomic_fetch_add(a, v, __ATOMIC_SEQ_CST);
}

static inline int64 my_atomic_add64(int64 volatile *a, int64 v)
{
  return __atomic_fetch_add(a, v, __ATOMIC_SEQ_CST);
}

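/*
  Illustrative sketch (not part of the original header): because the
  add wrappers return the old value, a reference count can be dropped
  and checked in one atomic step. 'refcount' and 'destroy()' are
  hypothetical names:

    if (my_atomic_add32(&obj->refcount, -1) == 1)
      destroy(obj);        hypothetical: we released the last reference
*/

/*
  Atomic fetch-and-store (exchange).

  Atomically replace *a with 'v' and return the previous value of *a.
*/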
static inline int32 my_atomic_fas32(int32 volatile *a, int32 v)
{
  return __atomic_exchange_n(a, v, __ATOMIC_SEQ_CST);
}

static inline int64 my_atomic_fas64(int64 volatile *a, int64 v)
{
  return __atomic_exchange_n(a, v, __ATOMIC_SEQ_CST);
}

static inline void *my_atomic_fasptr(void * volatile *a, void *v)
{
  return __atomic_exchange_n(a, v, __ATOMIC_SEQ_CST);
}

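/*
  Atomic load: return the value of *a as one indivisible read.
*/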
static inline int32 my_atomic_load32(int32 volatile *a)
{
  return __atomic_load_n(a, __ATOMIC_SEQ_CST);
}

static inline int64 my_atomic_load64(int64 volatile *a)
{
  return __atomic_load_n(a, __ATOMIC_SEQ_CST);
}

static inline void *my_atomic_loadptr(void * volatile *a)
{
  return __atomic_load_n(a, __ATOMIC_SEQ_CST);
}

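/*
  Atomic store: write 'v' to *a as one indivisible write.
*/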
static inline void my_atomic_store32(int32 volatile *a, int32 v)
{
  __atomic_store_n(a, v, __ATOMIC_SEQ_CST);
}

static inline void my_atomic_store64(int64 volatile *a, int64 v)
{
  __atomic_store_n(a, v, __ATOMIC_SEQ_CST);
}

static inline void my_atomic_storeptr(void * volatile *a, void *v)
{
  __atomic_store_n(a, v, __ATOMIC_SEQ_CST);
}
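
/*
  Illustrative sketch (not part of the original header): the pointer
  variants can publish a fully initialized object to other threads;
  the seq_cst store/load pair guarantees that a reader which observes
  the pointer also observes the writes made before it was published.
  'make_node', 'Node' and 'g_head' are hypothetical names:

    Node *n= make_node();                   initialize fully, then:
    my_atomic_storeptr((void * volatile *) &g_head, n);

    Node *h= my_atomic_loadptr((void * volatile *) &g_head);
*/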

#endif /* GCC_ATOMIC_INCLUDED */