1 // Copyright 2013 Red Hat Inc.  All rights reserved.
2 //
3 // Redistribution and use in source and binary forms, with or without
4 // modification, are permitted provided that the following conditions are
5 // met:
6 //
7 //     * Redistributions of source code must retain the above copyright
8 // notice, this list of conditions and the following disclaimer.
9 //     * Redistributions in binary form must reproduce the above
10 // copyright notice, this list of conditions and the following disclaimer
11 // in the documentation and/or other materials provided with the
12 // distribution.
13 //     * Neither the name of Red Hat Inc. nor the names of its
14 // contributors may be used to endorse or promote products derived from
15 // this software without specific prior written permission.
16 //
17 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 
29 // This file is an internal atomic implementation, use atomicops.h instead.
30 
31 #ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_
32 #define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_
33 
34 namespace google {
35 namespace protobuf {
36 namespace internal {
37 
// Atomic compare-and-swap with no ordering guarantees: both the success and
// failure memory orders are __ATOMIC_RELAXED.  If *ptr == old_value, stores
// new_value into *ptr.  Returns the value *ptr held before the operation:
// on failure the builtin writes the observed value back into old_value; on
// success old_value is left untouched and already equals the prior contents.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  __atomic_compare_exchange_n(ptr, &old_value, new_value, false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return old_value;
}
45 
// Atomically stores new_value into *ptr and returns the value *ptr held
// previously, with relaxed ordering (no barrier).
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return __atomic_exchange_n(ptr, new_value, __ATOMIC_RELAXED);
}
50 
// Atomically adds increment to *ptr and returns the new (post-increment)
// value, with relaxed ordering (no barrier).
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return __atomic_add_fetch(ptr, increment, __ATOMIC_RELAXED);
}
55 
// Atomically adds increment to *ptr and returns the new (post-increment)
// value, with sequentially consistent ordering (acts as a full barrier).
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return __atomic_add_fetch(ptr, increment, __ATOMIC_SEQ_CST);
}
60 
// Compare-and-swap with acquire semantics on both the success and failure
// paths (__ATOMIC_ACQUIRE for both orders).  If *ptr == old_value, stores
// new_value into *ptr.  Returns the value *ptr held before the operation
// (the builtin writes the observed value into old_value on failure).
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  __atomic_compare_exchange_n(ptr, &old_value, new_value, false,
                              __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return old_value;
}
68 
// Compare-and-swap with release semantics on success.  If *ptr == old_value,
// stores new_value into *ptr.  Returns the value *ptr held before the
// operation (the builtin writes the observed value into old_value on
// failure).
//
// NOTE(review): the failure order is __ATOMIC_ACQUIRE while success is
// __ATOMIC_RELEASE.  GCC requires only that the failure order not be
// RELEASE/ACQ_REL; this mixed pairing is accepted by the compiler, but
// confirm it is the intended failure semantics rather than __ATOMIC_RELAXED.
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  __atomic_compare_exchange_n(ptr, &old_value, new_value, false,
                              __ATOMIC_RELEASE, __ATOMIC_ACQUIRE);
  return old_value;
}
76 
// Atomic store of value into *ptr with relaxed ordering (no barrier).
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  __atomic_store_n(ptr, value, __ATOMIC_RELAXED);
}
80 
// Issues a full (sequentially consistent) memory barrier.
//
// Uses __atomic_thread_fence(__ATOMIC_SEQ_CST) for consistency with the
// __atomic_* builtins used throughout this file; per the GCC documentation
// this is a full barrier, equivalent to the legacy __sync_synchronize()
// builtin this function previously called.
inline void MemoryBarrierInternal() {
  __atomic_thread_fence(__ATOMIC_SEQ_CST);
}
84 
// Store of value into *ptr followed by full ordering.  Note: despite the
// name, this is implemented as a sequentially consistent store
// (__ATOMIC_SEQ_CST), which is strictly stronger than acquire.
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST);
}
88 
// Atomic store of value into *ptr with release ordering: prior writes by
// this thread become visible before the store itself.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  __atomic_store_n(ptr, value, __ATOMIC_RELEASE);
}
92 
// Atomic load of *ptr with relaxed ordering (no barrier).
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return __atomic_load_n(ptr, __ATOMIC_RELAXED);
}
96 
// Atomic load of *ptr with acquire ordering: subsequent reads by this
// thread cannot be reordered before the load.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
}
100 
// Load of *ptr with full ordering.  Note: despite the name, this is
// implemented as a sequentially consistent load (__ATOMIC_SEQ_CST), which
// is strictly stronger than release semantics would be for a load.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
}
104 
105 #ifdef __LP64__
106 
// 64-bit variant (LP64 only): atomic store of value into *ptr with release
// ordering.
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  __atomic_store_n(ptr, value, __ATOMIC_RELEASE);
}
110 
// 64-bit variant (LP64 only): atomic load of *ptr with acquire ordering.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
}
114 
// 64-bit variant (LP64 only): compare-and-swap with acquire semantics on
// both the success and failure paths.  If *ptr == old_value, stores
// new_value into *ptr.  Returns the value *ptr held before the operation
// (the builtin writes the observed value into old_value on failure).
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  __atomic_compare_exchange_n(ptr, &old_value, new_value, false,
                              __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return old_value;
}
122 
// 64-bit variant (LP64 only): compare-and-swap with relaxed ordering on
// both paths.  If *ptr == old_value, stores new_value into *ptr.  Returns
// the value *ptr held before the operation (the builtin writes the observed
// value into old_value on failure).
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  __atomic_compare_exchange_n(ptr, &old_value, new_value, false,
                              __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return old_value;
}
130 
// 64-bit variant (LP64 only): atomically adds increment to *ptr and returns
// the new (post-increment) value, with relaxed ordering.
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return __atomic_add_fetch(ptr, increment, __ATOMIC_RELAXED);
}
135 
// 64-bit variant (LP64 only): atomic store of value into *ptr with relaxed
// ordering (no barrier).
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  __atomic_store_n(ptr, value, __ATOMIC_RELAXED);
}
139 
// 64-bit variant (LP64 only): atomically stores new_value into *ptr and
// returns the previous value, with relaxed ordering (no barrier).
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return __atomic_exchange_n(ptr, new_value, __ATOMIC_RELAXED);
}
144 
// 64-bit variant (LP64 only): atomic load of *ptr with relaxed ordering
// (no barrier).
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return __atomic_load_n(ptr, __ATOMIC_RELAXED);
}
148 
149 #endif // defined(__LP64__)
150 
151 }  // namespace internal
152 }  // namespace protobuf
153 }  // namespace google
154 
155 #endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_
156