//===-- sanitizer_atomic_clang_mips.h ---------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_CLANG_MIPS_H
#define SANITIZER_ATOMIC_CLANG_MIPS_H

namespace __sanitizer {

// MIPS32 does not support atomics wider than 4 bytes. To address this lack
// of functionality, the sanitizer library provides helper methods which use
// an internal spin lock mechanism to emulate atomic operations when the size
// is 8 bytes.
static void __spin_lock(volatile int *lock) {
  // Test-and-set to acquire the lock; while contended, spin on a plain load
  // until the lock looks free, then retry the atomic test-and-set.
  while (__sync_lock_test_and_set(lock, 1))
    while (*lock) {
    }
}

static void __spin_unlock(volatile int *lock) { __sync_lock_release(lock); }
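
// Note: per the GCC __sync builtins, __sync_lock_test_and_set is an acquire
// barrier and __sync_lock_release is a release barrier, so the lock/unlock
// pair is sufficient to order the 64-bit access it protects.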

// Make sure the lock is on its own cache line to prevent false sharing.
// Put it inside a struct that is aligned and padded to the typical MIPS
// cacheline which is 32 bytes.
static struct {
  int lock;
  char pad[32 - sizeof(int)];
} __attribute__((aligned(32))) lock = {0, {0}};
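
// All 8-byte atomic operations below take this single lock, so they
// serialize against each other.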

template <>
inline atomic_uint64_t::Type atomic_fetch_add(volatile atomic_uint64_t *ptr,
                                              atomic_uint64_t::Type val,
                                              memory_order mo) {
  DCHECK(mo &
         (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  DCHECK(!((uptr)ptr % sizeof(*ptr)));

  atomic_uint64_t::Type ret;

  __spin_lock(&lock.lock);
  ret = *(const_cast<atomic_uint64_t::Type volatile *>(&ptr->val_dont_use));
  ptr->val_dont_use = ret + val;
  __spin_unlock(&lock.lock);

  return ret;
}
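
// Usage sketch (illustrative only; atomic_uint64_t, u64, and the memory
// orders come from sanitizer_atomic.h): a relaxed 64-bit increment takes the
// spin lock above and returns the previous value.
//   atomic_uint64_t counter = {};
//   u64 prev = atomic_fetch_add(&counter, 1, memory_order_relaxed);
//   // prev == 0; the counter now holds 1.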

template <>
inline atomic_uint64_t::Type atomic_fetch_sub(volatile atomic_uint64_t *ptr,
                                              atomic_uint64_t::Type val,
                                              memory_order mo) {
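  // val is unsigned, so -val wraps modulo 2^64; adding the wrapped value is
  // equivalent to subtracting val.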
  return atomic_fetch_add(ptr, -val, mo);
}

template <>
inline bool atomic_compare_exchange_strong(volatile atomic_uint64_t *ptr,
                                           atomic_uint64_t::Type *cmp,
                                           atomic_uint64_t::Type xchg,
                                           memory_order mo) {
  DCHECK(mo &
         (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  DCHECK(!((uptr)ptr % sizeof(*ptr)));

  typedef atomic_uint64_t::Type Type;
  Type cmpv = *cmp;
  Type prev;
  bool ret = false;

  __spin_lock(&lock.lock);
  prev = *(const_cast<Type volatile *>(&ptr->val_dont_use));
  if (prev == cmpv) {
    ret = true;
    ptr->val_dont_use = xchg;
  }
  __spin_unlock(&lock.lock);

  return ret;
}
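
// Usage sketch (illustrative only): a typical retry loop over this
// specialization. Note that, unlike the generic template in
// sanitizer_atomic_clang.h, this version leaves *cmp unchanged on failure,
// so the loop reloads the value itself.
//   u64 cmp = atomic_load(&v, memory_order_relaxed);
//   while (!atomic_compare_exchange_strong(&v, &cmp, cmp + 1,
//                                          memory_order_relaxed))
//     cmp = atomic_load(&v, memory_order_relaxed);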

template <>
inline atomic_uint64_t::Type atomic_load(const volatile atomic_uint64_t *ptr,
                                         memory_order mo) {
  DCHECK(mo &
         (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  DCHECK(!((uptr)ptr % sizeof(*ptr)));

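  // A load must observe a consistent 64-bit value, so it goes through the
  // same lock as the writers: fetch_add with a zero addend returns the
  // current value without modifying it.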
  atomic_uint64_t::Type zero = 0;
  volatile atomic_uint64_t *Newptr =
      const_cast<volatile atomic_uint64_t *>(ptr);
  return atomic_fetch_add(Newptr, zero, mo);
}

template <>
inline void atomic_store(volatile atomic_uint64_t *ptr, atomic_uint64_t::Type v,
                         memory_order mo) {
  DCHECK(mo &
         (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  DCHECK(!((uptr)ptr % sizeof(*ptr)));

  __spin_lock(&lock.lock);
  ptr->val_dont_use = v;
  __spin_unlock(&lock.lock);
}

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_CLANG_MIPS_H