/* -*- Mode: C; c-basic-offset:4 ; indent-tabs-mode:nil -*- */
/*
 * Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana
 *                         University Research and Technology
 *                         Corporation.  All rights reserved.
 * Copyright (c) 2004-2005 The University of Tennessee and The University
 *                         of Tennessee Research Foundation.  All rights
 *                         reserved.
 * Copyright (c) 2004-2005 High Performance Computing Center Stuttgart,
 *                         University of Stuttgart.  All rights reserved.
 * Copyright (c) 2004-2005 The Regents of the University of California.
 *                         All rights reserved.
 * Copyright (c) 2007      Sun Microsystems, Inc.  All rights reserved.
 * Copyright (c) 2016      Research Organization for Information Science
 *                         and Technology (RIST). All rights reserved.
 * Copyright (c) 2017-2018 Los Alamos National Security, LLC. All rights
 *                         reserved.
 * Copyright (c) 2018-2019 Intel, Inc.  All rights reserved.
 * $COPYRIGHT$
 *
 * Additional copyrights may follow
 *
 * $HEADER$
 */

#ifndef PMIX_SYS_ARCH_ATOMIC_H
#define PMIX_SYS_ARCH_ATOMIC_H 1

/*
 * On SPARC v9, use the casa and casxa (compare-and-swap) instructions.
 */

#define ASI_P "0x80"
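/* 0x80 is ASI_PRIMARY in the SPARC V9 architecture: casa/casxa take an
 * address space identifier operand, and ASI_PRIMARY selects ordinary
 * (primary address space) memory. */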

#define MEPMIXMBAR(type) __asm__  __volatile__ ("membar " type : : : "memory")


/**********************************************************************
 *
 * Define constants for Sparc v9 (Ultra Sparc)
 *
 *********************************************************************/
#define PMIX_HAVE_ATOMIC_MEM_BARRIER 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_32 1

#define PMIX_HAVE_ATOMIC_COMPARE_EXCHANGE_64 1


/**********************************************************************
 *
 * Memory Barriers
 *
 *********************************************************************/
#if PMIX_GCC_INLINE_ASSEMBLY

static inline void pmix_atomic_mb(void)
{
    MEPMIXMBAR("#LoadLoad | #LoadStore | #StoreStore | #StoreLoad");
}


static inline void pmix_atomic_rmb(void)
{
    MEPMIXMBAR("#LoadLoad");
}


static inline void pmix_atomic_wmb(void)
{
    MEPMIXMBAR("#StoreStore");
}

static inline void pmix_atomic_isync(void)
{
}
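
/*
 * Illustrative sketch (not part of this header's API): a typical
 * producer/consumer pairing of the write and read barriers defined above.
 * The names shared_data, data_ready and use() are hypothetical and exist
 * only for this example.
 *
 *   static int shared_data;
 *   static volatile int data_ready = 0;
 *
 *   // producer
 *   shared_data = 42;
 *   pmix_atomic_wmb();        // publish the data before setting the flag
 *   data_ready = 1;
 *
 *   // consumer
 *   while (0 == data_ready) { ; }
 *   pmix_atomic_rmb();        // order the flag read before the data read
 *   use(shared_data);
 */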


#endif /* PMIX_GCC_INLINE_ASSEMBLY */


/**********************************************************************
 *
 * Atomic math operations
 *
 *********************************************************************/
#if PMIX_GCC_INLINE_ASSEMBLY

static inline bool pmix_atomic_compare_exchange_strong_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    /* casa [reg(rs1)] %asi, reg(rs2), reg(rd)
     *
     * if (*(reg(rs1)) == reg(rs2) )
     *    swap reg(rd), *(reg(rs1))
     * else
     *    reg(rd) = *(reg(rs1))
     */

    int32_t prev = newval;
    bool ret;

    __asm__ __volatile__("casa [%1] " ASI_P ", %2, %0"
                         : "+r" (prev)
                         : "r" (addr), "r" (*oldval));
    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}
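
/*
 * Illustrative sketch (not part of this header's API): the strong
 * compare-exchange above is normally used in a retry loop, e.g. a
 * hypothetical fetch-and-add built on top of it:
 *
 *   static int32_t pmix_demo_fetch_add_32(pmix_atomic_int32_t *addr, int32_t delta)
 *   {
 *       int32_t old = *addr;
 *       // on failure, old is refreshed with the value currently in *addr,
 *       // so the loop simply retries with the new expected value
 *       while (!pmix_atomic_compare_exchange_strong_32(addr, &old, old + delta)) {
 *           ;
 *       }
 *       return old;
 *   }
 *
 * pmix_demo_fetch_add_32 is a made-up name used only for this example.
 */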


static inline bool pmix_atomic_compare_exchange_strong_acq_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}


static inline bool pmix_atomic_compare_exchange_strong_rel_32 (pmix_atomic_int32_t *addr, int32_t *oldval, int32_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_32 (addr, oldval, newval);
}
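
/*
 * Illustrative sketch (not part of this header's API): the acquire/release
 * variants are meant to bracket a critical section, e.g. a hypothetical
 * test-and-set lock (pmix_demo_lock_t, pmix_demo_lock/unlock are made-up
 * names for this example only):
 *
 *   typedef pmix_atomic_int32_t pmix_demo_lock_t;   // 0 = unlocked, 1 = locked
 *
 *   static void pmix_demo_lock(pmix_demo_lock_t *lock)
 *   {
 *       int32_t unlocked = 0;
 *       // acquire semantics: accesses in the critical section cannot be
 *       // reordered above the successful exchange
 *       while (!pmix_atomic_compare_exchange_strong_acq_32(lock, &unlocked, 1)) {
 *           unlocked = 0;   // expected value was overwritten on failure
 *       }
 *   }
 *
 *   static void pmix_demo_unlock(pmix_demo_lock_t *lock)
 *   {
 *       int32_t locked = 1;
 *       // release semantics: prior writes become visible before the lock drops
 *       pmix_atomic_compare_exchange_strong_rel_32(lock, &locked, 0);
 *   }
 */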


#if PMIX_ASSEMBLY_ARCH == PMIX_SPARCV9_64

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    /* casxa [reg(rs1)] %asi, reg(rs2), reg(rd)
     *
     * if (*(reg(rs1)) == reg(rs2) )
     *    swap reg(rd), *(reg(rs1))
     * else
     *    reg(rd) = *(reg(rs1))
     */
    int64_t prev = newval;
    bool ret;

    __asm__ __volatile__("casxa [%1] " ASI_P ", %2, %0"
                         : "+r" (prev)
                         : "r" (addr), "r" (*oldval));
    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#else /* PMIX_ASSEMBLY_ARCH == PMIX_SPARCV9_64 */

static inline bool pmix_atomic_compare_exchange_strong_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    /* casxa [reg(rs1)] %asi, reg(rs2), reg(rd)
     *
     * if (*(reg(rs1)) == reg(rs2) )
     *    swap reg(rd), *(reg(rs1))
     * else
     *    reg(rd) = *(reg(rs1))
     *
     */
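    /*
     * Note (editorial): unlike the 64-bit build above, the values are kept in
     * memory operands here and loaded with ldx into the 64-bit-wide global
     * registers %g1/%g2, presumably because a 32-bit SPARC ABI cannot hand a
     * 64-bit integer to the asm as a single "r" register operand.
     */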
    int64_t prev = newval;
    bool ret;

    __asm__ __volatile__(
                       "ldx %0, %%g1               \n\t" /* g1 = prev */
                       "ldx %2, %%g2               \n\t" /* g2 = oldval */
                       "casxa [%1] " ASI_P ", %%g2, %%g1 \n\t"
                       "stx %%g1, %0               \n"
                       : "+m"(prev)
                       : "r"(addr), "m"(*oldval)
                       : "%g1", "%g2"
                       );

    ret = (prev == *oldval);
    *oldval = prev;
    return ret;
}

#endif /* PMIX_ASSEMBLY_ARCH == PMIX_SPARCV9_64 */

static inline bool pmix_atomic_compare_exchange_strong_acq_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    bool rc;

    rc = pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
    pmix_atomic_rmb();

    return rc;
}


static inline bool pmix_atomic_compare_exchange_strong_rel_64 (pmix_atomic_int64_t *addr, int64_t *oldval, int64_t newval)
{
    pmix_atomic_wmb();
    return pmix_atomic_compare_exchange_strong_64 (addr, oldval, newval);
}

#endif /* PMIX_GCC_INLINE_ASSEMBLY */


#endif /* ! PMIX_SYS_ARCH_ATOMIC_H */