/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#include <sbi/sbi_types.h>
#include <sbi/riscv_asm.h>
#include <sbi/riscv_atomic.h>
#include <sbi/riscv_barrier.h>
#include <sbi/sbi_bits.h>

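/* Read the counter; rmb() orders this load before any later reads. */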
long atomic_read(atomic_t *atom)
{
	long ret = atom->counter;
	rmb();
	return ret;
}

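/* Write the counter; wmb() orders this store before any later writes. */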
void atomic_write(atomic_t *atom, long value)
{
	atom->counter = value;
	wmb();
}

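/* Atomically add value to the counter and return the new counter value. */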
long atomic_add_return(atomic_t *atom, long value)
{
	long ret;

#if __SIZEOF_LONG__ == 4
	__asm__ __volatile__("	amoadd.w.aqrl  %1, %2, %0"
			     : "+A"(atom->counter), "=r"(ret)
			     : "r"(value)
			     : "memory");
#elif __SIZEOF_LONG__ == 8
	__asm__ __volatile__("	amoadd.d.aqrl  %1, %2, %0"
			     : "+A"(atom->counter), "=r"(ret)
			     : "r"(value)
			     : "memory");
#endif

	return ret + value;
}

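/* Atomically subtract value from the counter and return the new counter value. */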
long atomic_sub_return(atomic_t *atom, long value)
{
	long ret;

#if __SIZEOF_LONG__ == 4
	__asm__ __volatile__("	amoadd.w.aqrl  %1, %2, %0"
			     : "+A"(atom->counter), "=r"(ret)
			     : "r"(-value)
			     : "memory");
#elif __SIZEOF_LONG__ == 8
	__asm__ __volatile__("	amoadd.d.aqrl  %1, %2, %0"
			     : "+A"(atom->counter), "=r"(ret)
			     : "r"(-value)
			     : "memory");
#endif

	return ret - value;
}

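/*
 * Exchange *ptr with 'new' using a single amoswap AMO with
 * acquire+release ordering; returns the previous value.
 */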
#define __axchg(ptr, new, size)						\
	({								\
		__typeof__(ptr) __ptr = (ptr);				\
		__typeof__(new) __new = (new);				\
		__typeof__(*(ptr)) __ret;				\
		switch (size) {						\
		case 4:							\
			__asm__ __volatile__ (				\
				"	amoswap.w.aqrl %0, %2, %1\n"	\
				: "=r" (__ret), "+A" (*__ptr)		\
				: "r" (__new)				\
				: "memory");				\
			break;						\
		case 8:							\
			__asm__ __volatile__ (				\
				"	amoswap.d.aqrl %0, %2, %1\n"	\
				: "=r" (__ret), "+A" (*__ptr)		\
				: "r" (__new)				\
				: "memory");				\
			break;						\
		default:						\
			break;						\
		}							\
		__ret;							\
	})

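/* Type-checked AMO-based exchange built on __axchg(). */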
#define axchg(ptr, x)							\
	({								\
		__typeof__(*(ptr)) _x_ = (x);				\
		(__typeof__(*(ptr))) __axchg((ptr), _x_, sizeof(*(ptr)));	\
	})

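/*
 * Exchange *ptr with 'new' using an LR/SC loop; the sc.*.rl plus the
 * trailing fence provide full ordering. Returns the previous value.
 */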
#define __xchg(ptr, new, size)                                            \
	({                                                                \
		__typeof__(ptr) __ptr	 = (ptr);                         \
		__typeof__(*(ptr)) __new = (new);                         \
		__typeof__(*(ptr)) __ret;                                 \
		register unsigned int __rc;                               \
		switch (size) {                                           \
		case 4:                                                   \
			__asm__ __volatile__("0:	lr.w %0, %2\n"            \
					     "	sc.w.rl %1, %z3, %2\n"    \
					     "	bnez %1, 0b\n"            \
					     "	fence rw, rw\n"           \
					     : "=&r"(__ret), "=&r"(__rc), \
					       "+A"(*__ptr)               \
					     : "rJ"(__new)                \
					     : "memory");                 \
			break;                                            \
		case 8:                                                   \
			__asm__ __volatile__("0:	lr.d %0, %2\n"            \
					     "	sc.d.rl %1, %z3, %2\n"    \
					     "	bnez %1, 0b\n"            \
					     "	fence rw, rw\n"           \
					     : "=&r"(__ret), "=&r"(__rc), \
					       "+A"(*__ptr)               \
					     : "rJ"(__new)                \
					     : "memory");                 \
			break;                                            \
		default:                                                  \
			break;                                            \
		}                                                         \
		__ret;                                                    \
	})

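/* Type-checked LR/SC based exchange built on __xchg(). */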
#define xchg(ptr, n)                                                     \
	({                                                               \
		__typeof__(*(ptr)) _n_ = (n);                            \
		(__typeof__(*(ptr))) __xchg((ptr), _n_, sizeof(*(ptr))); \
	})

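/*
 * Compare-and-exchange with LR/SC: store 'new' only if *ptr equals
 * 'old'; returns the value that was observed in *ptr.
 */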
#define __cmpxchg(ptr, old, new, size)                                    \
	({                                                                \
		__typeof__(ptr) __ptr	 = (ptr);                         \
		__typeof__(*(ptr)) __old = (old);                         \
		__typeof__(*(ptr)) __new = (new);                         \
		__typeof__(*(ptr)) __ret;                                 \
		register unsigned int __rc;                               \
		switch (size) {                                           \
		case 4:                                                   \
			__asm__ __volatile__("0:	lr.w %0, %2\n"            \
					     "	bne  %0, %z3, 1f\n"       \
					     "	sc.w.rl %1, %z4, %2\n"    \
					     "	bnez %1, 0b\n"            \
					     "	fence rw, rw\n"           \
					     "1:\n"                       \
					     : "=&r"(__ret), "=&r"(__rc), \
					       "+A"(*__ptr)               \
					     : "rJ"(__old), "rJ"(__new)   \
					     : "memory");                 \
			break;                                            \
		case 8:                                                   \
			__asm__ __volatile__("0:	lr.d %0, %2\n"            \
					     "	bne %0, %z3, 1f\n"        \
					     "	sc.d.rl %1, %z4, %2\n"    \
					     "	bnez %1, 0b\n"            \
					     "	fence rw, rw\n"           \
					     "1:\n"                       \
					     : "=&r"(__ret), "=&r"(__rc), \
					       "+A"(*__ptr)               \
					     : "rJ"(__old), "rJ"(__new)   \
					     : "memory");                 \
			break;                                            \
		default:                                                  \
			break;                                            \
		}                                                         \
		__ret;                                                    \
	})

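/* Type-checked compare-and-exchange built on __cmpxchg(). */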
#define cmpxchg(ptr, o, n)                                          \
	({                                                          \
		__typeof__(*(ptr)) _o_ = (o);                       \
		__typeof__(*(ptr)) _n_ = (n);                       \
		(__typeof__(*(ptr)))                                \
			__cmpxchg((ptr), _o_, _n_, sizeof(*(ptr))); \
	})

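/* Atomically compare-and-swap the counter and return the old value. */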
long arch_atomic_cmpxchg(atomic_t *atom, long oldval, long newval)
{
#ifdef __riscv_atomic
	return __sync_val_compare_and_swap(&atom->counter, oldval, newval);
#else
	return cmpxchg(&atom->counter, oldval, newval);
#endif
}

long arch_atomic_xchg(atomic_t *atom, long newval)
{
	/* Atomically set new value and return old value. */
#ifdef __riscv_atomic
	return axchg(&atom->counter, newval);
#else
	return xchg(&atom->counter, newval);
#endif
}

unsigned int atomic_raw_xchg_uint(volatile unsigned int *ptr,
				  unsigned int newval)
{
	/* Atomically set new value and return old value. */
#ifdef __riscv_atomic
	return axchg(ptr, newval);
#else
	return xchg(ptr, newval);
#endif
}

unsigned long atomic_raw_xchg_ulong(volatile unsigned long *ptr,
				    unsigned long newval)
{
	/* Atomically set new value and return old value. */
#ifdef __riscv_atomic
	return axchg(ptr, newval);
#else
	return xchg(ptr, newval);
#endif
}

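/* Select the AMO mnemonic width matching the native long size. */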
#if (BITS_PER_LONG == 64)
#define __AMO(op) "amo" #op ".d"
#elif (BITS_PER_LONG == 32)
#define __AMO(op) "amo" #op ".w"
#else
#error "Unexpected BITS_PER_LONG"
#endif

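/*
 * Apply an AMO to the word containing bit 'nr' and return that word's
 * previous value.
 */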
#define __atomic_op_bit_ord(op, mod, nr, addr, ord)                          \
	({                                                                   \
		unsigned long __res, __mask;                                 \
		__mask = BIT_MASK(nr);                                       \
		__asm__ __volatile__(__AMO(op) #ord " %0, %2, %1"            \
				     : "=r"(__res), "+A"(addr[BIT_WORD(nr)]) \
				     : "r"(mod(__mask))                      \
				     : "memory");                            \
		__res;                                                       \
	})

#define __atomic_op_bit(op, mod, nr, addr) \
	__atomic_op_bit_ord(op, mod, nr, addr, .aqrl)

/* Bitmask modifiers */
#define __NOP(x) (x)
#define __NOT(x) (~(x))

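/* Atomically set bit 'nr' in the bitmap at 'addr'. */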
inline int atomic_raw_set_bit(int nr, volatile unsigned long *addr)
{
	return __atomic_op_bit(or, __NOP, nr, addr);
}

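/* Atomically clear bit 'nr' in the bitmap at 'addr'. */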
inline int atomic_raw_clear_bit(int nr, volatile unsigned long *addr)
{
	return __atomic_op_bit(and, __NOT, nr, addr);
}

inline int atomic_set_bit(int nr, atomic_t *atom)
{
	return atomic_raw_set_bit(nr, (unsigned long *)&atom->counter);
}

inline int atomic_clear_bit(int nr, atomic_t *atom)
{
	return atomic_raw_clear_bit(nr, (unsigned long *)&atom->counter);
}