/* Linux-specific atomic operations for Nios II Linux.
   Copyright (C) 2008-2019 Free Software Foundation, Inc.

   This file is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   This file is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* We implement byte, short and int versions of each atomic operation
   using the kernel helper defined below.  There is no support for
   64-bit operations yet.  */

/* Crash a userspace program with SIGSEGV.  */
#define ABORT_INSTRUCTION asm ("stw zero, 0(zero)")

/* Kernel helper for compare-and-exchange of a 32-bit value.  */
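/* The helper is reached by calling the fixed address 0x1004 provided
   by the kernel; arguments go in r4/r5/r6 and the result comes back in
   r2, following the standard Nios II calling convention.  A zero
   return value means *MEM contained OLDVAL and was replaced with
   NEWVAL; nonzero means the exchange did not take place.  */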
static inline long
__kernel_cmpxchg (int oldval, int newval, int *mem)
{
  register int r2 asm ("r2");
  register int *r4 asm ("r4") = mem;
  register int r5 asm ("r5") = oldval;
  register int r6 asm ("r6") = newval;

  /* Call the kernel-provided fixed-address cmpxchg helper routine.  */
  asm volatile ("movi %0, %4\n\t"
                "callr %0\n"
                : "=r" (r2)
                : "r" (r4), "r" (r5), "r" (r6), "I" (0x00001004)
                : "ra", "memory");
  return r2;
}

#define HIDDEN __attribute__ ((visibility ("hidden")))

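/* INVERT_MASK_* and MASK_* are used by the subword helpers below to
   locate a 1- or 2-byte value within its naturally aligned 4-byte
   word; the bit offset depends on the target byte order.  */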
#ifdef __nios2_little_endian__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

#define MASK_1 0xffu
#define MASK_2 0xffffu

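/* Word-sized fetch-and-<op>: read the current value, compute the new
   value, and retry the kernel cmpxchg until no other writer has
   intervened.  The result is the value seen before the operation.  */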
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_fetch_and_##OP##_4 (int *ptr, int val) \
  { \
    int failure, tmp; \
\
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
\
    return tmp; \
  }

FETCH_AND_OP_WORD (add, , +)
FETCH_AND_OP_WORD (sub, , -)
FETCH_AND_OP_WORD (or, , |)
FETCH_AND_OP_WORD (and, , &)
FETCH_AND_OP_WORD (xor, , ^)
FETCH_AND_OP_WORD (nand, ~, &)

#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */

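/* The RETURN argument selects which value the expanded function
   returns: "oldval" gives the __sync_fetch_and_<op> flavor and
   "newval" gives the __sync_<op>_and_fetch flavor (see the NAME_*
   macros above).  */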
#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
  TYPE HIDDEN \
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
  { \
    int *wordptr = (int *) ((unsigned long) ptr & ~3); \
    unsigned int mask, shift, oldval, newval; \
    int failure; \
\
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
\
    do { \
      oldval = *wordptr; \
      newval = ((PFX_OP (((oldval & mask) >> shift) \
                         INF_OP (unsigned int) val)) << shift) & mask; \
      newval |= oldval & ~mask; \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
\
    return (RETURN & mask) >> shift; \
  }

SUBWORD_SYNC_OP (add, , +, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (or, , |, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (and, , &, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)

SUBWORD_SYNC_OP (add, , +, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (or, , |, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (and, , &, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)

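/* Word-sized <op>-and-fetch: the same compare-and-swap loop as above,
   but the updated value is returned instead of the original one.  */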
#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_##OP##_and_fetch_4 (int *ptr, int val) \
  { \
    int tmp, failure; \
\
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
\
    return PFX_OP (tmp INF_OP val); \
  }

OP_AND_FETCH_WORD (add, , +)
OP_AND_FETCH_WORD (sub, , -)
OP_AND_FETCH_WORD (or, , |)
OP_AND_FETCH_WORD (and, , &)
OP_AND_FETCH_WORD (xor, , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add, , +, unsigned short, 2, newval)
SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, newval)
SUBWORD_SYNC_OP (or, , |, unsigned short, 2, newval)
SUBWORD_SYNC_OP (and, , &, unsigned short, 2, newval)
SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)

SUBWORD_SYNC_OP (add, , +, unsigned char, 1, newval)
SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, newval)
SUBWORD_SYNC_OP (or, , |, unsigned char, 1, newval)
SUBWORD_SYNC_OP (and, , &, unsigned char, 1, newval)
SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)

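/* Value compare-and-swap: the return value is the contents observed
   in *PTR, which equals OLDVAL exactly when the swap succeeded.  */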
int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (oldval != actual_oldval)
        return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (!fail)
        return oldval;
    }
}

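/* Subword compare-and-swap, emulated with a cmpxchg loop on the
   containing aligned word.  */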
#define SUBWORD_VAL_CAS(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                       TYPE newval) \
  { \
    int *wordptr = (int *)((unsigned long) ptr & ~3), fail; \
    unsigned int mask, shift, actual_oldval, actual_newval; \
\
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
\
    while (1) \
      { \
        actual_oldval = *wordptr; \
\
        if (((actual_oldval & mask) >> shift) != (unsigned int) oldval) \
          return (actual_oldval & mask) >> shift; \
\
        actual_newval = (actual_oldval & ~mask) \
                        | (((unsigned int) newval << shift) & mask); \
\
        fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
                                 wordptr); \
\
        if (!fail) \
          return oldval; \
      } \
  }

SUBWORD_VAL_CAS (unsigned short, 2)
SUBWORD_VAL_CAS (unsigned char, 1)

typedef unsigned char bool;

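/* Boolean compare-and-swap: a nonzero result means the exchange
   succeeded.  */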
bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

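/* Subword boolean compare-and-swap, layered on the value version
   above.  */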
#define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
  bool HIDDEN \
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                        TYPE newval) \
  { \
    TYPE actual_oldval \
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
    return (oldval == actual_oldval); \
  }

SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char, 1)

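/* __sync_lock_test_and_set is implemented as an unconditional atomic
   exchange: VAL is stored and the previous contents are returned.  */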
int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}

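/* Subword atomic exchange, again emulated on the containing aligned
   word.  */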
#define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
  { \
    int failure; \
    unsigned int oldval, newval, shift, mask; \
    int *wordptr = (int *) ((unsigned long) ptr & ~3); \
\
    shift = (((unsigned long) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
\
    do { \
      oldval = *wordptr; \
      newval = (oldval & ~mask) \
               | (((unsigned int) val << shift) & mask); \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
\
    return (oldval & mask) >> shift; \
  }

SUBWORD_TEST_AND_SET (unsigned short, 2)
SUBWORD_TEST_AND_SET (unsigned char, 1)

#define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
  void HIDDEN \
  __sync_lock_release_##WIDTH (TYPE *ptr) \
  { \
    /* All writes before this point must be seen before we release \
       the lock itself.  */ \
    __builtin_sync (); \
    *ptr = 0; \
  }

SYNC_LOCK_RELEASE (int, 4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char, 1)