/* $NetBSD: cache_r4k.h,v 1.16 2016/07/12 15:56:23 skrll Exp $ */

/*
 * Copyright 2001 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Jason R. Thorpe for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed for the NetBSD Project by
 *	Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Cache definitions/operations for R4000-style caches.
 */
41
42 #define CACHE_R4K_I 0
43 #define CACHE_R4K_D 1
44 #define CACHE_R4K_SI 2
45 #define CACHE_R4K_SD 3
46
47 #define CACHEOP_R4K_INDEX_INV (0 << 2) /* I, SI */
48 #define CACHEOP_R4K_INDEX_WB_INV (0 << 2) /* D, SD */
49 #define CACHEOP_R4K_INDEX_LOAD_TAG (1 << 2) /* all */
50 #define CACHEOP_R4K_INDEX_STORE_TAG (2 << 2) /* all */
51 #define CACHEOP_R4K_CREATE_DIRTY_EXCL (3 << 2) /* D, SD */
52 #define CACHEOP_R4K_HIT_INV (4 << 2) /* all */
53 #define CACHEOP_R4K_HIT_WB_INV (5 << 2) /* D, SD */
54 #define CACHEOP_R4K_FILL (5 << 2) /* I */
55 #define CACHEOP_R4K_HIT_WB (6 << 2) /* I, D, SD */
56 #define CACHEOP_R4K_HIT_SET_VIRTUAL (7 << 2) /* SI, SD */
57
58 #if !defined(_LOCORE)
59
60 #if 1
61 /*
62 * cache_r4k_op_line:
63 *
64 * Perform the specified cache operation on a single line.
65 */
66 #define cache_op_r4k_line(va, op) \
67 { \
68 __asm volatile( \
69 ".set push" "\n\t" \
70 ".set noreorder" "\n\t" \
71 "cache %0, 0(%[va])" "\n\t" \
72 ".set pop" \
73 : \
74 : "i" (op), [va] "r" (va) \
75 : "memory"); \
76 }
77
78 /*
79 * cache_r4k_op_8lines_NN:
80 *
81 * Perform the specified cache operation on 8 n-byte cache lines.
82 */
83 static inline void
cache_r4k_op_8lines_NN(size_t n,register_t va,u_int op)84 cache_r4k_op_8lines_NN(size_t n, register_t va, u_int op)
85 {
86 __asm volatile(
87 ".set push" "\n\t"
88 ".set noreorder" "\n\t"
89 "cache %[op], (0*%[n])(%[va])" "\n\t"
90 "cache %[op], (1*%[n])(%[va])" "\n\t"
91 "cache %[op], (2*%[n])(%[va])" "\n\t"
92 "cache %[op], (3*%[n])(%[va])" "\n\t"
93 "cache %[op], (4*%[n])(%[va])" "\n\t"
94 "cache %[op], (5*%[n])(%[va])" "\n\t"
95 "cache %[op], (6*%[n])(%[va])" "\n\t"
96 "cache %[op], (7*%[n])(%[va])" "\n\t"
97 ".set pop"
98 :
99 : [va] "r" (va), [op] "i" (op), [n] "n" (n)
100 : "memory");
101 }
102
103 /*
104 * cache_r4k_op_8lines_16:
105 * Perform the specified cache operation on 8 16-byte cache lines.
106 * cache_r4k_op_8lines_32:
107 * Perform the specified cache operation on 8 32-byte cache lines.
108 */
109 #define cache_r4k_op_8lines_16(va, op) \
110 cache_r4k_op_8lines_NN(16, (va), (op))
111 #define cache_r4k_op_8lines_32(va, op) \
112 cache_r4k_op_8lines_NN(32, (va), (op))
113 #define cache_r4k_op_8lines_64(va, op) \
114 cache_r4k_op_8lines_NN(64, (va), (op))
115 #define cache_r4k_op_8lines_128(va, op) \
116 cache_r4k_op_8lines_NN(128, (va), (op))
117
118 /*
119 * cache_r4k_op_32lines_NN:
120 *
121 * Perform the specified cache operation on 32 n-byte cache lines.
122 */
123 #define cache_r4k_op_32lines_NN(n, va, op) \
124 { \
125 __asm volatile( \
126 ".set push" "\n\t" \
127 ".set noreorder" "\n\t" \
128 "cache %2, (0*%0)(%[va])" "\n\t" \
129 "cache %2, (1*%0)(%[va])" "\n\t" \
130 "cache %2, (2*%0)(%[va])" "\n\t" \
131 "cache %2, (3*%0)(%[va])" "\n\t" \
132 "cache %2, (4*%0)(%[va])" "\n\t" \
133 "cache %2, (5*%0)(%[va])" "\n\t" \
134 "cache %2, (6*%0)(%[va])" "\n\t" \
135 "cache %2, (7*%0)(%[va])" "\n\t" \
136 "cache %2, (8*%0)(%[va])" "\n\t" \
137 "cache %2, (9*%0)(%[va])" "\n\t" \
138 "cache %2, (10*%0)(%[va])" "\n\t" \
139 "cache %2, (11*%0)(%[va])" "\n\t" \
140 "cache %2, (12*%0)(%[va])" "\n\t" \
141 "cache %2, (13*%0)(%[va])" "\n\t" \
142 "cache %2, (14*%0)(%[va])" "\n\t" \
143 "cache %2, (15*%0)(%[va])" "\n\t" \
144 "cache %2, (16*%0)(%[va])" "\n\t" \
145 "cache %2, (17*%0)(%[va])" "\n\t" \
146 "cache %2, (18*%0)(%[va])" "\n\t" \
147 "cache %2, (19*%0)(%[va])" "\n\t" \
148 "cache %2, (20*%0)(%[va])" "\n\t" \
149 "cache %2, (21*%0)(%[va])" "\n\t" \
150 "cache %2, (22*%0)(%[va])" "\n\t" \
151 "cache %2, (23*%0)(%[va])" "\n\t" \
152 "cache %2, (24*%0)(%[va])" "\n\t" \
153 "cache %2, (25*%0)(%[va])" "\n\t" \
154 "cache %2, (26*%0)(%[va])" "\n\t" \
155 "cache %2, (27*%0)(%[va])" "\n\t" \
156 "cache %2, (28*%0)(%[va])" "\n\t" \
157 "cache %2, (29*%0)(%[va])" "\n\t" \
158 "cache %2, (30*%0)(%[va])" "\n\t" \
159 "cache %2, (31*%0)(%[va])" "\n\t" \
160 ".set pop" \
161 : \
162 : "i" (n), [va] "r" (va), "i" (op) \
163 : "memory"); \
164 }
165
166 /*
167 * cache_r4k_op_32lines_16:
168 *
169 * Perform the specified cache operation on 32 16-byte cache lines.
170 */
171 #define cache_r4k_op_32lines_16(va, op) \
172 cache_r4k_op_32lines_NN(16, va, op)
173 #define cache_r4k_op_32lines_32(va, op) \
174 cache_r4k_op_32lines_NN(32, va, op)
175 #define cache_r4k_op_32lines_64(va, op) \
176 cache_r4k_op_32lines_NN(64, va, op)
177 #define cache_r4k_op_32lines_128(va, op) \
178 cache_r4k_op_32lines_NN(128, va, op)
179
180 /*
181 * cache_r4k_op_16lines_16_2way:
182 * Perform the specified cache operation on 16 n-byte cache lines, 2-ways.
183 */
184 static inline void
cache_r4k_op_16lines_NN_2way(size_t n,register_t va1,register_t va2,u_int op)185 cache_r4k_op_16lines_NN_2way(size_t n, register_t va1, register_t va2, u_int op)
186 {
187 __asm volatile(
188 ".set push" "\n\t"
189 ".set noreorder" "\n\t"
190 "cache %[op], (0*%[n])(%[va1])" "\n\t"
191 "cache %[op], (0*%[n])(%[va2])" "\n\t"
192 "cache %[op], (1*%[n])(%[va1])" "\n\t"
193 "cache %[op], (1*%[n])(%[va2])" "\n\t"
194 "cache %[op], (2*%[n])(%[va1])" "\n\t"
195 "cache %[op], (2*%[n])(%[va2])" "\n\t"
196 "cache %[op], (3*%[n])(%[va1])" "\n\t"
197 "cache %[op], (3*%[n])(%[va2])" "\n\t"
198 "cache %[op], (4*%[n])(%[va1])" "\n\t"
199 "cache %[op], (4*%[n])(%[va2])" "\n\t"
200 "cache %[op], (5*%[n])(%[va1])" "\n\t"
201 "cache %[op], (5*%[n])(%[va2])" "\n\t"
202 "cache %[op], (6*%[n])(%[va1])" "\n\t"
203 "cache %[op], (6*%[n])(%[va2])" "\n\t"
204 "cache %[op], (7*%[n])(%[va1])" "\n\t"
205 "cache %[op], (7*%[n])(%[va2])" "\n\t"
206 "cache %[op], (8*%[n])(%[va1])" "\n\t"
207 "cache %[op], (8*%[n])(%[va2])" "\n\t"
208 "cache %[op], (9*%[n])(%[va1])" "\n\t"
209 "cache %[op], (9*%[n])(%[va2])" "\n\t"
210 "cache %[op], (10*%[n])(%[va1])" "\n\t"
211 "cache %[op], (10*%[n])(%[va2])" "\n\t"
212 "cache %[op], (11*%[n])(%[va1])" "\n\t"
213 "cache %[op], (11*%[n])(%[va2])" "\n\t"
214 "cache %[op], (12*%[n])(%[va1])" "\n\t"
215 "cache %[op], (12*%[n])(%[va2])" "\n\t"
216 "cache %[op], (13*%[n])(%[va1])" "\n\t"
217 "cache %[op], (13*%[n])(%[va2])" "\n\t"
218 "cache %[op], (14*%[n])(%[va1])" "\n\t"
219 "cache %[op], (14*%[n])(%[va2])" "\n\t"
220 "cache %[op], (15*%[n])(%[va1])" "\n\t"
221 "cache %[op], (15*%[n])(%[va2])" "\n\t"
222 ".set pop"
223 :
224 : [va1] "r" (va1), [va2] "r" (va2), [op] "i" (op), [n] "n" (n)
225 : "memory");
226 }
227
228 /*
229 * cache_r4k_op_16lines_16_2way:
230 * Perform the specified cache operation on 16 16-byte cache lines, 2-ways.
231 * cache_r4k_op_16lines_32_2way:
232 * Perform the specified cache operation on 16 32-byte cache lines, 2-ways.
233 */
234 #define cache_r4k_op_16lines_16_2way(va1, va2, op) \
235 cache_r4k_op_16lines_NN_2way(16, (va1), (va2), (op))
236 #define cache_r4k_op_16lines_32_2way(va1, va2, op) \
237 cache_r4k_op_16lines_NN_2way(32, (va1), (va2), (op))
238 #define cache_r4k_op_16lines_64_2way(va1, va2, op) \
239 cache_r4k_op_16lines_NN_2way(64, (va1), (va2), (op))
240
241 /*
242 * cache_r4k_op_8lines_NN_4way:
243 * Perform the specified cache operation on 8 n-byte cache lines, 4-ways.
244 */
245 static inline void
cache_r4k_op_8lines_NN_4way(size_t n,register_t va1,register_t va2,register_t va3,register_t va4,u_int op)246 cache_r4k_op_8lines_NN_4way(size_t n, register_t va1, register_t va2,
247 register_t va3, register_t va4, u_int op)
248 {
249 __asm volatile(
250 ".set push" "\n\t"
251 ".set noreorder" "\n\t"
252 "cache %[op], (0*%[n])(%[va1])" "\n\t"
253 "cache %[op], (0*%[n])(%[va2])" "\n\t"
254 "cache %[op], (0*%[n])(%[va3])" "\n\t"
255 "cache %[op], (0*%[n])(%[va4])" "\n\t"
256 "cache %[op], (1*%[n])(%[va1])" "\n\t"
257 "cache %[op], (1*%[n])(%[va2])" "\n\t"
258 "cache %[op], (1*%[n])(%[va3])" "\n\t"
259 "cache %[op], (1*%[n])(%[va4])" "\n\t"
260 "cache %[op], (2*%[n])(%[va1])" "\n\t"
261 "cache %[op], (2*%[n])(%[va2])" "\n\t"
262 "cache %[op], (2*%[n])(%[va3])" "\n\t"
263 "cache %[op], (2*%[n])(%[va4])" "\n\t"
264 "cache %[op], (3*%[n])(%[va1])" "\n\t"
265 "cache %[op], (3*%[n])(%[va2])" "\n\t"
266 "cache %[op], (3*%[n])(%[va3])" "\n\t"
267 "cache %[op], (3*%[n])(%[va4])" "\n\t"
268 "cache %[op], (4*%[n])(%[va1])" "\n\t"
269 "cache %[op], (4*%[n])(%[va2])" "\n\t"
270 "cache %[op], (4*%[n])(%[va3])" "\n\t"
271 "cache %[op], (4*%[n])(%[va4])" "\n\t"
272 "cache %[op], (5*%[n])(%[va1])" "\n\t"
273 "cache %[op], (5*%[n])(%[va2])" "\n\t"
274 "cache %[op], (5*%[n])(%[va3])" "\n\t"
275 "cache %[op], (5*%[n])(%[va4])" "\n\t"
276 "cache %[op], (6*%[n])(%[va1])" "\n\t"
277 "cache %[op], (6*%[n])(%[va2])" "\n\t"
278 "cache %[op], (6*%[n])(%[va3])" "\n\t"
279 "cache %[op], (6*%[n])(%[va4])" "\n\t"
280 "cache %[op], (7*%[n])(%[va1])" "\n\t"
281 "cache %[op], (7*%[n])(%[va2])" "\n\t"
282 "cache %[op], (7*%[n])(%[va3])" "\n\t"
283 "cache %[op], (7*%[n])(%[va4])" "\n\t"
284 ".set pop"
285 :
286 : [va1] "r" (va1), [va2] "r" (va2),
287 [va3] "r" (va3), [va4] "r" (va4),
288 [op] "i" (op), [n] "n" (n)
289 : "memory");
290 }
291 /*
292 * cache_r4k_op_8lines_16_4way:
293 * Perform the specified cache operation on 8 16-byte cache lines, 4-ways.
294 * cache_r4k_op_8lines_32_4way:
295 * Perform the specified cache operation on 8 32-byte cache lines, 4-ways.
296 */
297 #define cache_r4k_op_8lines_16_4way(va1, va2, va3, va4, op) \
298 cache_r4k_op_8lines_NN_4way(16, (va1), (va2), (va3), (va4), (op))
299 #define cache_r4k_op_8lines_32_4way(va1, va2, va3, va4, op) \
300 cache_r4k_op_8lines_NN_4way(32, (va1), (va2), (va3), (va4), (op))
301 #define cache_r4k_op_8lines_64_4way(va1, va2, va3, va4, op) \
302 cache_r4k_op_8lines_NN_4way(64, (va1), (va2), (va3), (va4), (op))
303 #define cache_r4k_op_8lines_128_4way(va1, va2, va3, va4, op) \
304 cache_r4k_op_8lines_NN_4way(128, (va1), (va2), (va3), (va4), (op))
305 #endif
306
307 /* cache_r4k.c */
308
309 void r4k_icache_sync_all_generic(void);
310 void r4k_icache_sync_range_generic(register_t, vsize_t);
311 void r4k_icache_sync_range_index_generic(vaddr_t, vsize_t);
312 void r4k_pdcache_wbinv_all_generic(void);
313 void r4k_sdcache_wbinv_all_generic(void);
314
315 /* cache_r4k_pcache16.S */
316
317 void cache_r4k_icache_index_inv_16(vaddr_t, vsize_t);
318 void cache_r4k_icache_hit_inv_16(register_t, vsize_t);
319 void cache_r4k_pdcache_index_wb_inv_16(vaddr_t, vsize_t);
320 void cache_r4k_pdcache_hit_inv_16(register_t, vsize_t);
321 void cache_r4k_pdcache_hit_wb_inv_16(register_t, vsize_t);
322 void cache_r4k_pdcache_hit_wb_16(register_t, vsize_t);
323
324 /* cache_r4k_scache16.S */
325
326 void cache_r4k_sdcache_index_wb_inv_16(vaddr_t, vsize_t);
327 void cache_r4k_sdcache_hit_inv_16(register_t, vsize_t);
328 void cache_r4k_sdcache_hit_wb_inv_16(register_t, vsize_t);
329 void cache_r4k_sdcache_hit_wb_16(register_t, vsize_t);
330
331 /* cache_r4k_pcache32.S */
332
333 void cache_r4k_icache_index_inv_32(vaddr_t, vsize_t);
334 void cache_r4k_icache_hit_inv_32(register_t, vsize_t);
335 void cache_r4k_pdcache_index_wb_inv_32(vaddr_t, vsize_t);
336 void cache_r4k_pdcache_hit_inv_32(register_t, vsize_t);
337 void cache_r4k_pdcache_hit_wb_inv_32(register_t, vsize_t);
338 void cache_r4k_pdcache_hit_wb_32(register_t, vsize_t);
339
340 /* cache_r4k_scache32.S */
341
342 void cache_r4k_sdcache_index_wb_inv_32(vaddr_t, vsize_t);
343 void cache_r4k_sdcache_hit_inv_32(register_t, vsize_t);
344 void cache_r4k_sdcache_hit_wb_inv_32(register_t, vsize_t);
345 void cache_r4k_sdcache_hit_wb_32(register_t, vsize_t);
346
347 /* cache_r4k_pcache64.S */
348
349 void cache_r4k_icache_index_inv_64(vaddr_t, vsize_t);
350 void cache_r4k_icache_hit_inv_64(register_t, vsize_t);
351 void cache_r4k_pdcache_index_wb_inv_64(vaddr_t, vsize_t);
352 void cache_r4k_pdcache_hit_inv_64(register_t, vsize_t);
353 void cache_r4k_pdcache_hit_wb_inv_64(register_t, vsize_t);
354 void cache_r4k_pdcache_hit_wb_64(register_t, vsize_t);
355
356 /* cache_r4k_scache64.S */
357
358 void cache_r4k_sdcache_index_wb_inv_64(vaddr_t, vsize_t);
359 void cache_r4k_sdcache_hit_inv_64(register_t, vsize_t);
360 void cache_r4k_sdcache_hit_wb_inv_64(register_t, vsize_t);
361 void cache_r4k_sdcache_hit_wb_64(register_t, vsize_t);
362
363 /* cache_r4k_pcache128.S */
364
365 void cache_r4k_icache_index_inv_128(vaddr_t, vsize_t);
366 void cache_r4k_icache_hit_inv_128(register_t, vsize_t);
367 void cache_r4k_pdcache_index_wb_inv_128(vaddr_t, vsize_t);
368 void cache_r4k_pdcache_hit_inv_128(register_t, vsize_t);
369 void cache_r4k_pdcache_hit_wb_inv_128(register_t, vsize_t);
370 void cache_r4k_pdcache_hit_wb_128(register_t, vsize_t);
371
372 /* cache_r4k_scache128.S */
373
374 void cache_r4k_sdcache_index_wb_inv_128(vaddr_t, vsize_t);
375 void cache_r4k_sdcache_hit_inv_128(register_t, vsize_t);
376 void cache_r4k_sdcache_hit_wb_inv_128(register_t, vsize_t);
377 void cache_r4k_sdcache_hit_wb_128(register_t, vsize_t);
378
379 #endif /* !_LOCORE */
380