1 /* $Id: spitfire.h,v 1.18 2001/11/29 16:42:10 kanoj Exp $
2 * spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
3 *
4 * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
5 */
6
7 #ifndef _SPARC64_SPITFIRE_H
8 #define _SPARC64_SPITFIRE_H
9
10 #include <asm/asi.h>
11
/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is there is a distinct and unique copy of
 * each these registers for each TLB (one per I-MMU, one per D-MMU).
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips */
#define TLB_SFSR		0x0000000000000018 /* All chips: Synchronous Fault Status */
#define TSB_REG			0x0000000000000028 /* All chips */
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips */
#define VIRT_WATCHPOINT		0x0000000000000038 /* All chips */
#define PHYS_WATCHPOINT		0x0000000000000040 /* All chips */
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later */
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only */
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later */
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later */
26
/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020 /* Synchronous Fault Address Register */
/* NOTE(review): the two watchpoint offsets below are identical redefinitions
 * of the ones in the per-TLB list above; benign, kept for completeness.
 */
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040
35
/* Spitfire has a single 64-entry TLB per MMU; entry 63 is the highest
 * index usable for locked translations.
 */
#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)

/* translation table entry (TTE) data-word bits */
#define SPITFIRE_TTE_WRITABLE	0x02	/* W: store permission */
#define SPITFIRE_TTE_PRIVILEGED	0x04	/* P: privileged-mode only */
#define SPITFIRE_TTE_EFFECT	0x08	/* E: side-effect (non-cacheable I/O) */
#define SPITFIRE_TTE_CV		0x10	/* CV: cacheable in virtually-indexed cache */
#define SPITFIRE_TTE_CP		0x20	/* CP: cacheable in physically-indexed cache */
#define SPITFIRE_TTE_LOCKED	0x40	/* L: entry locked against demap */
#define SPITFIRE_TTE_VALID	0x8000000000000000ULL	/* V: entry valid */
46
47 #ifndef __ASSEMBLY__
48
/* Which UltraSPARC TLB organization the boot cpu has; set early during
 * startup (definition lives elsewhere) and consulted by the MMU helpers.
 */
enum ultra_tlb_layout {
	spitfire = 0,		/* UltraSPARC-I/II: single 64-entry TLB per MMU */
	cheetah = 1,		/* UltraSPARC-III: 4-TLB layout (see comment below) */
	cheetah_plus = 2,	/* UltraSPARC-III+ and later */
};

extern enum ultra_tlb_layout tlb_type;
56
/* Cheetah's fully-associative (lockable) TLBs hold 16 entries each. */
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE		0x4000	/* 16KB level-1 data cache */

/* Highest TLB entry index usable for locked mappings on the running chip. */
#define sparc64_highest_locked_tlbent()	\
	(tlb_type == spitfire ?		\
	 SPITFIRE_HIGHEST_LOCKED_TLBENT : \
	 CHEETAH_HIGHEST_LOCKED_TLBENT)
65
/* Read the I-MMU Synchronous Fault Status Register. */
static __inline__ unsigned long spitfire_get_isfsr(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (ret)
			     : "r" (TLB_SFSR), "i" (ASI_IMMU));
	return ret;
}
75
/* Read the D-MMU Synchronous Fault Status Register. */
static __inline__ unsigned long spitfire_get_dsfsr(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (ret)
			     : "r" (TLB_SFSR), "i" (ASI_DMMU));
	return ret;
}
85
/* Read the D-MMU Synchronous Fault Address Register (faulting VA). */
static __inline__ unsigned long spitfire_get_sfar(void)
{
	unsigned long ret;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (ret)
			     : "r" (DMMU_SFAR), "i" (ASI_DMMU));
	return ret;
}
95
/* Write the I-MMU SFSR (typically to clear fault status). */
static __inline__ void spitfire_put_isfsr(unsigned long sfsr)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* no outputs */
			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
}
103
/* Write the D-MMU SFSR (typically to clear fault status). */
static __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* no outputs */
			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
}
111
/* Read the primary context register (current address-space context). */
static __inline__ unsigned long spitfire_get_primary_context(void)
{
	unsigned long ctx;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (ctx)
			     : "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
	return ctx;
}
121
/* Set the primary context register.
 * The context value is masked to 10 bits (0x3ff) — presumably the hardware
 * context field width on these chips; TODO confirm against chip docs.
 */
static __inline__ void spitfire_set_primary_context(unsigned long ctx)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (ctx & 0x3ff),
			       "r" (PRIMARY_CONTEXT), "i" (ASI_DMMU));
	/* NOTE(review): this second membar looks redundant with the one in
	 * the asm template above; kept as-is (also acts as a compiler
	 * memory barrier via the "memory" clobber).
	 */
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
131
/* Read the secondary context register. */
static __inline__ unsigned long spitfire_get_secondary_context(void)
{
	unsigned long ctx;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (ctx)
			     : "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
	return ctx;
}
141
/* Set the secondary context register; context masked to 10 bits as in
 * spitfire_set_primary_context().
 */
static __inline__ void spitfire_set_secondary_context(unsigned long ctx)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (ctx & 0x3ff),
			       "r" (SECONDARY_CONTEXT), "i" (ASI_DMMU));
	/* NOTE(review): trailing membar kept as-is; also serves as a
	 * compiler barrier.
	 */
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
151
/* The data cache is write through, so this just invalidates the
 * specified line by writing @tag into the D-cache tag RAM at @addr.
 */
static __inline__ void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
	/* NOTE(review): trailing membar kept as-is; also serves as a
	 * compiler barrier.
	 */
	__asm__ __volatile__ ("membar #Sync" : : : "memory");
}
163
/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline. It is possible for a line to
 * get flushed but stale instructions to still be in the pipeline,
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
static __inline__ void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}
177
/* Read the TTE data word of D-TLB entry @entry (entries are 8 bytes
 * apart in the diagnostic address space, hence entry << 3).
 */
static __inline__ unsigned long spitfire_get_dtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}
191
/* Read the tag word of D-TLB entry @entry. */
static __inline__ unsigned long spitfire_get_dtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
	return tag;
}
201
/* Write the TTE data word of D-TLB entry @entry. */
static __inline__ void spitfire_put_dtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
210
/* Read the TTE data word of I-TLB entry @entry. */
static __inline__ unsigned long spitfire_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}
224
/* Read the tag word of I-TLB entry @entry. */
static __inline__ unsigned long spitfire_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
	return tag;
}
234
/* Write the TTE data word of I-TLB entry @entry. */
static __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
243
/* Spitfire hardware assisted TLB flushes: a store to ASI_{D,I}MMU_DEMAP
 * demaps entries; the demap type/context selector is encoded in the
 * store address (0x40 = primary, 0x50 = secondary, 0x60 = nucleus
 * context demap — presumably per the UltraSPARC demap-operation
 * encoding; TODO confirm against chip docs).
 */

/* Context level flushes. */

/* Demap all D-TLB entries belonging to the primary context. */
static __inline__ void spitfire_flush_dtlb_primary_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x40), "i" (ASI_DMMU_DEMAP));
}
254
/* Demap all I-TLB entries belonging to the primary context. */
static __inline__ void spitfire_flush_itlb_primary_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x40), "i" (ASI_IMMU_DEMAP));
}
262
/* Demap all D-TLB entries belonging to the secondary context. */
static __inline__ void spitfire_flush_dtlb_secondary_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x50), "i" (ASI_DMMU_DEMAP));
}
270
/* Demap all I-TLB entries belonging to the secondary context. */
static __inline__ void spitfire_flush_itlb_secondary_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x50), "i" (ASI_IMMU_DEMAP));
}
278
/* Demap all D-TLB entries belonging to the nucleus (kernel) context. */
static __inline__ void spitfire_flush_dtlb_nucleus_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x60), "i" (ASI_DMMU_DEMAP));
}
286
/* Demap all I-TLB entries belonging to the nucleus (kernel) context. */
static __inline__ void spitfire_flush_itlb_nucleus_context(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x60), "i" (ASI_IMMU_DEMAP));
}
294
/* Page level flushes: @page is the page-aligned VA; context select bits
 * are OR'd into the low address bits (0x00 = primary, 0x10 = secondary,
 * 0x20 = nucleus).
 */

/* Demap the D-TLB entry mapping @page in the primary context. */
static __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page), "i" (ASI_DMMU_DEMAP));
}
303
/* Demap the I-TLB entry mapping @page in the primary context. */
static __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page), "i" (ASI_IMMU_DEMAP));
}
311
/* Demap the D-TLB entry mapping @page in the secondary context. */
static __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
}
319
/* Demap the I-TLB entry mapping @page in the secondary context. */
static __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
}
327
/* Demap the D-TLB entry mapping @page in the nucleus context. */
static __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}
335
/* Demap the I-TLB entry mapping @page in the nucleus context. */
static __inline__ void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}
343
/* Cheetah has "all non-locked" tlb flushes (demap-all, selector 0x80). */
static __inline__ void cheetah_flush_dtlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}
352
/* Demap all non-locked I-TLB entries (Cheetah only). */
static __inline__ void cheetah_flush_itlb_all(void)
{
	__asm__ __volatile__("stxa %%g0, [%0] %1\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}
360
/* Cheetah has a 4-tlb layout so direct access is a bit different.
 * The first two TLBs are fully assosciative, hold 16 entries, and are
 * used only for locked and >8K sized translations. One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2 way assosciative, and holds 512 entries. The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2 way
 * assosciative, and holds 128 entries.
 *
 * The diagnostic access address encodes the TLB select in bits 16+ and
 * the entry index in bits 3+.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads, doing the load twice fixes
 * the problem for me. -DaveM
 */

/* Read the TTE data word of locked-D-TLB (TLB 0) entry @entry.
 * The first ldxa into %g0 is a discarded warm-up load working around
 * the bogus-data erratum described above.
 */
static __inline__ unsigned long cheetah_get_ldtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}
387
/* Read the TTE data word of locked-I-TLB (TLB 0) entry @entry;
 * double load works around the bogus-data erratum (see layout
 * comment above).
 */
static __inline__ unsigned long cheetah_get_litlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}
400
/* Read the tag word of locked-D-TLB (TLB 0) entry @entry. */
static __inline__ unsigned long cheetah_get_ldtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_TAG_READ));

	return tag;
}
412
/* Read the tag word of locked-I-TLB (TLB 0) entry @entry. */
static __inline__ unsigned long cheetah_get_litlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_TAG_READ));

	return tag;
}
424
/* Write the TTE data word of locked-D-TLB (TLB 0) entry @entry. */
static __inline__ void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
434
/* Write the TTE data word of locked-I-TLB (TLB 0) entry @entry. */
static __inline__ void cheetah_put_litlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
444
/* Read the TTE data word of entry @entry in D-side TLB @tlb (caller
 * picks the TLB select value); double load works around the bogus-data
 * erratum (see layout comment above).
 */
static __inline__ unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}
456
/* Read the tag word of entry @entry in D-side TLB @tlb. */
static __inline__ unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
	return tag;
}
466
/* Write the TTE data word of entry @entry in D-side TLB @tlb. */
static __inline__ void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((tlb << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
476
/* Read the TTE data word of entry @entry in the large I-side TLB
 * (TLB select 2); double load works around the bogus-data erratum
 * (see layout comment above).
 */
static __inline__ unsigned long cheetah_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa [%1] %2, %%g0\n\t"
			     "ldxa [%1] %2, %0"
			     : "=r" (data)
			     : "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}
489
/* Read the tag word of entry @entry in the large I-side TLB (select 2). */
static __inline__ unsigned long cheetah_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa [%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
	return tag;
}
499
/* Write the TTE data word of entry @entry in the large I-side TLB
 * (select 2).
 */
static __inline__ void cheetah_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa %0, [%1] %2\n\t"
			     "membar #Sync"
			     : /* No outputs */
			     : "r" (data), "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}
508
509 #endif /* !(__ASSEMBLY__) */
510
511 #endif /* !(_SPARC64_SPITFIRE_H) */
512