xref: /linux/arch/loongarch/include/asm/tlb.h (revision d6fd48ef)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4  */
5 #ifndef __ASM_TLB_H
6 #define __ASM_TLB_H
7 
8 #include <linux/mm_types.h>
9 #include <asm/cpu-features.h>
10 #include <asm/loongarch.h>
11 
12 /*
13  * TLB Invalidate Flush
14  */
/* Execute the TLBCLR instruction (clear matched TLB entries per current CSR state). */
static inline void tlbclr(void)
{
	__asm__ __volatile__("tlbclr");
}
19 
/* Execute the TLBFLUSH instruction (flush TLB entries per current CSR state). */
static inline void tlbflush(void)
{
	__asm__ __volatile__("tlbflush");
}
24 
25 /*
26  * TLB R/W operations.
27  */
/* Execute TLBSRCH: probe the TLB for an entry matching the current CSR setup. */
static inline void tlb_probe(void)
{
	__asm__ __volatile__("tlbsrch");
}
32 
/* Execute TLBRD: read the indexed TLB entry into the TLB-related CSRs. */
static inline void tlb_read(void)
{
	__asm__ __volatile__("tlbrd");
}
37 
/* Execute TLBWR: write the TLB entry selected by the current index CSR. */
static inline void tlb_write_indexed(void)
{
	__asm__ __volatile__("tlbwr");
}
42 
/* Execute TLBFILL: write a TLB entry at a hardware-chosen (random) index. */
static inline void tlb_write_random(void)
{
	__asm__ __volatile__("tlbfill");
}
47 
/*
 * Operation codes for the INVTLB instruction (passed as the "op" argument
 * of the invtlb*() wrappers below).  "current tlb" refers to the TLB used
 * for the current address translation mode; the 0x9-0x16 codes operate on
 * guest (gid-tagged) TLB entries.
 */
enum invtlb_ops {
	/* Invalidate all TLB entries */
	INVTLB_ALL = 0x0,
	/* Invalidate all entries in the current TLB */
	INVTLB_CURRENT_ALL = 0x1,
	/* Invalidate all global==1 entries in the current TLB */
	INVTLB_CURRENT_GTRUE = 0x2,
	/* Invalidate all global==0 entries in the current TLB */
	INVTLB_CURRENT_GFALSE = 0x3,
	/* Invalidate global==0 entries with matching ASID in the current TLB */
	INVTLB_GFALSE_AND_ASID = 0x4,
	/* Invalidate the entry for addr, with global==0 and matching ASID, in the current TLB */
	INVTLB_ADDR_GFALSE_AND_ASID = 0x5,
	/* Invalidate the entry for addr, with global==1 or matching ASID, in the current TLB */
	INVTLB_ADDR_GTRUE_OR_ASID = 0x6,
	/* Invalidate guest TLB entries with matching gid */
	INVGTLB_GID = 0x9,
	/* Invalidate guest TLB entries with global==1 and matching gid */
	INVGTLB_GID_GTRUE = 0xa,
	/* Invalidate guest TLB entries with global==0 and matching gid */
	INVGTLB_GID_GFALSE = 0xb,
	/* Invalidate guest TLB entries with global==0 and matching gid and ASID */
	INVGTLB_GID_GFALSE_ASID = 0xc,
	/* Invalidate guest TLB entries with global==0 and matching gid, ASID and addr */
	INVGTLB_GID_GFALSE_ASID_ADDR = 0xd,
	/* Invalidate guest TLB entries with global==1 and matching gid, ASID and addr */
	INVGTLB_GID_GTRUE_ASID_ADDR = 0xe,
	/* Invalidate all gva-->gpa guest TLB entries, any gid */
	INVGTLB_ALLGID_GVA_TO_GPA = 0x10,
	/* Invalidate all gpa-->hpa TLB entries, any gid */
	INVTLB_ALLGID_GPA_TO_HPA = 0x11,
	/* Invalidate all TLB entries of any gid, both gva-->gpa and gpa-->hpa */
	INVTLB_ALLGID = 0x12,
	/* Invalidate gva-->gpa guest TLB entries with matching gid */
	INVGTLB_GID_GVA_TO_GPA = 0x13,
	/* Invalidate gpa-->hpa TLB entries with matching gid */
	INVTLB_GID_GPA_TO_HPA = 0x14,
	/* Invalidate TLB entries with matching gid, both gva-->gpa and gpa-->hpa */
	INVTLB_GID_ALL = 0x15,
	/* Invalidate gpa-->hpa TLB entries with matching gid and addr */
	INVTLB_GID_ADDR = 0x16,
};
90 
91 /*
92  * invtlb op info addr
93  * (0x1 << 26) | (0x24 << 20) | (0x13 << 15) |
94  * (addr << 10) | (info << 5) | op
95  */
96 static inline void invtlb(u32 op, u32 info, u64 addr)
97 {
98 	__asm__ __volatile__(
99 		"parse_r addr,%0\n\t"
100 		"parse_r info,%1\n\t"
101 		".word ((0x6498000) | (addr << 10) | (info << 5) | %2)\n\t"
102 		:
103 		: "r"(addr), "r"(info), "i"(op)
104 		:
105 		);
106 }
107 
108 static inline void invtlb_addr(u32 op, u32 info, u64 addr)
109 {
110 	__asm__ __volatile__(
111 		"parse_r addr,%0\n\t"
112 		".word ((0x6498000) | (addr << 10) | (0 << 5) | %1)\n\t"
113 		:
114 		: "r"(addr), "i"(op)
115 		:
116 		);
117 }
118 
119 static inline void invtlb_info(u32 op, u32 info, u64 addr)
120 {
121 	__asm__ __volatile__(
122 		"parse_r info,%0\n\t"
123 		".word ((0x6498000) | (0 << 10) | (info << 5) | %1)\n\t"
124 		:
125 		: "r"(info), "i"(op)
126 		:
127 		);
128 }
129 
130 static inline void invtlb_all(u32 op, u32 info, u64 addr)
131 {
132 	__asm__ __volatile__(
133 		".word ((0x6498000) | (0 << 10) | (0 << 5) | %0)\n\t"
134 		:
135 		: "i"(op)
136 		:
137 		);
138 }
139 
140 #define __tlb_remove_tlb_entry(tlb, ptep, address) do { } while (0)
141 
142 static void tlb_flush(struct mmu_gather *tlb);
143 
144 #define tlb_flush tlb_flush
145 #include <asm-generic/tlb.h>
146 
147 static inline void tlb_flush(struct mmu_gather *tlb)
148 {
149 	struct vm_area_struct vma;
150 
151 	vma.vm_mm = tlb->mm;
152 	vm_flags_init(&vma, 0);
153 	if (tlb->fullmm) {
154 		flush_tlb_mm(tlb->mm);
155 		return;
156 	}
157 
158 	flush_tlb_range(&vma, tlb->start, tlb->end);
159 }
160 
161 extern void handle_tlb_load(void);
162 extern void handle_tlb_store(void);
163 extern void handle_tlb_modify(void);
164 extern void handle_tlb_refill(void);
165 extern void handle_tlb_protect(void);
166 
167 extern void dump_tlb_all(void);
168 extern void dump_tlb_regs(void);
169 
170 #endif /* __ASM_TLB_H */
171