xref: /netbsd/sys/arch/arm/arm/cpufunc_asm_arm8.S (revision bf9ec67e)
/*	$NetBSD: cpufunc_asm_arm8.S,v 1.2 2001/11/11 00:47:49 thorpej Exp $	*/

/*
 * Copyright (c) 1997 ARM Limited
 * Copyright (c) 1997 Causality Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Causality Limited.
 * 4. The name of Causality Limited may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY CAUSALITY LIMITED ``AS IS'' AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL CAUSALITY LIMITED BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * ARM8 assembly functions for CPU / MMU / TLB specific operations
 */

#include <machine/cpu.h>
#include <machine/asm.h>

/*
 * arm8_clock_config: read-modify-write the ARM8 clock/test register
 * (cp15 register 15).
 *
 * In:	r0 = mask of clock-register bits to clear
 *	r1 = bits to toggle (EOR) into the masked value
 * Out:	r0 = previous clock register value
 * Clobbers: r1, r2, r3
 *
 * Dynamic clocking (bit 0) is forced off and the L bit (bit 4) cleared
 * before touching the register, and dynamic clocking is only re-enabled
 * (if requested) by the final write.  NOTE(review): the four NOPs between
 * the two final writes presumably satisfy an ARM8 timing requirement for
 * clock-mode changes -- confirm against the ARM810 data sheet.
 */
ENTRY(arm8_clock_config)
	mrc	p15, 0, r3, c15, c0, 0	/* Read the clock register */
	bic	r2, r3, #0x11		/* turn off dynamic clocking
					   and clear L bit */
	mcr	p15, 0, r2, c15, c0, 0	/* Write clock register */

	bic	r2, r3, r0		/* Clear the caller-selected bits */
	eor	r2, r2, r1		/* XOR in the caller's toggle bits */
	bic	r2, r2, #0x10		/* clear the L bit */

	bic	r1, r2, #0x01		/* still keep dynamic clocking off */
	mcr	p15, 0, r1, c15, c0, 0	/* Write clock register */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mov	r0, r0			/* NOP */
	mcr	p15, 0, r2, c15, c0, 0 	/* Write clock register */
	mov	r0, r3			/* Return old value */
	mov	pc, lr

/*
 * Functions to set the MMU Translation Table Base register
 *
 * We need to clean and flush the cache as it uses virtual
 * addresses that are about to change.
 *
 * In:	r0 = new translation table base value for cp15 c2
 * Clobbers: r1, r2, r3 (r0-r3 are preserved across the clean call)
 */
ENTRY(arm8_setttb)
	/* Disable IRQs and FIQs so nothing runs (and dirties the cache)
	   between the clean and the table switch. */
	mrs	r3, cpsr_all
	orr	r1, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r1

	/* Write all dirty cache lines back to memory; r0-r3 and lr are
	   live across the call, so save them around it. */
	stmfd	sp!, {r0-r3, lr}
	bl	_C_LABEL(arm8_cache_cleanID)
	ldmfd	sp!, {r0-r3, lr}
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */

	/* Write the TTB */
	mcr	p15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	p15, 0, r0, c8, c7, 0

	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0

	/* Make sure that pipeline is emptied */
	mov	r0, r0
	mov	r0, r0
	msr	cpsr_all, r3		/* restore the caller's interrupt state */

	mov	pc, lr

/*
 * TLB functions
 */

/*
 * arm8_tlb_flushID: invalidate the entire unified (I+D) TLB.
 * (r0 is passed as the MCR data value; for the whole-TLB flush the
 * value is presumably a don't-care -- confirm against the ARM8 TRM.)
 */
ENTRY(arm8_tlb_flushID)
	mcr	p15, 0, r0, c8, c7, 0	/* flush I+D tlb */
	mov	pc, lr

/*
 * arm8_tlb_flushID_SE: invalidate a single unified TLB entry.
 * In:	r0 = entry to invalidate (presumably the virtual address --
 *	confirm against the ARM8 TRM).
 */
ENTRY(arm8_tlb_flushID_SE)
	mcr	p15, 0, r0, c8, c7, 1	/* flush I+D tlb single entry */
	mov	pc, lr

/*
 * Cache functions
 */

/*
 * arm8_cache_flushID: invalidate (without writing back) the entire
 * unified I+D cache.
 */
ENTRY(arm8_cache_flushID)
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
	mov	pc, lr

/*
 * arm8_cache_flushID_E: invalidate a single I+D cache entry.
 * In:	r0 = entry to invalidate (cp15 c7 entry-address format).
 */
ENTRY(arm8_cache_flushID_E)
	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
	mov	pc, lr

/*
 * arm8_cache_cleanID: write back (clean) every line of the cache.
 *
 * Walks the cache by index: the 16-way unrolled inner sequence cleans
 * 16 entries spaced 0x10 apart, and the outer loop advances the index
 * in the top bits (+0x04000000) until r0 wraps back to zero.
 * NOTE(review): the index/segment layout of the c7,c11 "clean entry"
 * address is implied by these constants only -- confirm against the
 * ARM810 data sheet.
 *
 * Clobbers: r0 (left at 0), r2
 */
ENTRY(arm8_cache_cleanID)
	mov	r0, #0x00000000		/* start at index 0 */

1:	mov	r2, r0
	mcr	p15, 0, r2, c7, c11, 1	/* clean entry */
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1

	adds	r0, r0, #0x04000000	/* next index block; Z set on wrap */
	bne	1b			/* loop until r0 wraps to 0 */

	mov	pc, lr

/*
 * arm8_cache_cleanID_E: clean (write back) a single I+D cache entry.
 * In:	r0 = entry to clean (cp15 c7 entry-address format).
 */
ENTRY(arm8_cache_cleanID_E)
	mcr	p15, 0, r0, c7, c11, 1	/* clean I+D single entry */
	mov	pc, lr

/*
 * arm8_cache_purgeID: clean and invalidate (purge) the entire cache.
 *
 * Same index walk as arm8_cache_cleanID, but each entry is cleaned and
 * then explicitly invalidated, with IRQs/FIQs disabled so a line cannot
 * be re-dirtied between the two operations.
 *
 * Clobbers: r0 (left at 0), r2 (r3 holds the saved CPSR)
 */
ENTRY(arm8_cache_purgeID)
	/*
	 * ARM810 bug 3
	 *
	 * Clean and invalidate entry will not invalidate the entry
	 * if the line was already clean. (mcr p15, 0, rd, c7, c15, 1)
	 *
	 * Instead of using the combined clean-and-invalidate entry
	 * operation, use separate clean entry and invalidate entry
	 * operations, i.e.
	 * mcr p15, 0, rd, c7, c11, 1
	 * mcr p15, 0, rd, c7, c7, 1
	 */

	mov	r0, #0x00000000		/* start at index 0 */

	/* Disable IRQs/FIQs for the duration of the purge. */
	mrs	r3, cpsr_all
	orr	r2, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r2

1:	mov	r2, r0
	mcr	p15, 0, r2, c7, c11, 1	/* clean entry */
	mcr	p15, 0, r2, c7, c7, 1	/* then invalidate it (bug 3) */
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1
	add	r2, r2, #0x10
	mcr	p15, 0, r2, c7, c11, 1
	mcr	p15, 0, r2, c7, c7, 1

	adds	r0, r0, #0x04000000	/* next index block; Z set on wrap */
	bne	1b			/* loop until r0 wraps to 0 */

	msr	cpsr_all, r3		/* restore the caller's interrupt state */
	mov	pc, lr

/*
 * arm8_cache_purgeID_E: clean and invalidate a single cache entry.
 * In:	r0 = entry to purge (cp15 c7 entry-address format).
 * IRQs/FIQs are disabled around the pair so the line cannot be
 * re-dirtied between the clean and the invalidate.
 */
ENTRY(arm8_cache_purgeID_E)
	/*
	 * ARM810 bug 3
	 *
	 * Clean and invalidate entry will not invalidate the entry
	 * if the line was already clean. (mcr p15, 0, rd, c7, c15, 1)
	 *
	 * Instead of using the combined clean-and-invalidate entry
	 * operation, use separate clean entry and invalidate entry
	 * operations, i.e.
	 * mcr p15, 0, rd, c7, c11, 1
	 * mcr p15, 0, rd, c7, c7, 1
	 */
	mrs	r3, cpsr_all
	orr	r2, r3, #(I32_bit | F32_bit)
	msr	cpsr_all, r2
	mcr	p15, 0, r0, c7, c11, 1	/* clean I+D single entry */
	mcr	p15, 0, r0, c7, c7, 1	/* flush I+D single entry */
	msr	cpsr_all, r3
	mov	pc, lr

/*
 * Context switch.
 *
 * These is the CPU-specific parts of the context switcher cpu_switch()
 * These functions actually perform the TTB reload.
 *
 * In:	r0 = new translation table base value for cp15 c2
 *
 * NOTE: Special calling convention
 *	r1, r4-r13 must be preserved
 */
ENTRY(arm8_context_switch)
	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */

	/* Write the TTB */
	mcr	p15, 0, r0, c2, c0, 0

	/* If we have updated the TTB we must flush the TLB */
	mcr	p15, 0, r0, c8, c7, 0	/* flush the I+D tlb */

#if 0
	/* For good measure we will flush the IDC as well */
	mcr	p15, 0, r0, c7, c7, 0	/* flush I+D cache */
#endif

	/* Make sure that pipeline is emptied */
	mov	r0, r0
	mov	r0, r0
	mov	pc, lr
284