/*-
 * Copyright (c) 2014 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 */

#include <machine/asm.h>
#include <machine/armreg.h>
__FBSDID("$FreeBSD$");

#include "assym.inc"

	.text

/*
 * This is limited to 28 instructions as it's placed in the exception vector
 * slot that is 32 instructions long. We need one instruction for the branch
 * to the handler and three for the trailing dsb; isb; brk sequence.
 */
.macro	save_registers_head el
.if \el == 1
	mov	x18, sp
	stp	x0,  x1,  [sp, #(TF_X - TF_SIZE - 128)]!
.else
	stp	x0,  x1,  [sp, #(TF_X - TF_SIZE)]!
.endif
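	/*
	 * The pre-indexed store above allocates the trapframe (plus a
	 * further 128 bytes below it for EL1) and leaves sp pointing at
	 * tf_x[0], so the offsets below are relative to TF_X.
	 */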
	stp	x2,  x3,  [sp, #(2  * 8)]
	stp	x4,  x5,  [sp, #(4  * 8)]
	stp	x6,  x7,  [sp, #(6  * 8)]
	stp	x8,  x9,  [sp, #(8  * 8)]
	stp	x10, x11, [sp, #(10 * 8)]
	stp	x12, x13, [sp, #(12 * 8)]
	stp	x14, x15, [sp, #(14 * 8)]
	stp	x16, x17, [sp, #(16 * 8)]
	stp	x18, x19, [sp, #(18 * 8)]
	stp	x20, x21, [sp, #(20 * 8)]
	stp	x22, x23, [sp, #(22 * 8)]
	stp	x24, x25, [sp, #(24 * 8)]
	stp	x26, x27, [sp, #(26 * 8)]
	stp	x28, x29, [sp, #(28 * 8)]
.if \el == 0
	mrs	x18, sp_el0
.endif
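	/*
	 * Read the exception state: the return address (ELR), saved PSR,
	 * exception syndrome (ESR) and fault address (FAR). x18 holds the
	 * interrupted stack pointer and is saved as tf_sp below.
	 */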
	mrs	x10, elr_el1
	mrs	x11, spsr_el1
	mrs	x12, esr_el1
	mrs	x13, far_el1
	stp	x18,  lr, [sp, #(TF_SP - TF_X)]!
	stp	x10, x11, [sp, #(TF_ELR)]
	stp	x12, x13, [sp, #(TF_ESR)]
	mrs	x18, tpidr_el1
.endm

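/*
 * Finish the exception entry started in save_registers_head. On entry
 * x18 holds the per-CPU pointer loaded from tpidr_el1. When coming from
 * EL0 this applies the per-thread SSP canary and SSBD workaround and
 * calls the pointer authentication and debug monitor entry hooks.
 */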
.macro	save_registers el
.if \el == 0
#if defined(PERTHREAD_SSP)
	/* Load the SSP canary to sp_el0 */
	ldr	x1, [x18, #(PC_CURTHREAD)]
	add	x1, x1, #(TD_MD_CANARY)
	msr	sp_el0, x1
#endif

	/* Apply the SSBD (CVE-2018-3639) workaround if needed */
	ldr	x1, [x18, #PC_SSBD]
	cbz	x1, 1f
	mov	w0, #1
	blr	x1
1:

	ldr	x0, [x18, #PC_CURTHREAD]
	bl	ptrauth_exit_el0

	ldr	x0, [x18, #(PC_CURTHREAD)]
	bl	dbg_monitor_enter

	/* Unmask debug and SError exceptions */
	msr	daifclr, #(DAIF_D | DAIF_A)
.else
	/*
	 * Unmask only SError exceptions here; for EL1, debug exceptions
	 * are conditionally unmasked in do_el1h_sync().
	 */
	msr	daifclr, #(DAIF_A)
.endif
.endm

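/*
 * Undo the state changes made in save_registers and reload the trapframe
 * saved by save_registers_head, ready for the ERET back to the interrupted
 * context.
 */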
.macro	restore_registers el
	/*
	 * Mask all exceptions; x18 may change in the interrupt exception
	 * handler.
	 */
	msr	daifset, #(DAIF_ALL)
.if \el == 0
	ldr	x0, [x18, #PC_CURTHREAD]
	mov	x1, sp
	bl	dbg_monitor_exit

	ldr	x0, [x18, #PC_CURTHREAD]
	bl	ptrauth_enter_el0

	/* Remove the SSBD (CVE-2018-3639) workaround if needed */
	ldr	x1, [x18, #PC_SSBD]
	cbz	x1, 1f
	mov	w0, #0
	blr	x1
1:
.endif
	ldp	x18,  lr, [sp, #(TF_SP)]
	ldp	x10, x11, [sp, #(TF_ELR)]
.if \el == 0
	msr	sp_el0, x18
.endif
	msr	spsr_el1, x11
	msr	elr_el1, x10
	ldp	x0,  x1,  [sp, #(TF_X + 0  * 8)]
	ldp	x2,  x3,  [sp, #(TF_X + 2  * 8)]
	ldp	x4,  x5,  [sp, #(TF_X + 4  * 8)]
	ldp	x6,  x7,  [sp, #(TF_X + 6  * 8)]
	ldp	x8,  x9,  [sp, #(TF_X + 8  * 8)]
	ldp	x10, x11, [sp, #(TF_X + 10 * 8)]
	ldp	x12, x13, [sp, #(TF_X + 12 * 8)]
	ldp	x14, x15, [sp, #(TF_X + 14 * 8)]
	ldp	x16, x17, [sp, #(TF_X + 16 * 8)]
.if \el == 0
	/*
	 * We only restore the callee saved registers when returning to
	 * userland as they may have been updated by a system call or signal.
	 */
	ldp	x18, x19, [sp, #(TF_X + 18 * 8)]
	ldp	x20, x21, [sp, #(TF_X + 20 * 8)]
	ldp	x22, x23, [sp, #(TF_X + 22 * 8)]
	ldp	x24, x25, [sp, #(TF_X + 24 * 8)]
	ldp	x26, x27, [sp, #(TF_X + 26 * 8)]
	ldp	x28, x29, [sp, #(TF_X + 28 * 8)]
.else
	ldr	x29, [sp, #(TF_X + 29 * 8)]
.endif
.if \el == 0
	add	sp, sp, #(TF_SIZE)
.else
	mov	sp, x18
	mrs	x18, tpidr_el1
.endif
.endm

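/*
 * Run any pending ASTs (asynchronous system traps) before returning to
 * userland. The pending flag is checked with interrupts masked, so an AST
 * scheduled by an interrupt cannot be missed between the check and ERET.
 */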
.macro	do_ast
	mrs	x19, daif
	/* Make sure the IRQs are enabled before calling ast() */
	bic	x19, x19, #PSR_I
1:
	/* Mask interrupts while checking the ast pending flag */
	msr	daifset, #(DAIF_INTR)

	/* Read the current thread AST mask */
	ldr	x1, [x18, #PC_CURTHREAD]	/* Load curthread */
	ldr	w1, [x1, #(TD_AST)]

	/* Check if we have a non-zero AST mask */
	cbz	w1, 2f

	/* Restore interrupts */
	msr	daif, x19

	/* Handle the ast */
	mov	x0, sp
	bl	_C_LABEL(ast)

	/* Check if a new AST was scheduled while it ran */
	b	1b
2:
.endm

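/*
 * The exception handlers below share a common shape: save the interrupted
 * state, call the C handler with a pointer to the trapframe, then restore
 * the state and ERET. The EL0 handlers also run do_ast to handle any
 * pending ASTs before returning to userland.
 */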
ENTRY(handle_el1h_sync)
	save_registers 1
	ldr	x0, [x18, #PC_CURTHREAD]
	mov	x1, sp
	bl	do_el1h_sync
	restore_registers 1
	ERET
END(handle_el1h_sync)

ENTRY(handle_el1h_irq)
	save_registers 1
	mov	x0, sp
	bl	intr_irq_handler
	restore_registers 1
	ERET
END(handle_el1h_irq)

ENTRY(handle_el0_sync)
	save_registers 0
	ldr	x0, [x18, #PC_CURTHREAD]
	mov	x1, sp
	str	x1, [x0, #TD_FRAME]
	bl	do_el0_sync
	do_ast
	restore_registers 0
	ERET
END(handle_el0_sync)

ENTRY(handle_el0_irq)
	save_registers 0
	mov	x0, sp
	bl	intr_irq_handler
	do_ast
	restore_registers 0
	ERET
END(handle_el0_irq)

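/*
 * do_serror and unhandled_exception are not expected to return, but
 * branch back in case they do.
 */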
ENTRY(handle_serror)
	save_registers 0
	mov	x0, sp
1:	bl	do_serror
	b	1b
END(handle_serror)

ENTRY(handle_empty_exception)
	save_registers 0
	mov	x0, sp
1:	bl	unhandled_exception
	b	1b
END(handle_empty_exception)

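/*
 * Each vector slot is 32 instructions (128 bytes) long, hence the
 * .align 7 below. save_registers_head must fit in the 28 instructions
 * left after the branch and the trailing dsb; isb; brk.
 */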
.macro	vector	name, el
	.align 7
	save_registers_head \el
	b	handle_\name
	dsb	sy
	isb
	/* Break instruction to ensure we aren't executing code here. */
	brk	0x42
.endm

.macro	vempty el
	vector	empty_exception \el
.endm

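/*
 * The vector table is 2KB aligned (.align 11) as required for VBAR_EL1.
 * It holds four groups of four entries: exceptions taken from the current
 * EL using SP_EL0, from the current EL using SP_ELx, and from a lower EL
 * in AArch64 and AArch32 state, each with synchronous, IRQ, FIQ and
 * SError slots.
 */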
	.align 11
	.globl exception_vectors
exception_vectors:
	vempty 1		/* Synchronous EL1t */
	vempty 1		/* IRQ EL1t */
	vempty 1		/* FIQ EL1t */
	vempty 1		/* Error EL1t */

	vector el1h_sync 1	/* Synchronous EL1h */
	vector el1h_irq 1	/* IRQ EL1h */
	vempty 1		/* FIQ EL1h */
	vector serror 1		/* Error EL1h */

	vector el0_sync 0	/* Synchronous 64-bit EL0 */
	vector el0_irq 0	/* IRQ 64-bit EL0 */
	vempty 0		/* FIQ 64-bit EL0 */
	vector serror 0		/* Error 64-bit EL0 */

	vector el0_sync 0	/* Synchronous 32-bit EL0 */
	vector el0_irq 0	/* IRQ 32-bit EL0 */
	vempty 0		/* FIQ 32-bit EL0 */
	vector serror 0		/* Error 32-bit EL0 */