/*-
 * Copyright (c) 2014 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
25 * 26 */ 27 28#include <machine/asm.h> 29#include <machine/armreg.h> 30__FBSDID("$FreeBSD$"); 31 32#include "assym.inc" 33 34 .text 35 36.macro save_registers el 37.if \el == 1 38 mov x18, sp 39 sub sp, sp, #128 40.endif 41 sub sp, sp, #(TF_SIZE + 16) 42 stp x29, x30, [sp, #(TF_SIZE)] 43 stp x28, x29, [sp, #(TF_X + 28 * 8)] 44 stp x26, x27, [sp, #(TF_X + 26 * 8)] 45 stp x24, x25, [sp, #(TF_X + 24 * 8)] 46 stp x22, x23, [sp, #(TF_X + 22 * 8)] 47 stp x20, x21, [sp, #(TF_X + 20 * 8)] 48 stp x18, x19, [sp, #(TF_X + 18 * 8)] 49 stp x16, x17, [sp, #(TF_X + 16 * 8)] 50 stp x14, x15, [sp, #(TF_X + 14 * 8)] 51 stp x12, x13, [sp, #(TF_X + 12 * 8)] 52 stp x10, x11, [sp, #(TF_X + 10 * 8)] 53 stp x8, x9, [sp, #(TF_X + 8 * 8)] 54 stp x6, x7, [sp, #(TF_X + 6 * 8)] 55 stp x4, x5, [sp, #(TF_X + 4 * 8)] 56 stp x2, x3, [sp, #(TF_X + 2 * 8)] 57 stp x0, x1, [sp, #(TF_X + 0 * 8)] 58 mrs x10, elr_el1 59 mrs x11, spsr_el1 60 mrs x12, esr_el1 61.if \el == 0 62 mrs x18, sp_el0 63.endif 64 str x10, [sp, #(TF_ELR)] 65 stp w11, w12, [sp, #(TF_SPSR)] 66 stp x18, lr, [sp, #(TF_SP)] 67 mrs x18, tpidr_el1 68 add x29, sp, #(TF_SIZE) 69.if \el == 0 70 /* Apply the SSBD (CVE-2018-3639) workaround if needed */ 71 ldr x1, [x18, #PC_SSBD] 72 cbz x1, 1f 73 mov w0, #1 74 blr x1 751: 76 77 ldr x0, [x18, #(PC_CURTHREAD)] 78 bl dbg_monitor_enter 79.endif 80 msr daifclr, #8 /* Enable the debug exception */ 81.endm 82 83.macro restore_registers el 84.if \el == 1 85 /* 86 * Disable interrupts and debug exceptions, x18 may change in the 87 * interrupt exception handler. For EL0 exceptions, do_ast already 88 * did this. 
89 */ 90 msr daifset, #10 91.endif 92.if \el == 0 93 ldr x0, [x18, #PC_CURTHREAD] 94 mov x1, sp 95 bl dbg_monitor_exit 96 97 /* Remove the SSBD (CVE-2018-3639) workaround if needed */ 98 ldr x1, [x18, #PC_SSBD] 99 cbz x1, 1f 100 mov w0, #0 101 blr x1 1021: 103.endif 104 ldp x18, lr, [sp, #(TF_SP)] 105 ldp x10, x11, [sp, #(TF_ELR)] 106.if \el == 0 107 msr sp_el0, x18 108.endif 109 msr spsr_el1, x11 110 msr elr_el1, x10 111 ldp x0, x1, [sp, #(TF_X + 0 * 8)] 112 ldp x2, x3, [sp, #(TF_X + 2 * 8)] 113 ldp x4, x5, [sp, #(TF_X + 4 * 8)] 114 ldp x6, x7, [sp, #(TF_X + 6 * 8)] 115 ldp x8, x9, [sp, #(TF_X + 8 * 8)] 116 ldp x10, x11, [sp, #(TF_X + 10 * 8)] 117 ldp x12, x13, [sp, #(TF_X + 12 * 8)] 118 ldp x14, x15, [sp, #(TF_X + 14 * 8)] 119 ldp x16, x17, [sp, #(TF_X + 16 * 8)] 120.if \el == 0 121 /* 122 * We only restore the callee saved registers when returning to 123 * userland as they may have been updated by a system call or signal. 124 */ 125 ldp x18, x19, [sp, #(TF_X + 18 * 8)] 126 ldp x20, x21, [sp, #(TF_X + 20 * 8)] 127 ldp x22, x23, [sp, #(TF_X + 22 * 8)] 128 ldp x24, x25, [sp, #(TF_X + 24 * 8)] 129 ldp x26, x27, [sp, #(TF_X + 26 * 8)] 130 ldp x28, x29, [sp, #(TF_X + 28 * 8)] 131.else 132 ldr x29, [sp, #(TF_X + 29 * 8)] 133.endif 134.if \el == 0 135 add sp, sp, #(TF_SIZE + 16) 136.else 137 mov sp, x18 138 mrs x18, tpidr_el1 139.endif 140.endm 141 142.macro do_ast 143 mrs x19, daif 144 /* Make sure the IRQs are enabled before calling ast() */ 145 bic x19, x19, #PSR_I 1461: 147 /* Disable interrupts */ 148 msr daifset, #10 149 150 /* Read the current thread flags */ 151 ldr x1, [x18, #PC_CURTHREAD] /* Load curthread */ 152 ldr x2, [x1, #TD_FLAGS] 153 154 /* Check if we have either bits set */ 155 mov x3, #((TDF_ASTPENDING|TDF_NEEDRESCHED) >> 8) 156 lsl x3, x3, #8 157 and x2, x2, x3 158 cbz x2, 2f 159 160 /* Restore interrupts */ 161 msr daif, x19 162 163 /* handle the ast */ 164 mov x0, sp 165 bl _C_LABEL(ast) 166 167 /* Re-check for new ast scheduled */ 168 b 1b 1692: 
170.endm 171 172ENTRY(handle_el1h_sync) 173 save_registers 1 174 ldr x0, [x18, #PC_CURTHREAD] 175 mov x1, sp 176 bl do_el1h_sync 177 restore_registers 1 178 ERET 179END(handle_el1h_sync) 180 181ENTRY(handle_el1h_irq) 182 save_registers 1 183 mov x0, sp 184 bl intr_irq_handler 185 restore_registers 1 186 ERET 187END(handle_el1h_irq) 188 189ENTRY(handle_el0_sync) 190 save_registers 0 191 ldr x0, [x18, #PC_CURTHREAD] 192 mov x1, sp 193 str x1, [x0, #TD_FRAME] 194 bl do_el0_sync 195 do_ast 196 restore_registers 0 197 ERET 198END(handle_el0_sync) 199 200ENTRY(handle_el0_irq) 201 save_registers 0 202 mov x0, sp 203 bl intr_irq_handler 204 do_ast 205 restore_registers 0 206 ERET 207END(handle_el0_irq) 208 209ENTRY(handle_serror) 210 save_registers 0 211 mov x0, sp 2121: bl do_serror 213 b 1b 214END(handle_serror) 215 216ENTRY(handle_empty_exception) 217 save_registers 0 218 mov x0, sp 2191: bl unhandled_exception 220 b 1b 221END(handle_unhandled_exception) 222 223.macro vempty 224 .align 7 225 b handle_empty_exception 226.endm 227 228.macro vector name 229 .align 7 230 b handle_\name 231.endm 232 233 .align 11 234 .globl exception_vectors 235exception_vectors: 236 vempty /* Synchronous EL1t */ 237 vempty /* IRQ EL1t */ 238 vempty /* FIQ EL1t */ 239 vempty /* Error EL1t */ 240 241 vector el1h_sync /* Synchronous EL1h */ 242 vector el1h_irq /* IRQ EL1h */ 243 vempty /* FIQ EL1h */ 244 vector serror /* Error EL1h */ 245 246 vector el0_sync /* Synchronous 64-bit EL0 */ 247 vector el0_irq /* IRQ 64-bit EL0 */ 248 vempty /* FIQ 64-bit EL0 */ 249 vector serror /* Error 64-bit EL0 */ 250 251 vector el0_sync /* Synchronous 32-bit EL0 */ 252 vector el0_irq /* IRQ 32-bit EL0 */ 253 vempty /* FIQ 32-bit EL0 */ 254 vector serror /* Error 32-bit EL0 */ 255 256