/*
 * Copyright (c) 2018-2021 Maxime Villard, m00nbsd.net
 * All rights reserved.
 *
 * This code is part of the NVMM hypervisor.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#if defined(__NetBSD__)
/* Override user-land alignment before including asm.h */
#define ALIGN_DATA	.align	8
#define ALIGN_TEXT	.align	16,0x90
#define _ALIGN_TEXT	ALIGN_TEXT
#define _LOCORE
#include "assym.h"
#include <machine/asm.h>
#elif defined(__DragonFly__)
#define _C_LABEL(x)	x
#include <machine/asmacros.h>
#include "assym.s"
#endif

/* Pull in the NVMM_X64_GPR_* offsets in their assembler-friendly form. */
#define ASM_NVMM
#include "nvmm_x86.h"

	.text

/*
 * redef
 *
 * VMCS field encoding for the host RSP, written with VMWRITE below so
 * that the CPU reloads our stack pointer on VM-exit.
 */
#define VMCS_HOST_RSP	0x00006C14

/*
 * Save/restore the SysV callee-saved GPRs around a VM-entry.  %rsp itself
 * is preserved through the VMCS HOST_RSP field, not here.  The RESTORE
 * macro pops in exact reverse order of SAVE — keep them in sync.
 */
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

/*
 * NOTE(review): these two appear unused in this file — the entry points
 * below push/pop %rax inline instead.  Presumably kept for symmetry.
 */
#define HOST_SAVE_RAX		\
	pushq	%rax

#define HOST_RESTORE_RAX	\
	popq	%rax

/*
 * Save/restore the host LDT selector; VM-exit does not restore the host
 * LDTR for us, so it is carried across the entry/exit manually.  sldtw
 * only writes the low 16 bits of %rax — the stale upper bits get pushed
 * too, but lldtw only consumes the low 16 bits on the way back.
 */
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax

#define HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax

/*
 * We don't save RAX (done manually), but we do restore it.
 */

/*
 * Spill every guest GPR except %rax and %rsp into the guest state area
 * pointed to by `reg'.  %rax is handled separately by the caller (it is
 * holding the state pointer at VM-exit time), and %rsp lives in the VMCS.
 */
#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

/*
 * Load the guest GPRs, %rax included and loaded last — after this macro
 * no host value is live in any GPR except what the VMCS restores.
 * `reg' must be %rax (or a register loaded last) for that reason.
 */
#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RAX * 8)(reg),%rax

/*
 * int vmx_vmlaunch(uint64_t *gprs)
 *
 * %rdi = VA of guest GPR state
 *
 * First VM-entry on this VMCS.  Returns -1 if VMLAUNCH itself fails
 * (fall-through below); on success the CPU eventually VM-exits to
 * vmx_resume_rip, which unwinds the same stack and returns 0 to our
 * caller on our behalf.
 */
ENTRY(vmx_vmlaunch)
	/* Save the Host GPRs (callee-saved set). */
	HOST_SAVE_GPRS

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/*
	 * Stash the guest-state pointer on the stack; vmx_resume_rip
	 * fetches it from 8(%rsp) after VM-exit.
	 */
	movq	%rdi,%rax
	pushq	%rax

	/*
	 * Save the Host RSP into the VMCS — after the push above, so
	 * that on VM-exit %rsp points right at the saved pointer.
	 */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (%rax last, see macro). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM.  Does not return here on success. */
	vmlaunch

	/* Failure.
 */
	/* Drop the stashed guest-state pointer, then unwind and fail. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmlaunch)

/*
 * int vmx_vmresume(uint64_t *gprs)
 *
 * %rdi = VA of guest GPR state
 *
 * Same dance as vmx_vmlaunch, but for a VMCS that has already been
 * launched.  Returns -1 if VMRESUME fails; otherwise control comes back
 * through vmx_resume_rip, which returns 0 to our caller.
 */
ENTRY(vmx_vmresume)
	/* Save the Host GPRs (callee-saved set). */
	HOST_SAVE_GPRS

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Stash the guest-state pointer; retrieved in vmx_resume_rip. */
	movq	%rdi,%rax
	pushq	%rax

	/* Save the Host RSP — after the push, as in vmx_vmlaunch. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (%rax last). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM.  Does not return here on success. */
	vmresume

	/* Failure. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmresume)

/*
 * The CPU jumps here after a #VMEXIT (this address is the VMCS HOST_RIP).
 * On arrival %rsp is the value written to HOST_RSP, i.e. it points at
 * the stashed guest-state pointer; every GPR still holds guest values.
 * Unwinds the stack laid out by vmx_vmlaunch/vmx_vmresume and returns 0
 * to their caller.
 */
ENTRY(vmx_resume_rip)
	/* Save the Guest GPRs.  RAX done manually:		*/
	pushq	%rax				/* park guest %rax	*/
	movq	8(%rsp),%rax			/* %rax = gprs pointer	*/
	GUEST_SAVE_GPRS(%rax)			/* spill everything else */
	popq	%rbx				/* recover guest %rax	*/
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
	addq	$8,%rsp				/* drop the pointer	*/

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	/* Success: return 0 from vmx_vmlaunch/vmx_vmresume. */
	xorq	%rax,%rax
	retq
END(vmx_resume_rip)

/*
 * Panic stubs for VMX instruction failures (fail-valid / fail-invalid).
 * panic() does not return; `call' is used so it gets a return address
 * for the backtrace.
 */
ENTRY(vmx_insn_failvalid)
	movq	$.Lvmx_validstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failvalid)

ENTRY(vmx_insn_failinvalid)
	movq	$.Lvmx_invalidstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failinvalid)

	.section ".rodata"

/* NOTE(review): .string already NUL-terminates; the explicit \0 yields a
 * second terminator.  Harmless, and kept byte-identical here. */
.Lvmx_validstr:
	.string	"VMX fail valid\0"
.Lvmx_invalidstr:
	.string	"VMX fail invalid\0"