16d65b43dSAaron LI/*
2bfc69df0SAaron LI * Copyright (c) 2018-2021 Maxime Villard, m00nbsd.net
36d65b43dSAaron LI * All rights reserved.
46d65b43dSAaron LI *
57f0e1ce2SAaron LI * This code is part of the NVMM hypervisor.
66d65b43dSAaron LI *
76d65b43dSAaron LI * Redistribution and use in source and binary forms, with or without
86d65b43dSAaron LI * modification, are permitted provided that the following conditions
96d65b43dSAaron LI * are met:
106d65b43dSAaron LI * 1. Redistributions of source code must retain the above copyright
116d65b43dSAaron LI *    notice, this list of conditions and the following disclaimer.
126d65b43dSAaron LI * 2. Redistributions in binary form must reproduce the above copyright
136d65b43dSAaron LI *    notice, this list of conditions and the following disclaimer in the
146d65b43dSAaron LI *    documentation and/or other materials provided with the distribution.
156d65b43dSAaron LI *
167f0e1ce2SAaron LI * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
177f0e1ce2SAaron LI * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
187f0e1ce2SAaron LI * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
197f0e1ce2SAaron LI * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
207f0e1ce2SAaron LI * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
217f0e1ce2SAaron LI * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
227f0e1ce2SAaron LI * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
237f0e1ce2SAaron LI * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
247f0e1ce2SAaron LI * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
257f0e1ce2SAaron LI * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
267f0e1ce2SAaron LI * SUCH DAMAGE.
276d65b43dSAaron LI */
286d65b43dSAaron LI
29*8a0a54bfSAaron LI#if defined(__NetBSD__)
30*8a0a54bfSAaron LI/* Override user-land alignment before including asm.h */
31*8a0a54bfSAaron LI#define ALIGN_DATA	.align	8
32*8a0a54bfSAaron LI#define ALIGN_TEXT	.align 16,0x90
33*8a0a54bfSAaron LI#define _ALIGN_TEXT	ALIGN_TEXT
34*8a0a54bfSAaron LI#define _LOCORE
35*8a0a54bfSAaron LI#include "assym.h"
36*8a0a54bfSAaron LI#include <machine/asm.h>
37*8a0a54bfSAaron LI#elif defined(__DragonFly__)
38*8a0a54bfSAaron LI#define _C_LABEL(x)	x
391c2d6221SAaron LI#include <machine/asmacros.h>
401c2d6221SAaron LI#include "assym.s"
41*8a0a54bfSAaron LI#endif
426d65b43dSAaron LI
436d65b43dSAaron LI#define ASM_NVMM
44bfc69df0SAaron LI#include "nvmm_x86.h"
456d65b43dSAaron LI
	.text

/*
 * Local redefinition (redef) of the VMCS field encoding for the host RSP
 * (natural-width host-state field 0x6C14), so this file does not need
 * to pull in the full VMCS header.
 */
#define VMCS_HOST_RSP				0x00006C14

/*
 * Save/restore the host's callee-saved GPRs (SysV AMD64 set used here:
 * %rbx, %rbp, %r12-%r15) on the stack around a guest run.  The restore
 * pops in exactly the reverse order of the pushes (LIFO) — the two
 * macros must stay mirror images of each other, and the total push
 * count feeds into the stack layout consumed by vmx_resume_rip.
 */
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

/* Save/restore %rax via the stack (one 8-byte slot). */
#define HOST_SAVE_RAX		\
	pushq	%rax

#define HOST_RESTORE_RAX	\
	popq	%rax

/*
 * Save/restore the host LDT selector.  sldt/lldt operate on a 16-bit
 * selector, but a full 8-byte slot is pushed to keep the stack aligned.
 * NOTE(review): presumably needed because VM exit does not restore the
 * host LDTR — confirm against the VMX host-state load rules.
 */
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax

#define HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax
806d65b43dSAaron LI
/*
 * Save/restore the guest GPRs to/from the state area pointed to by
 * `reg' (an array of 8-byte slots indexed by the NVMM_X64_GPR_*
 * constants from nvmm_x86.h).
 *
 * Asymmetry: GUEST_SAVE_GPRS does not save RAX (the caller does that
 * manually, because RAX is the register holding `reg' itself at save
 * time), but GUEST_RESTORE_GPRS does restore RAX — last, since the
 * base register `reg' is unusable afterwards.  RSP/RIP/RFLAGS live in
 * the VMCS, not here.
 */

#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RAX * 8)(reg),%rax
1176d65b43dSAaron LI
1186d65b43dSAaron LI/*
1196d65b43dSAaron LI * %rdi = VA of guest GPR state
1206d65b43dSAaron LI */
/*
 * int vmx_vmlaunch(uint64_t *gprs)
 *
 * In:  %rdi = VA of guest GPR state (NVMM_X64_GPR_* indexed array)
 * Out: does not return here on success — a subsequent #VMEXIT resumes
 *      at vmx_resume_rip, which returns 0 to our caller.  Returns -1
 *      only if the vmlaunch instruction itself fails.
 *
 * Stack layout built here (and relied upon by vmx_resume_rip):
 *   [6 callee-saved GPRs][LDT slot][saved %rdi]  <- RSP at vmlaunch
 */
ENTRY(vmx_vmlaunch)
	/* Save the Host GPRs (callee-saved set). */
	HOST_SAVE_GPRS

	/* Save the Host LDT selector. */
	HOST_SAVE_LDT

	/*
	 * Save the guest-state pointer (%rdi) on the stack; vmx_resume_rip
	 * reloads it from this slot after the VM exit.
	 */
	movq	%rdi,%rax
	pushq	%rax

	/*
	 * Publish the current RSP as the VMCS host RSP, so the CPU
	 * restores exactly this stack on #VMEXIT.
	 */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (including %rax, clobbering the base). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM (first entry on this VMCS). */
	vmlaunch

	/*
	 * Failure: vmlaunch fell through.  Drop the saved-%rdi slot,
	 * unwind in reverse order of the prologue, and return -1.
	 */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmlaunch)
1506d65b43dSAaron LI
1516d65b43dSAaron LI/*
1526d65b43dSAaron LI * %rdi = VA of guest GPR state
1536d65b43dSAaron LI */
/*
 * int vmx_vmresume(uint64_t *gprs)
 *
 * In:  %rdi = VA of guest GPR state (NVMM_X64_GPR_* indexed array)
 * Out: does not return here on success — a subsequent #VMEXIT resumes
 *      at vmx_resume_rip, which returns 0 to our caller.  Returns -1
 *      only if the vmresume instruction itself fails.
 *
 * Identical to vmx_vmlaunch except for the entry instruction
 * (vmresume: re-entry on an already-launched VMCS); the stack layout
 * consumed by vmx_resume_rip must match vmx_vmlaunch exactly.
 */
ENTRY(vmx_vmresume)
	/* Save the Host GPRs (callee-saved set). */
	HOST_SAVE_GPRS

	/* Save the Host LDT selector. */
	HOST_SAVE_LDT

	/* Save the guest-state pointer for vmx_resume_rip. */
	movq	%rdi,%rax
	pushq	%rax

	/* Publish the current RSP as the VMCS host RSP for #VMEXIT. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (including %rax, clobbering the base). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM (re-entry). */
	vmresume

	/* Failure: drop the saved-%rdi slot, unwind, return -1. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmresume)
1836d65b43dSAaron LI
1846d65b43dSAaron LI/*
1856d65b43dSAaron LI * The CPU jumps here after a #VMEXIT.
1866d65b43dSAaron LI */
/*
 * The CPU jumps here after a #VMEXIT, with RSP set to the value that
 * vmx_vmlaunch/vmx_vmresume wrote into VMCS_HOST_RSP, i.e. pointing at
 * the saved guest-state pointer slot.  All registers still hold guest
 * values.  Returns 0 to the original caller of vmlaunch/vmresume.
 */
ENTRY(vmx_resume_rip)
	/*
	 * Save the guest GPRs.  %rax is done manually: stash guest %rax
	 * on the stack first so %rax is free to hold the state pointer,
	 * which sits one slot up at 8(%rsp) after the push.
	 */
	pushq	%rax
	movq	8(%rsp),%rax
	GUEST_SAVE_GPRS(%rax)
	popq	%rbx
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
	/* Drop the saved state-pointer slot pushed by vmlaunch/vmresume. */
	addq	$8,%rsp

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	/* Success: return 0 from the original vmx_vmlaunch/vmx_vmresume call. */
	xorq	%rax,%rax
	retq
END(vmx_resume_rip)
2056d65b43dSAaron LI
/*
 * Panic trampoline for a VMX instruction failing with VMfailValid
 * (valid current VMCS; error code available in the VMCS).  panic()
 * never returns, hence no epilogue.
 */
ENTRY(vmx_insn_failvalid)
	movq	$.Lvmx_validstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failvalid)
2106d65b43dSAaron LI
/*
 * Panic trampoline for a VMX instruction failing with VMfailInvalid
 * (no valid current VMCS).  panic() never returns, hence no epilogue.
 */
ENTRY(vmx_insn_failinvalid)
	movq	$.Lvmx_invalidstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failinvalid)
2156d65b43dSAaron LI
	.section ".rodata"

/*
 * Panic messages.  NOTE(review): GAS .string already appends a NUL
 * terminator, so the explicit "\0" yields a second, harmless NUL byte —
 * kept as-is to preserve the emitted bytes.
 */
.Lvmx_validstr:
	.string	"VMX fail valid\0"
.Lvmx_invalidstr:
	.string "VMX fail invalid\0"
222