1/*
2 * Copyright (c) 2018-2021 Maxime Villard, m00nbsd.net
3 * All rights reserved.
4 *
5 * This code is part of the NVMM hypervisor.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 *    notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 *    notice, this list of conditions and the following disclaimer in the
14 *    documentation and/or other materials provided with the distribution.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 * SUCH DAMAGE.
27 */
28
29#if defined(__NetBSD__)
30/* Override user-land alignment before including asm.h */
31#define ALIGN_DATA	.align	8
32#define ALIGN_TEXT	.align 16,0x90
33#define _ALIGN_TEXT	ALIGN_TEXT
34#define _LOCORE
35#include "assym.h"
36#include <machine/asm.h>
37#elif defined(__DragonFly__)
38#include <machine/asmacros.h>
39#include "assym.s"
40#endif
41
42#include <machine/segments.h>
43
44#define ASM_NVMM
45#include "nvmm_x86.h"
46
47	.text
48
/*
 * HOST_SAVE_GPRS -- push the host's callee-saved GPRs (SysV AMD64 set:
 * rbx, rbp, r12-r15) on the stack.  The caller-saved registers need no
 * saving across svm_vmrun per the C ABI.  Must be paired with
 * HOST_RESTORE_GPRS, which pops in the exact reverse order.
 */
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15
56
/*
 * HOST_RESTORE_GPRS -- pop the host's callee-saved GPRs, in the exact
 * reverse order of HOST_SAVE_GPRS.
 */
#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx
64
/*
 * HOST_SAVE_MSR(msr) -- read the given MSR and push its value on the
 * stack, high half (RDX) first, low half (RAX) second, matching the pop
 * order of HOST_RESTORE_MSR.  RDMSR returns the value in EDX:EAX.
 * Clobbers RAX, RCX, RDX.
 */
#define HOST_SAVE_MSR(msr)	\
	movq	$msr,%rcx	;\
	rdmsr			;\
	pushq	%rdx		;\
	pushq	%rax
70
/*
 * HOST_RESTORE_MSR(msr) -- pop the value saved by HOST_SAVE_MSR (low
 * half first, then high half) and write it back to the given MSR via
 * WRMSR, which takes the value in EDX:EAX.  Clobbers RAX, RCX, RDX.
 */
#define HOST_RESTORE_MSR(msr)	\
	popq	%rax		;\
	popq	%rdx		;\
	movq	$msr,%rcx	;\
	wrmsr
76
/*
 * HOST_SAVE_TR -- push the host's Task Register selector.  STR only
 * writes the low 16 bits of RAX; the pushed upper bits are stale, but
 * HOST_RESTORE_TR only consumes the low 16 bits (movzwq), so that is
 * harmless.  Clobbers RAX.
 */
#define HOST_SAVE_TR		\
	strw	%ax		;\
	pushq	%rax
80
/*
 * HOST_RESTORE_TR -- pop the TR selector saved by HOST_SAVE_TR and
 * reload the Task Register.  LTR faults if the target TSS descriptor is
 * marked busy, and it is busy (the host was running on it), so the busy
 * bit -- bit 9 (0x200) of the second 32-bit word of the descriptor, at
 * byte offset 4 -- must be cleared first.  Clobbers RAX, RDX.
 */
#if defined(__NetBSD__)
/* NetBSD: CPUVAR(GDT) is the base of this CPU's GDT; the selector value
 * is used directly as the byte offset into it (NOTE(review): assumes the
 * selector's TI/RPL bits are zero -- confirm against HOST_SAVE_TR input). */
#define HOST_RESTORE_TR				\
	popq	%rax				;\
	movzwq	%ax,%rdx			;\
	movq	CPUVAR(GDT),%rax		;\
	/* Clear the busy bit for reloading. */	\
	andq	$~0x0200,4(%rax,%rdx, 1)	;\
	ltrw	%dx
#elif defined(__DragonFly__)
/* In DragonFly, PCPU(tss_gdt) points directly to the gdt[] entry for the
 * current CPU's TSS descriptor; while NetBSD's CPUVAR(GDT) points to the
 * gdtstore[] table -- hence no index is needed here, only offset 4. */
#define HOST_RESTORE_TR				\
	popq	%rax				;\
	movzwq	%ax,%rdx			;\
	movq	PCPU(tss_gdt),%rax		;\
	/* Clear the busy bit for reloading. */	\
	andq	$~0x0200,4(%rax)		;\
	ltrw	%dx
#endif
101
/*
 * HOST_SAVE_LDT -- push the host's LDT selector (SLDT writes only the
 * low 16 bits of RAX; the pushed upper bits are stale but unused by
 * HOST_RESTORE_LDT).  Clobbers RAX.
 */
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax
105
/*
 * HOST_RESTORE_LDT -- pop the selector saved by HOST_SAVE_LDT and
 * reload the LDT register.  Clobbers RAX.
 */
#define HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax
109
110/*
111 * All GPRs except RAX and RSP, which are taken care of in VMCB.
112 */
113
/*
 * GUEST_SAVE_GPRS(reg) -- spill the guest GPRs into the state array
 * whose base address is in `reg', one 64-bit slot per register, indexed
 * by the NVMM_X64_GPR_* constants from nvmm_x86.h.  Covers all GPRs
 * except RAX and RSP, which live in the VMCB, and except `reg' itself
 * (the caller passes %rax here, already handled by the VMCB).
 */
#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)
129
/*
 * GUEST_RESTORE_GPRS(reg) -- load the guest GPRs from the state array
 * whose base address is in `reg' (NVMM_X64_GPR_* slot layout, mirror of
 * GUEST_SAVE_GPRS).  `reg' must be a register this macro does not load,
 * i.e. %rax.  Guest RAX and RSP come from the VMCB instead.
 */
#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15
145
/*
 * svm_vmrun -- host/guest world switch around AMD SVM's VMRUN.
 *
 * In:	%rdi = PA of VMCB (physical address, as required by
 *	       VMLOAD/VMRUN/VMSAVE, which take it in RAX)
 *	%rsi = VA of the guest GPR state array (NVMM_X64_GPR_* layout)
 *
 * Saves the host context on the stack, installs the guest GPRs, runs
 * the guest until the next #VMEXIT, spills the guest GPRs back into the
 * state array, and restores the host context.  Guest RAX and RSP are
 * carried in the VMCB, not in the array.
 *
 * NOTE(review): relies on the caller providing a safe context for the
 * MSR/segment/TR manipulation below (e.g. interrupts disabled) -- this
 * is not enforced here; confirm at the call site.
 */
ENTRY(svm_vmrun)
	/* Save the Host GPRs (callee-saved set only, per the C ABI). */
	HOST_SAVE_GPRS

	/* Save the Host TR (VMRUN switches to the guest's TSS). */
	HOST_SAVE_TR

	/* Save the Host GSBASE (clobbered by the %gs reset below). */
	HOST_SAVE_MSR(MSR_GSBASE)

	/* Reset DS and ES to the usual userland data selector. */
	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
	movw	%ax,%ds
	movw	%ax,%es

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/*
	 * Prepare RAX: stash the GPR-state VA (%rsi) and the VMCB PA
	 * (%rdi) on the stack so they survive the guest run -- all GPRs
	 * are about to be overwritten with guest values.
	 */
	pushq	%rsi
	pushq	%rdi

	/* Restore the Guest GPRs from the state array. */
	movq	%rsi,%rax
	GUEST_RESTORE_GPRS(%rax)

	/* Set RAX = VMCB PA, the implicit operand of the SVM insns. */
	popq	%rax

	/*
	 * Run the VM.  VMLOAD pulls the remaining guest segment state
	 * from the VMCB, VMRUN enters the guest; on #VMEXIT execution
	 * resumes at VMSAVE, which stores that state back to the VMCB.
	 */
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	/* Get RAX = the guest GPR-state VA pushed above. */
	popq	%rax

	/* Save the Guest GPRs into the state array. */
	GUEST_SAVE_GPRS(%rax)

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/*
	 * Reset FS and GS with a null selector.  Done before restoring
	 * GSBASE, since loading %gs overwrites the hidden GS base.
	 */
	xorq	%rax,%rax
	movw	%ax,%fs
	movw	%ax,%gs

	/* Restore the Host GSBASE. */
	HOST_RESTORE_MSR(MSR_GSBASE)

	/* Restore the Host TR (clears the TSS busy bit first). */
	HOST_RESTORE_TR

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	retq
END(svm_vmrun)
209