1/*	$NetBSD: nvmm_x86_vmxfunc.S,v 1.3.4.1 2020/08/29 17:00:28 martin Exp $	*/
2
3/*
4 * Copyright (c) 2018 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Maxime Villard.
9 *
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
12 * are met:
13 * 1. Redistributions of source code must retain the above copyright
14 *    notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 *    notice, this list of conditions and the following disclaimer in the
17 *    documentation and/or other materials provided with the distribution.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE.
30 */
31
32#define LOCORE
33#include <machine/asmacros.h>
34#include "assym.s"
35
36#define _C_LABEL(x)	x
37
38#define ASM_NVMM
39#include <dev/virtual/nvmm/x86/nvmm_x86.h>
40
41	.text
42
/*
 * %rdi = host virtual address of a uint64_t holding the physical
 * address of the VMXON region.
 */
ENTRY(_vmx_vmxon)
	/*
	 * Enter VMX root operation.  The memory operand holds the
	 * physical address of the VMXON region.  Failure is reported
	 * via CF (VMfailInvalid) or ZF (VMfailValid), so a single
	 * unsigned "below or equal" test covers both cases.
	 */
	vmxon	(%rdi)
	jbe	.Lvmxon_err
	xorl	%eax,%eax		/* success: return 0 */
	retq
.Lvmxon_err:
	movq	$-1,%rax		/* failure: return -1 */
	retq
END(_vmx_vmxon)
56
57/*
58 * no arg
59 */
ENTRY(_vmx_vmxoff)
	/*
	 * Leave VMX root operation.  As with vmxon, VMfail is signalled
	 * through CF or ZF; "jbe" tests both flags at once.
	 */
	vmxoff
	jbe	.Lvmxoff_err
	xorl	%eax,%eax		/* success: return 0 */
	retq
.Lvmxoff_err:
	movq	$-1,%rax		/* failure: return -1 */
	retq
END(_vmx_vmxoff)
70
/*
 * VMCS field encoding for HOST_RSP, redefined locally for this
 * assembly file.  NOTE(review): also defined in the VMX headers --
 * keep in sync.
 */
#define VMCS_HOST_RSP				0x00006C14

/* Push the host callee-saved GPRs (SysV: rbx, rbp, r12-r15). */
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

/* Pop the host callee-saved GPRs, in reverse push order. */
#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

/* Save/restore %rax on the stack. */
#define HOST_SAVE_RAX		\
	pushq	%rax

#define HOST_RESTORE_RAX	\
	popq	%rax

/*
 * Save the host LDT selector.  sldtw only writes %ax; the full %rax
 * push keeps the stack 8-byte aligned (the high bits are don't-care,
 * since the restore side only consumes %ax).
 */
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax

/* Restore the host LDT selector saved by HOST_SAVE_LDT. */
#define HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax

/*
 * Store/load the guest GPRs to/from the state area addressed by "reg".
 * %rax is not saved here (the caller handles it manually, because %rax
 * serves as the base pointer), but it is restored on the load path.
 */

#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RAX * 8)(reg),%rax
140
141/*
142 * %rdi = VA of guest GPR state
143 */
ENTRY(vmx_vmlaunch)
	/* Save the Host callee-saved GPRs. */
	HOST_SAVE_GPRS

	/* Disable Host interrupts. */
	cli

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/*
	 * Stash the guest GPR state pointer (%rdi) at the top of the
	 * stack.  After #VMEXIT the CPU reloads RSP from VMCS_HOST_RSP
	 * (written just below), so vmx_resume_rip finds this pointer
	 * at 0(%rsp).
	 */
	movq	%rdi,%rax
	pushq	%rax

	/* Publish the current RSP as the Host RSP in the VMCS. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (%rax still holds the state pointer). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM.  On success this does not fall through. */
	vmlaunch

	/* Failure: drop the saved state pointer, unwind, return -1. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	sti
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmlaunch)
177
178/*
179 * %rdi = VA of guest GPR state
180 */
ENTRY(vmx_vmresume)
	/* Save the Host callee-saved GPRs. */
	HOST_SAVE_GPRS

	/* Disable Host interrupts. */
	cli

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/*
	 * Stash the guest GPR state pointer (%rdi) at the top of the
	 * stack.  After #VMEXIT the CPU reloads RSP from VMCS_HOST_RSP
	 * (written just below), so vmx_resume_rip finds this pointer
	 * at 0(%rsp).
	 */
	movq	%rdi,%rax
	pushq	%rax

	/* Publish the current RSP as the Host RSP in the VMCS. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs (%rax still holds the state pointer). */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM.  On success this does not fall through. */
	vmresume

	/* Failure: drop the saved state pointer, unwind, return -1. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	sti
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmresume)
214
215/*
216 * The CPU jumps here after a #VMEXIT.
217 */
ENTRY(vmx_resume_rip)
	/*
	 * RSP here is the VMCS_HOST_RSP value published by
	 * vmx_vmlaunch/vmx_vmresume, so 0(%rsp) holds the guest GPR
	 * state pointer.  Save the Guest GPRs; %rax is handled
	 * manually because it is needed as the base pointer.
	 */
	pushq	%rax				/* preserve guest %rax */
	movq	8(%rsp),%rax			/* %rax = state pointer */
	GUEST_SAVE_GPRS(%rax)
	popq	%rbx				/* retrieve guest %rax */
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
	addq	$8,%rsp				/* drop the state pointer */

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Enable Host interrupts. */
	sti

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	/* Return 0 to the vmx_vmlaunch/vmx_vmresume caller. */
	xorq	%rax,%rax
	retq
END(vmx_resume_rip)
239
ENTRY(vmx_insn_failvalid)
	/*
	 * Panic handler: a VMX instruction raised VMfailValid (failure
	 * with a valid current VMCS).  Does not return.
	 *
	 * Use a RIP-relative lea instead of "movq $sym": the latter
	 * emits a sign-extended 32-bit absolute relocation, whereas the
	 * RIP-relative form is position-independent and relocation-safe.
	 */
	leaq	.Lvmx_validstr(%rip),%rdi	/* arg0: panic message */
	call	_C_LABEL(panic)
END(vmx_insn_failvalid)
244
ENTRY(vmx_insn_failinvalid)
	/*
	 * Panic handler: a VMX instruction raised VMfailInvalid
	 * (failure with no valid current VMCS).  Does not return.
	 *
	 * RIP-relative lea: position-independent and relocation-safe,
	 * unlike the 32-bit absolute "movq $sym" form.
	 */
	leaq	.Lvmx_invalidstr(%rip),%rdi	/* arg0: panic message */
	call	_C_LABEL(panic)
END(vmx_insn_failinvalid)
249
	.section ".rodata"

/*
 * Panic messages.  NOTE(review): .string already appends a NUL
 * terminator, so the explicit "\0" is redundant (harmless: panic
 * stops at the first NUL).
 */
.Lvmx_validstr:
	.string	"VMX fail valid\0"
.Lvmx_invalidstr:
	.string "VMX fail invalid\0"
256