xref: /dragonfly/sys/platform/pc64/x86_64/support.s (revision 8d2aaeec)
/*-
 * Copyright (c) 1993 The Regents of the University of California.
 * Copyright (c) 2003 Peter Wemm.
 * Copyright (c) 2008 The DragonFly Project.
 * Copyright (c) 2008-2020 The DragonFly Project.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/amd64/amd64/support.S,v 1.127 2007/05/23 08:33:04 kib Exp $
 */

#include <machine/asmacros.h>
#include <machine/asm_mjgmacros.h>
#include <machine/pmap.h>

#include "assym.s"

	ALIGN_DATA

	.text

/*
 * bzero(ptr:%rdi, bytes:%rsi)
 *
 * Using rep stosq is 70% faster than a %rax loop and almost as fast as
 * a %xmm0 loop on a modern intel cpu.
 *
 * Do not use non-temporal instructions here as we do not know the caller's
 * intent.
 */
ENTRY(bzero)
	subq	%r10,%r10
	movq	%rsi,%rdx
	MEMSET erms=0 end=ret
END(bzero)

	.weak	_bzero
	.equ	_bzero, bzero

/*
 * void *memset(ptr:%rdi, char:%rsi, bytes:%rdx)
 *
 * Same as bzero except we load the char into all byte
 * positions of %r10.  Returns original (ptr).
 */
ENTRY(memset)
	movzbq	%sil,%r8
	movabs  $0x0101010101010101,%r10
	imulq   %r8,%r10
	MEMSET erms=0 end=ret
END(memset)

	.weak	_memset
	.equ	_memset, memset
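
/*
 * Illustrative sketch (hypothetical helper name, for exposition only):
 * the movabs/imulq pair above replicates the fill byte into all eight
 * byte lanes of %r10, because multiplying 0x01-in-every-lane by the
 * byte value places that byte in every lane.  The same trick in C:
 *
 *	uint64_t
 *	replicate_byte(uint8_t c)
 *	{
 *		return ((uint64_t)c * 0x0101010101010101ULL);
 *	}
 *
 * e.g. replicate_byte(0xab) yields 0xabababababababab, letting MEMSET
 * store eight bytes at a time.
 */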

/*
 * pagezero(ptr:%rdi)
 *
 * Modern intel and AMD cpus do a good job with rep stosq on page-sized
 * blocks.  The cross-over point on intel is at the 256 byte mark and on
 * AMD it is around the 1024 byte mark.  With large counts, rep stosq will
 * internally use non-temporal instructions and a cache sync at the end.
 */
#if 1

ENTRY(pagezero)
	movq	$PAGE_SIZE>>3,%rcx
	xorl	%eax,%eax
	rep
	stosq
	ret
END(pagezero)

#else

ENTRY(pagezero)
	addq	$4096,%rdi
	movq	$-4096,%rax
	ALIGN_TEXT
1:
	movq	$0,(%rdi,%rax,1)
	movq	$0,8(%rdi,%rax,1)
	addq	$16,%rax
	jne	1b
	ret
END(pagezero)

#endif

/*
 * bcopy(src:%rdi, dst:%rsi, cnt:%rdx)
 *
 * ws@tools.de (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
 */
ENTRY(bcopy)
	xchgq	%rsi,%rdi
	MEMMOVE	erms=0 overlap=1 end=ret
END(bcopy)

	/*
	 * Use in situations where a bcopy function pointer is needed.
	 */
	.weak	_bcopy
	.equ	_bcopy, bcopy

	/*
	 * memmove(dst:%rdi, src:%rsi, cnt:%rdx)
	 * (same as bcopy but without the xchgq, and must return (dst)).
	 *
	 * NOTE: the gcc builtin can fall back to a memmove() call
	 * NOTE: returns dst
	 */
ENTRY(memmove)
	movq	%rdi,%rax
	MEMMOVE erms=0 overlap=1 end=ret
END(memmove)

	.weak	_memmove
	.equ	_memmove, memmove

ENTRY(reset_dbregs)
	movq	$0x200,%rax	/* the manual says that bit 10 must be set to 1 */
	movq	%rax,%dr7	/* disable all breakpoints first */
	movq	$0,%rax
	movq	%rax,%dr0
	movq	%rax,%dr1
	movq	%rax,%dr2
	movq	%rax,%dr3
	movq	%rax,%dr6
	ret
END(reset_dbregs)

/*
 * memcpy(dst:%rdi, src:%rsi, bytes:%rdx)
 *
 * NOTE: memcpy does not support overlapping copies
 * NOTE: returns dst
 */
ENTRY(memcpy)
	movq	%rdi,%rax
	MEMMOVE erms=0 overlap=0 end=ret
END(memcpy)

	.weak	_memcpy
	.equ	_memcpy, memcpy

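/*
 * Illustrative summary of the three copy entry points above (standard
 * C prototypes; overlap behavior per the notes in each header):
 *
 *	void  bcopy(const void *src, void *dst, size_t len);	overlap ok
 *	void *memmove(void *dst, const void *src, size_t len);	overlap ok
 *	void *memcpy(void *dst, const void *src, size_t len);	no overlap
 *
 * bcopy() takes (src, dst) while memmove()/memcpy() take (dst, src),
 * which is why bcopy() begins with an xchgq.  For example,
 * memmove(buf, buf + 1, n - 1) is legal; memcpy() with the same
 * arguments is not.
 */
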
/* fillw(pat, base, cnt) */
/*       %rdi,%rsi, %rdx */
ENTRY(fillw)
	movq	%rdi,%rax
	movq	%rsi,%rdi
	movq	%rdx,%rcx
	rep
	stosw
	ret
END(fillw)

/*****************************************************************************/
/* copyout and fubyte family                                                 */
/*****************************************************************************/
/*
 * Access user memory from inside the kernel. These routines should be
 * the only places that do this.
 *
 * These routines set curpcb->onfault for the time they execute. When a
 * protection violation occurs inside the functions, the trap handler
 * returns to *curpcb->onfault instead of the function.
 */

/*
 * uint64_t:%rax kreadmem64(addr:%rdi)
 *
 * Read kernel or user memory with fault protection.
 */
ENTRY(kreadmem64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$kreadmem64fault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)
	movq	(%rdi),%rax
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret

kreadmem64fault:
	SMAP_CLOSE
	movq	PCPU(curthread),%rcx
	xorl	%eax,%eax
	movq	TD_PCB(%rcx),%rcx
	movq	%rax,PCB_ONFAULT(%rcx)
	decq	%rax
	ret
END(kreadmem64)

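/*
 * Illustrative usage sketch (hypothetical caller):
 *
 *	uint64_t v;
 *
 *	v = kreadmem64(addr);
 *
 * On a fault the handler above zeroes %rax and then decrements it, so
 * the caller sees (uint64_t)-1.  A stored value of -1 is therefore
 * indistinguishable from a fault and callers must tolerate that.
 */
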
.macro COPYOUT_END
	jmp	done_copyout
	nop
.endm

/*
 * std_copyout(from_kernel, to_user, len)  - MP SAFE
 *         %rdi,        %rsi,    %rdx
 */
ENTRY(std_copyout)
	SMAP_OPEN
	movq	PCPU(curthread),%rax
	movq	TD_PCB(%rax), %rax
	movq	$copyout_fault,PCB_ONFAULT(%rax)
	movq	%rsp,PCB_ONFAULT_SP(%rax)
	testq	%rdx,%rdx			/* anything to do? */
	jz	done_copyout

	/*
	 * Check explicitly for non-user addresses.  If 486 write protection
	 * is being used, this check is essential because we are in kernel
	 * mode so the h/w does not provide any protection against writing
	 * kernel addresses.
	 */

	/*
	 * First, prevent address wrapping.
	 */
	movq	%rsi,%rax
	addq	%rdx,%rax
	jc	copyout_fault
/*
 * XXX STOP USING VM_MAX_USER_ADDRESS.
 * It is an end address, not a max, so every time it is used correctly it
 * looks like there is an off by one error, and of course it caused an off
 * by one error in several places.
 */
	movq	$VM_MAX_USER_ADDRESS,%rcx
	cmpq	%rcx,%rax
	ja	copyout_fault

	xchgq	%rdi,%rsi
	MEMMOVE erms=0 overlap=0 end=COPYOUT_END

done_copyout:
	SMAP_CLOSE
	xorl	%eax,%eax
	movq	PCPU(curthread),%rdx
	movq	TD_PCB(%rdx), %rdx
	movq	%rax,PCB_ONFAULT(%rdx)
	ret

	ALIGN_TEXT
copyout_fault:
	SMAP_CLOSE
	movq	PCPU(curthread),%rdx
	movq	TD_PCB(%rdx), %rdx
	movq	$0,PCB_ONFAULT(%rdx)
	movq	$EFAULT,%rax
	ret
END(std_copyout)

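/*
 * Illustrative usage sketch (hypothetical caller): callers normally
 * reach this code through the generic copyout() entry point and
 * propagate the 0/EFAULT result directly:
 *
 *	int error;
 *
 *	error = copyout(&kbuf, uaddr, sizeof(kbuf));
 *	if (error)
 *		return (error);
 */
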
.macro COPYIN_END
	jmp	done_copyin
	nop
.endm

/*
 * std_copyin(from_user, to_kernel, len) - MP SAFE
 *        %rdi,      %rsi,      %rdx
 */
ENTRY(std_copyin)
	SMAP_OPEN
	movq	PCPU(curthread),%rax
	movq	TD_PCB(%rax), %rax
	movq	$copyin_fault,PCB_ONFAULT(%rax)
	movq	%rsp,PCB_ONFAULT_SP(%rax)
	testq	%rdx,%rdx			/* anything to do? */
	jz	done_copyin

	/*
	 * make sure address is valid
	 */
	movq	%rdi,%rax
	addq	%rdx,%rax
	jc	copyin_fault
	movq	$VM_MAX_USER_ADDRESS,%rcx
	cmpq	%rcx,%rax
	ja	copyin_fault

	xchgq	%rdi,%rsi
	MEMMOVE erms=0 overlap=0 end=COPYIN_END

done_copyin:
	SMAP_CLOSE
	xorl	%eax,%eax
	movq	PCPU(curthread),%rdx
	movq	TD_PCB(%rdx), %rdx
	movq	%rax,PCB_ONFAULT(%rdx)
	ret

	ALIGN_TEXT
copyin_fault:
	SMAP_CLOSE
	movq	PCPU(curthread),%rdx
	movq	TD_PCB(%rdx), %rdx
	movq	$0,PCB_ONFAULT(%rdx)
	movq	$EFAULT,%rax
	ret
END(std_copyin)

/*
 * casu32 - Compare and set user integer.  Returns -1 or the current value.
 *          dst = %rdi, old = %rsi, new = %rdx
 */
ENTRY(casu32)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-4,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movl	%esi,%eax			/* old */
	lock
	cmpxchgl %edx,(%rdi)			/* new = %edx */

	/*
	 * The old value is in %eax.  If the store succeeded it will be the
	 * value we expected (old) from before the store, otherwise it will
	 * be the current value.
	 */

	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(casu32)

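/*
 * Illustrative semantics sketch (hypothetical caller): casu32 acts as
 * a compare-and-swap on a user address, returning the prior value with
 * -1 doubling as the fault indicator:
 *
 *	old = casu32(uptr, expect, newval);
 *	if (old == expect)
 *		...				swap took effect
 *	else if (old == -1)
 *		...				fault (or stored -1)
 */
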
/*
 * swapu32 - Swap int in user space.  ptr = %rdi, val = %rsi
 */
ENTRY(std_swapu32)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-4,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* value to swap in */
	xchgl	%eax,(%rdi)

	/*
	 * The previous value is returned in %rax.  Unlike casu32 the
	 * xchgl cannot fail, so there is no success/failure case to
	 * distinguish; a fault is handled via fusufault.
	 */

	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_swapu32)

ENTRY(std_fuwordadd32)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-4,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* qty to add */
	lock xaddl	%eax,(%rdi)

	/*
	 * The previous value is returned in %rax.  The xaddl cannot
	 * fail; a fault is handled via fusufault.
	 */
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_fuwordadd32)

/*
 * casu64 - Compare and set user word.  Returns -1 or the current value.
 *          dst = %rdi, old = %rsi, new = %rdx
 */
ENTRY(casu64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-8,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* old */
	lock
	cmpxchgq %rdx,(%rdi)			/* new = %rdx */

	/*
	 * The old value is in %rax.  If the store succeeded it will be the
	 * value we expected (old) from before the store, otherwise it will
	 * be the current value.
	 */

	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(casu64)

/*
 * swapu64 - Swap long in user space.  ptr = %rdi, val = %rsi
 */
ENTRY(std_swapu64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-8,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* value to swap in */
	xchgq	%rax,(%rdi)

	/*
	 * The previous value is returned in %rax.  Unlike casu64 the
	 * xchgq cannot fail, so there is no success/failure case to
	 * distinguish; a fault is handled via fusufault.
	 */

	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_swapu64)

ENTRY(std_fuwordadd64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-8,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	%rsi,%rax			/* value to add */
	lock xaddq	%rax,(%rdi)

	/*
	 * The previous value is returned in %rax.  The xaddq cannot
	 * fail; a fault is handled via fusufault.
	 */

	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_fuwordadd64)

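/*
 * Illustrative sketch of the swap/add user atomics above (hypothetical
 * caller).  Both return the previous value; the underlying xchg/xadd
 * instructions cannot fail, so apart from a fault there is no failure
 * case to test:
 *
 *	old = swapu64(uptr, newval);		*uptr = newval
 *	old = fuwordadd64(uptr, n);		*uptr += n
 */
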
/*
 * Fetch (load) a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit
 * byte from user memory.  All these functions are MPSAFE.
 * addr = %rdi
 */

ENTRY(std_fuword64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-8,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movq	(%rdi),%rax
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_fuword64)

ENTRY(std_fuword32)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-4,%rax
	cmpq	%rax,%rdi			/* verify address is valid */
	ja	fusufault

	movl	(%rdi),%eax
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_fuword32)

ENTRY(std_fubyte)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-1,%rax
	cmpq	%rax,%rdi
	ja	fusufault

	movzbl	(%rdi),%eax
	movq	$0,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret

	ALIGN_TEXT
fusufault:
	movq	PCPU(curthread),%rcx
	xorl	%eax,%eax
	movq	TD_PCB(%rcx), %rcx
	movq	%rax,PCB_ONFAULT(%rcx)
	decq	%rax
	SMAP_CLOSE
	ret
END(std_fubyte)

/*
 * Store a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit byte to
 * user memory.  All these functions are MPSAFE.
 *
 * addr = %rdi, value = %rsi
 *
 * Write a long
 */
ENTRY(std_suword64)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-8,%rax
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movq	%rsi,(%rdi)
	xorl	%eax,%eax
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	%rax,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_suword64)

/*
 * Write an int
 */
ENTRY(std_suword32)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-4,%rax
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movl	%esi,(%rdi)
	xorl	%eax,%eax
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	%rax,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_suword32)

ENTRY(std_subyte)
	SMAP_OPEN
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$fusufault,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS-1,%rax
	cmpq	%rax,%rdi			/* verify address validity */
	ja	fusufault

	movl	%esi,%eax
	movb	%al,(%rdi)
	xorl	%eax,%eax
	movq	PCPU(curthread),%rcx		/* restore trashed register */
	movq	TD_PCB(%rcx), %rcx
	movq	%rax,PCB_ONFAULT(%rcx)
	SMAP_CLOSE
	ret
END(std_subyte)

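/*
 * Illustrative usage sketch (hypothetical caller): the fetch/store
 * primitives move a single datum and fold faults into the return
 * value:
 *
 *	long v;
 *
 *	v = fuword64(uaddr);		returns -1 on fault (or stored -1)
 *	if (suword64(uaddr, v) != 0)
 *		return (EFAULT);	suword* returns 0 or -1
 */
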
/*
 * std_copyinstr(from, to, maxlen, int *lencopied) - MP SAFE
 *           %rdi, %rsi, %rdx, %rcx
 *
 *	copy a string from 'from' to 'to', stopping when a NUL character
 *	is reached.  Return ENAMETOOLONG if the string is longer than
 *	maxlen, and EFAULT on protection violations.  If lencopied is
 *	non-zero, return the actual length in *lencopied.
 */
ENTRY(std_copyinstr)
	SMAP_OPEN
	movq	%rdx,%r8			/* %r8 = maxlen */
	movq	%rcx,%r9			/* %r9 = *len */
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$cpystrflt,PCB_ONFAULT(%rcx)
	movq	%rsp,PCB_ONFAULT_SP(%rcx)

	movq	$VM_MAX_USER_ADDRESS,%rax

	/* make sure 'from' is within bounds */
	subq	%rdi,%rax
	jbe	cpystrflt

	/* restrict maxlen to <= VM_MAX_USER_ADDRESS-from */
	cmpq	%rdx,%rax
	jae	1f
	movq	%rax,%rdx
	movq	%rax,%r8
1:
	incq	%rdx

2:
	decq	%rdx
	jz	3f

	movb	(%rdi),%al			/* faster than lodsb+stosb */
	movb	%al,(%rsi)
	leaq	1(%rdi),%rdi
	leaq	1(%rsi),%rsi
	testb	%al,%al
	jnz	2b

	/* Success -- 0 byte reached */
	decq	%rdx
	xorl	%eax,%eax
	jmp	cpystrflt_x
3:
	/* rdx is zero - return ENAMETOOLONG or EFAULT */
	movq	$VM_MAX_USER_ADDRESS,%rax
	cmpq	%rax,%rsi
	jae	cpystrflt
4:
	movq	$ENAMETOOLONG,%rax
	jmp	cpystrflt_x

cpystrflt:
	movq	$EFAULT,%rax

cpystrflt_x:
	SMAP_CLOSE
	/* set *lencopied and return %eax */
	movq	PCPU(curthread),%rcx
	movq	TD_PCB(%rcx), %rcx
	movq	$0,PCB_ONFAULT(%rcx)

	testq	%r9,%r9
	jz	1f
	subq	%rdx,%r8
	movq	%r8,(%r9)
1:
	ret
END(std_copyinstr)

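/*
 * Illustrative usage sketch (hypothetical caller): the reported length
 * includes the terminating NUL byte:
 *
 *	char path[MAXPATHLEN];
 *	size_t len;
 *	int error;
 *
 *	error = copyinstr(upath, path, sizeof(path), &len);
 *	if (error)
 *		return (error);		ENAMETOOLONG or EFAULT
 */
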
/*
 * copystr(from, to, maxlen, int *lencopied) - MP SAFE
 *         %rdi, %rsi, %rdx, %rcx
 */
ENTRY(copystr)
	movq	%rdx,%r8			/* %r8 = maxlen */
	incq	%rdx
1:
	decq	%rdx
	jz	4f

	movb	(%rdi),%al			/* faster than lodsb+stosb */
	movb	%al,(%rsi)
	leaq	1(%rdi),%rdi
	leaq	1(%rsi),%rsi
	testb	%al,%al
	jnz	1b

	/* Success -- 0 byte reached */
	decq	%rdx
	xorl	%eax,%eax
	jmp	6f
4:
	/* rdx is zero -- return ENAMETOOLONG */
	movq	$ENAMETOOLONG,%rax

6:
	testq	%rcx,%rcx
	jz	7f
	/* set *lencopied and return %rax */
	subq	%rdx,%r8
	movq	%r8,(%rcx)
7:
	ret
END(copystr)

/*
 * Handling of special x86_64 registers and descriptor tables etc
 * %rdi
 */
/* void lgdt(struct region_descriptor *rdp); */
ENTRY(lgdt)
	/* reload the descriptor table */
	lgdt	(%rdi)

	/* flush the prefetch q */
	jmp	1f
	nop
1:
	movl	$KDSEL,%eax
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%fs	/* Beware, use wrmsr to set 64 bit base */
	movl	%eax,%gs	/* Beware, use wrmsr to set 64 bit base */
	movl	%eax,%ss

	/* reload code selector by turning return into intersegmental return */
	popq	%rax
	pushq	$KCSEL
	pushq	%rax
	MEXITCOUNT
	lretq
END(lgdt)

/*****************************************************************************/
/* setjmp, longjmp                                                           */
/*****************************************************************************/

ENTRY(setjmp)
	movq	%rbx,0(%rdi)			/* save rbx */
	movq	%rsp,8(%rdi)			/* save rsp */
	movq	%rbp,16(%rdi)			/* save rbp */
	movq	%r12,24(%rdi)			/* save r12 */
	movq	%r13,32(%rdi)			/* save r13 */
	movq	%r14,40(%rdi)			/* save r14 */
	movq	%r15,48(%rdi)			/* save r15 */
	movq	0(%rsp),%rdx			/* get rta */
	movq	%rdx,56(%rdi)			/* save rip */
	xorl	%eax,%eax			/* return(0); */
	ret
END(setjmp)

ENTRY(longjmp)
	movq	0(%rdi),%rbx			/* restore rbx */
	movq	8(%rdi),%rsp			/* restore rsp */
	movq	16(%rdi),%rbp			/* restore rbp */
	movq	24(%rdi),%r12			/* restore r12 */
	movq	32(%rdi),%r13			/* restore r13 */
	movq	40(%rdi),%r14			/* restore r14 */
	movq	48(%rdi),%r15			/* restore r15 */
	movq	56(%rdi),%rdx			/* get rta */
	movq	%rdx,0(%rsp)			/* put in return frame */
	xorl	%eax,%eax			/* return(1); */
	incl	%eax
	ret
END(longjmp)

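/*
 * Illustrative usage sketch (hypothetical caller and jmp_buf type; the
 * buffer only needs the eight 64-bit slots saved above): setjmp()
 * returns 0 on the initial call and 1 when resumed via longjmp():
 *
 *	if (setjmp(&jb) == 0) {
 *		...			normal path, may longjmp(&jb)
 *	} else {
 *		...			resumed here after longjmp()
 *	}
 */
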
/*
 * Support for reading MSRs in a safe manner.
 */
ENTRY(rdmsr_safe)
/* int rdmsr_safe(u_int msr, uint64_t *data) */
	movq	PCPU(curthread),%r8
	movq	TD_PCB(%r8), %r8
	movq	$msr_onfault,PCB_ONFAULT(%r8)
	movq	%rsp,PCB_ONFAULT_SP(%r8)
	movl	%edi,%ecx
	rdmsr			/* Read MSR pointed to by %ecx.  Returns
				   high 32 bits in %edx, low in %eax */
	salq	$32,%rdx	/* shift %rdx left 32 bits */
	movl	%eax,%eax	/* zero-extend %eax -> %rax */
	orq	%rdx,%rax
	movq	%rax,(%rsi)
	xorq	%rax,%rax
	movq	%rax,PCB_ONFAULT(%r8)
	ret
END(rdmsr_safe)

/*
 * Support for writing MSRs in a safe manner.
 */
ENTRY(wrmsr_safe)
/* int wrmsr_safe(u_int msr, uint64_t data) */
	movq	PCPU(curthread),%r8
	movq	TD_PCB(%r8), %r8
	movq	$msr_onfault,PCB_ONFAULT(%r8)
	movq	%rsp,PCB_ONFAULT_SP(%r8)
	movl	%edi,%ecx
	movl	%esi,%eax
	sarq	$32,%rsi
	movl	%esi,%edx
	wrmsr			/* Write MSR pointed to by %ecx.  Accepts
				   high 32 bits in %edx, low in %eax. */
	xorq	%rax,%rax
	movq	%rax,PCB_ONFAULT(%r8)
	ret
END(wrmsr_safe)

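/*
 * Illustrative usage sketch (hypothetical caller): probing an MSR that
 * may not exist on the current cpu without taking an unhandled #GP:
 *
 *	uint64_t val;
 *
 *	if (rdmsr_safe(msr, &val) != 0)
 *		return (ENXIO);		the read faulted (EFAULT)
 */
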
/*
 * MSR operations fault handler
 */
	ALIGN_TEXT
msr_onfault:
	movq	PCPU(curthread),%r8
	movq	TD_PCB(%r8), %r8
	movq	$0,PCB_ONFAULT(%r8)
	movl	$EFAULT,%eax
	ret

ENTRY(smap_open)
	SMAP_OPEN
	ret
END(smap_open)

ENTRY(smap_close)
	SMAP_CLOSE
	ret
END(smap_close)
