/*
 * Copyright (c) 2003,2004 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/i386/include/lock.h,v 1.11.2.2 2000/09/30 02:49:34 ps Exp $
 */

#ifndef _MACHINE_LOCK_H_
#define _MACHINE_LOCK_H_

#ifndef _CPU_PSL_H_
#include <machine/psl.h>
#endif

#ifdef LOCORE

/*
 * Spinlock assembly support.  Note: %rax and %rcx may be clobbered;
 * no other registers will be.  Note that these routines are sometimes
 * called with (%rdx) as the mem argument.
 *
 * Under UP the spinlock routines still serve to disable/restore
 * interrupts.
 */

#define SPIN_INIT(mem)						\
	movq	$0,mem ;					\

#define SPIN_INIT_NOREG(mem)					\
	SPIN_INIT(mem) ;					\

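/*
 * SPIN_LOCK saves the caller's rflags in %rcx (PSL_C is forced on so
 * the saved value is never zero), disables interrupts, and runs a
 * test-and-test-and-set loop: the lock word is loaded (906) and tested
 * (907) without locking the bus, and the atomic cmpxchgq is attempted
 * only once the word reads zero.  A failed cmpxchgq leaves the current
 * lock value in %rax, so the loop re-tests at 907; while the lock is
 * held the loop pauses and reloads via 908/906.  On success the
 * non-zero saved flags become the lock value, which SPIN_UNLOCK later
 * pops back into rflags.
 */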
#define SPIN_LOCK(mem)						\
	pushfq ;						\
	popq	%rcx ;		/* flags */			\
	cli ;							\
	orq	$PSL_C,%rcx ;	/* make sure non-zero */	\
906: ;								\
	movq	mem,%rax ;					\
907: ;								\
	cmpq	$0,%rax ;					\
	jnz	908f ;						\
	lock cmpxchgq %rcx,mem ; /* Z=1 (jz) on success */	\
	jz	909f ;						\
	pause ;							\
	jmp	907b ;						\
908: ;								\
	pause ;							\
	jmp	906b ;						\
909: ;								\

#define SPIN_LOCK_PUSH_REGS					\
	subq	$16,%rsp ;					\
	movq	%rcx,(%rsp) ;					\
	movq	%rax,8(%rsp) ;					\

#define SPIN_LOCK_POP_REGS					\
	movq	(%rsp),%rcx ;					\
	movq	8(%rsp),%rax ;					\
	addq	$16,%rsp ;					\

#define SPIN_LOCK_FRAME_SIZE	16

#define SPIN_LOCK_NOREG(mem)					\
	SPIN_LOCK_PUSH_REGS ;					\
	SPIN_LOCK(mem) ;					\
	SPIN_LOCK_POP_REGS ;					\

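/*
 * SPIN_UNLOCK pushes the lock word, which still holds the rflags image
 * saved by SPIN_LOCK, clears the lock, and then pops that image back
 * into rflags, releasing the lock and restoring the caller's interrupt
 * state in one sequence.
 */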
#define SPIN_UNLOCK(mem)					\
	pushq	mem ;						\
	movq	$0,mem ;					\
	popfq ;							\

#define SPIN_UNLOCK_PUSH_REGS
#define SPIN_UNLOCK_POP_REGS
#define SPIN_UNLOCK_FRAME_SIZE	0

#define SPIN_UNLOCK_NOREG(mem)					\
	SPIN_UNLOCK(mem) ;					\

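/*
 * Usage sketch (hypothetical, not part of the original header),
 * assuming a lock word `mylock' declared in the data segment:
 *
 *	SPIN_LOCK(mylock)	- interrupts disabled, lock held
 *	... critical section ...
 *	SPIN_UNLOCK(mylock)	- lock released, interrupts restored
 */
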
#else	/* !LOCORE */

#ifdef _KERNEL

/*
 * Spinlock functions (UP and SMP).  Under UP a spinlock still serves
 * to disable/restore interrupts even if it doesn't spin.
 */
struct spinlock_deprecated {
	volatile long	opaque;
};

void	com_lock(void);		/* disables int / spinlock combo */
void	com_unlock(void);
void	imen_lock(void);	/* disables int / spinlock combo */
void	imen_unlock(void);
void	clock_lock(void);	/* disables int / spinlock combo */
void	clock_unlock(void);

void	spin_lock_deprecated(struct spinlock_deprecated *lock);
void	spin_unlock_deprecated(struct spinlock_deprecated *lock);

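/*
 * Usage sketch (hypothetical, not part of the original header):
 *
 *	static struct spinlock_deprecated my_lock;
 *
 *	spin_init_deprecated(&my_lock);		initialize once
 *	spin_lock_deprecated(&my_lock);		interrupts disabled
 *	... critical section ...
 *	spin_unlock_deprecated(&my_lock);	interrupts restored
 */
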
/*
 * Inline version of spinlock routines -- overrides assembly.  Only unlock
 * and init belong here, please.
 */
static __inline void
spin_init_deprecated(struct spinlock_deprecated *lock)
{
	lock->opaque = 0;
}

#endif  /* _KERNEL */

#endif	/* LOCORE */
#endif	/* !_MACHINE_LOCK_H_ */