/*
 * Copyright (c) 2003,2004 The DragonFly Project.  All rights reserved.
 *
 * This code is derived from software contributed to The DragonFly Project
 * by Matthew Dillon <dillon@backplane.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name of The DragonFly Project nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific, prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE
 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/i386/include/lock.h,v 1.11.2.2 2000/09/30 02:49:34 ps Exp $
 * $DragonFly: src/sys/platform/pc32/include/lock.h,v 1.17 2008/06/19 21:32:55 aggelos Exp $
 */

#ifndef _MACHINE_LOCK_H_
#define _MACHINE_LOCK_H_

#ifndef _CPU_PSL_H_
#include <machine/psl.h>
#endif

#ifdef LOCORE

/*
 * Spinlock assembly support.  Note: %rax and %rcx may be clobbered; no
 * other register will be.  Note that these routines are sometimes
 * called with (%rdx) as the mem argument.
 *
 * Under UP the spinlock routines still serve to disable/restore
 * interrupts.
 */

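/*
 * Illustrative sketch of typical use from an assembly file, assuming
 * (hypothetically, for this example only) that the address of the lock
 * word is already in %rdx.  %rax and %rcx are clobbered, so callers
 * that need them preserved can use the *_NOREG forms instead:
 *
 *	SPIN_LOCK((%rdx))	- save flags, cli, and (under SMP) spin
 *				  until the lock word is acquired
 *	...critical section...
 *	SPIN_UNLOCK((%rdx))	- clear the lock word, restore saved flags
 */
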
#ifdef SMP

#define SPIN_INIT(mem)						\
	movq	$0,mem ;					\

#define SPIN_INIT_NOREG(mem)					\
	SPIN_INIT(mem) ;					\

#define SPIN_LOCK(mem)						\
	pushfq ;						\
	popq	%rcx ;		/* flags */			\
	cli ;							\
	orq	$PSL_C,%rcx ;	/* make sure non-zero */	\
7: ;								\
	movq	$0,%rax ;	/* expected contents of lock */	\
	lock cmpxchgq %rcx,mem ; /* Z=1 (jz) on success */	\
	pause ;							\
	jnz	7b ;						\

#define SPIN_LOCK_PUSH_REGS					\
	subq	$16,%rsp ;					\
	movq	%rcx,(%rsp) ;					\
	movq	%rax,8(%rsp) ;					\

#define SPIN_LOCK_POP_REGS					\
	movq	(%rsp),%rcx ;					\
	movq	8(%rsp),%rax ;					\
	addq	$16,%rsp ;					\

#define SPIN_LOCK_FRAME_SIZE	16

#define SPIN_LOCK_NOREG(mem)					\
	SPIN_LOCK_PUSH_REGS ;					\
	SPIN_LOCK(mem) ;					\
	SPIN_LOCK_POP_REGS ;					\

#define SPIN_UNLOCK(mem)					\
	pushq	mem ;						\
	movq	$0,mem ;					\
	popfq ;							\

#define SPIN_UNLOCK_PUSH_REGS
#define SPIN_UNLOCK_POP_REGS
#define SPIN_UNLOCK_FRAME_SIZE	0

#define SPIN_UNLOCK_NOREG(mem)					\
	SPIN_UNLOCK(mem) ;					\

#else	/* !SMP */

#define SPIN_LOCK(mem)						\
	pushfq ;						\
	cli ;							\
	orq	$PSL_C,(%rsp) ;					\
	popq	mem ;						\

#define SPIN_LOCK_PUSH_REGS
#define SPIN_LOCK_POP_REGS
#define SPIN_LOCK_FRAME_SIZE	0

#define SPIN_UNLOCK(mem)					\
	pushq	mem ;						\
	movq	$0,mem ;					\
	popfq ;							\

#define SPIN_UNLOCK_PUSH_REGS
#define SPIN_UNLOCK_POP_REGS
#define SPIN_UNLOCK_FRAME_SIZE	0

#endif	/* SMP */

#else	/* !LOCORE */

#ifdef _KERNEL

/*
 * Spinlock functions (UP and SMP).  Under UP a spinlock still serves
 * to disable/restore interrupts even if it doesn't spin.
 */
struct spinlock_deprecated {
	volatile long	opaque;
};

typedef struct spinlock_deprecated *spinlock_t;

void	mpintr_lock(void);	/* disables int / spinlock combo */
void	mpintr_unlock(void);
void	com_lock(void);		/* disables int / spinlock combo */
void	com_unlock(void);
void	imen_lock(void);	/* disables int / spinlock combo */
void	imen_unlock(void);
void	clock_lock(void);	/* disables int / spinlock combo */
void	clock_unlock(void);

void	spin_lock_deprecated(spinlock_t lock);
void	spin_unlock_deprecated(spinlock_t lock);

/*
 * Inline versions of the spinlock routines -- these override the assembly
 * implementations.  Only unlock and init here, please.
 */
static __inline void
spin_lock_init(spinlock_t lock)
{
	lock->opaque = 0;
}
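
/*
 * Illustrative sketch of how the deprecated interface above might be
 * used from kernel C code; "example_spin" is a hypothetical lock that
 * exists only for this example.  Under UP the lock/unlock pair still
 * disables and restores interrupts even though nothing spins.
 *
 *	static struct spinlock_deprecated example_spin;
 *
 *	spin_lock_init(&example_spin);
 *	...
 *	spin_lock_deprecated(&example_spin);
 *	... critical section ...
 *	spin_unlock_deprecated(&example_spin);
 */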

#endif  /* _KERNEL */

#endif	/* LOCORE */
#endif	/* !_MACHINE_LOCK_H_ */