1 /*
2 * Copyright (c) 2003,2004 The DragonFly Project. All rights reserved.
3 *
4 * This code is derived from software contributed to The DragonFly Project
5 * by Matthew Dillon <dillon@backplane.com>
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 *
11 * 1. Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * 2. Redistributions in binary form must reproduce the above copyright
14 * notice, this list of conditions and the following disclaimer in
15 * the documentation and/or other materials provided with the
16 * distribution.
17 * 3. Neither the name of The DragonFly Project nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific, prior written permission.
20 *
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
24 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
25 * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
26 * INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING,
27 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
29 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
31 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
32 * SUCH DAMAGE.
33 *
34 * $FreeBSD: src/sys/i386/include/lock.h,v 1.11.2.2 2000/09/30 02:49:34 ps Exp $
35 */
36
37 #ifndef _MACHINE_LOCK_H_
38 #define _MACHINE_LOCK_H_
39
40 #ifndef _CPU_PSL_H_
41 #include <machine/psl.h>
42 #endif
43
44 #ifdef LOCORE
45
46 /*
47 * Spinlock assembly support. Note: rax and rcx can be tromped. No
48 * other register will be. Note that these routines are sometimes
49 * called with (%edx) as the mem argument.
50 *
51 * Under UP the spinlock routines still serve to disable/restore
52 * interrupts.
53 */
54
/*
 * SPIN_INIT(mem) - set the spinlock word at mem to the unlocked (zero)
 * state.  Uses no scratch registers.
 */
#define SPIN_INIT(mem) \
movq $0,mem ; \

/*
 * Register-preserving variant; identical, since SPIN_INIT clobbers
 * no registers.
 */
#define SPIN_INIT_NOREG(mem) \
SPIN_INIT(mem) ; \
60
/*
 * SPIN_LOCK(mem) - acquire the spinlock at mem with interrupts disabled.
 *
 * The caller's rflags are saved (pushfq/popq %rcx) before cli, and
 * PSL_C is or'd into the saved image so the value written to the lock
 * word is guaranteed non-zero (zero means "free").  The lock is taken
 * with a test-then-lock-cmpxchgq loop, executing pause while spinning.
 * On success the saved flags image is left in the lock word, which
 * SPIN_UNLOCK later pops back with popfq.
 *
 * Clobbers %rax and %rcx; uses local labels 906-909.
 */
#define SPIN_LOCK(mem) \
pushfq ; \
popq %rcx ; /* flags */ \
cli ; \
orq $PSL_C,%rcx ; /* make sure non-zero */ \
906: ; \
movq mem, %rax ; \
907: ; \
cmpq $0,%rax ; \
jnz 908f ; \
lock cmpxchgq %rcx,mem ; /* Z=1 (jz) on success */ \
jz 909f ; \
pause ; \
jmp 907b ; \
908: ; \
pause ; \
jmp 906b ; \
909: ; \
79
/*
 * Save the registers SPIN_LOCK clobbers (%rcx, %rax) on the stack.
 * Must be paired with SPIN_LOCK_POP_REGS.
 */
#define SPIN_LOCK_PUSH_REGS \
subq $16,%rsp ; \
movq %rcx,(%rsp) ; \
movq %rax,8(%rsp) ; \

/*
 * Restore %rcx and %rax saved by SPIN_LOCK_PUSH_REGS and release the
 * stack space.
 */
#define SPIN_LOCK_POP_REGS \
movq (%rsp),%rcx ; \
movq 8(%rsp),%rax ; \
addq $16,%rsp ; \

/* Stack bytes consumed by SPIN_LOCK_PUSH_REGS (must match subq above). */
#define SPIN_LOCK_FRAME_SIZE 16
91
/*
 * SPIN_LOCK_NOREG(mem) - SPIN_LOCK bracketed by register save/restore
 * so the caller's %rcx and %rax are preserved.
 */
#define SPIN_LOCK_NOREG(mem) \
SPIN_LOCK_PUSH_REGS ; \
SPIN_LOCK(mem) ; \
SPIN_LOCK_POP_REGS ; \
96
/*
 * SPIN_UNLOCK(mem) - release the spinlock at mem and restore the
 * caller's interrupt state.  The lock word holds the rflags image
 * stored by SPIN_LOCK: push it, zero the lock word, then popfq to
 * restore flags (re-enabling interrupts iff they were enabled when
 * the lock was taken).
 */
#define SPIN_UNLOCK(mem) \
pushq mem ; \
movq $0,mem ; \
popfq ; \

/* SPIN_UNLOCK clobbers no registers, so no save/restore is needed. */
#define SPIN_UNLOCK_PUSH_REGS
#define SPIN_UNLOCK_POP_REGS
#define SPIN_UNLOCK_FRAME_SIZE 0

/*
 * Register-preserving unlock; identical to SPIN_UNLOCK.
 */
#define SPIN_UNLOCK_NOREG(mem) \
SPIN_UNLOCK(mem) ; \
108
109 #else /* !LOCORE */
110
111 #ifdef _KERNEL
112
113 /*
114 * Spinlock functions (UP and SMP). Under UP a spinlock still serves
115 * to disable/restore interrupts even if it doesn't spin.
116 */
/*
 * Deprecated spinlock.  opaque is zero when the lock is free (see
 * spin_init_deprecated() below, which resets it to zero); it is
 * non-zero while held.  NOTE(review): presumably the held value is
 * the rflags image stored by the LOCORE SPIN_LOCK macro — confirm
 * against the assembly implementation.
 */
struct spinlock_deprecated {
	volatile long opaque;
};
120
/*
 * Fixed-purpose locks, implemented elsewhere.  Each *_lock() is a
 * combined interrupt-disable + spinlock acquire; the matching
 * *_unlock() releases the lock and restores the interrupt state.
 */
void com_lock(void); /* disables int / spinlock combo */
void com_unlock(void);
void imen_lock(void); /* disables int / spinlock combo */
void imen_unlock(void);
void clock_lock(void); /* disables int / spinlock combo */
void clock_unlock(void);

/* Acquire/release a deprecated spinlock (disable/restore ints as above). */
void spin_lock_deprecated(struct spinlock_deprecated *lock);
void spin_unlock_deprecated(struct spinlock_deprecated *lock);
130
131 /*
132 * Inline version of spinlock routines -- overrides assembly. Only unlock
133 * and init here please.
134 */
135 static __inline void
spin_init_deprecated(struct spinlock_deprecated * lock)136 spin_init_deprecated(struct spinlock_deprecated *lock)
137 {
138 lock->opaque = 0;
139 }
140
141 #endif /* _KERNEL */
142
143 #endif /* LOCORE */
144 #endif /* !_MACHINE_LOCK_H_ */
145