/*	$NetBSD: lock.h,v 1.12 2008/04/28 20:23:32 martin Exp $	*/

/*-
 * Copyright (c) 2000, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */

#ifndef _POWERPC_LOCK_H_
#define _POWERPC_LOCK_H_

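/*
 * Non-atomic helpers for inspecting and setting the lock word; the
 * atomic acquire/release operations follow below.
 */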
static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_LOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_LOCKED;
}

static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{
	*alp = __SIMPLELOCK_UNLOCKED;
	__asm volatile ("sync");
}

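/*
 * Acquire the lock: spin until the lwarx/stwcx. pair atomically swaps
 * the lock word from UNLOCKED to LOCKED.  While the lock is held by
 * another CPU, the inner loop spins on a plain lwzx load so that a
 * reservation is only taken out when the lock appears free.  The
 * trailing isync prevents instructions in the critical section from
 * being performed before the lock is actually held (acquire semantics).
 */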
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	int old;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	3f		\n\
2:	lwzx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	beq+	1b		\n\
	b	2b		\n\
3:	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED)
	: "memory");
}

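/*
 * Try to acquire the lock once without spinning.  The stwcx. to an
 * on-stack dummy word ensures that no reservation is left outstanding
 * when the lock was already held.  Returns non-zero if the lock was
 * acquired.
 */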
static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	int old, dummy;

	__asm volatile ("	\
				\n\
1:	lwarx	%0,0,%1		\n\
	cmpwi	%0,%2		\n\
	bne	2f		\n\
	stwcx.	%3,0,%1		\n\
	bne-	1b		\n\
2:	stwcx.	%3,0,%4		\n\
	isync			\n\
				\n"
	: "=&r"(old)
	: "r"(alp), "I"(__SIMPLELOCK_UNLOCKED), "r"(__SIMPLELOCK_LOCKED),
	  "r"(&dummy)
	: "memory");

	return (old == __SIMPLELOCK_UNLOCKED);
}

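/*
 * Release the lock.  The sync is a full barrier, so all stores made
 * inside the critical section are visible before the lock word is
 * cleared (release semantics).
 */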
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	__asm volatile ("sync");
	*alp = __SIMPLELOCK_UNLOCKED;
}

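/*
 * Memory barrier primitives: isync for the read barrier, sync (a full
 * barrier) for both the write barrier and the full barrier.
 */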
static __inline void
mb_read(void)
{
	__asm volatile ("isync" ::: "memory");
}

static __inline void
mb_write(void)
{
	__asm volatile ("sync" ::: "memory");
}

static __inline void
mb_memory(void)
{
	__asm volatile ("sync" ::: "memory");
}

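/*
 * Illustrative sketch only: how the primitives above are typically
 * paired around a short critical section.  The example_* names are
 * hypothetical and not part of this header.
 *
 *	static __cpu_simple_lock_t example_lock = __SIMPLELOCK_UNLOCKED;
 *
 *	void
 *	example_update(void)
 *	{
 *		__cpu_simple_lock(&example_lock);
 *		... modify data shared between processors ...
 *		__cpu_simple_unlock(&example_lock);
 *	}
 */
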
#endif /* _POWERPC_LOCK_H_ */