/*
 * %CopyrightBegin%
 *
 * Copyright Ericsson AB 2005-2016. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * %CopyrightEnd%
 */

/*
 * Native ethread rwlocks on PowerPC.
 * Author: Mikael Pettersson.
 *
 * Based on the examples in Appendix E of Motorola's
 * "Programming Environments Manual For 32-Bit Implementations
 * of the PowerPC Architecture".
 */
#ifndef ETHREAD_PPC_RWLOCK_H
#define ETHREAD_PPC_RWLOCK_H

#define ETHR_HAVE_NATIVE_RWSPINLOCKS 1
#define ETHR_NATIVE_RWSPINLOCK_IMPL "ethread"

/* Unlocked if zero, read-locked if negative, write-locked if +1. */
typedef struct {
    volatile int lock;
} ethr_native_rwlock_t;

#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_AUX_IMPL__)

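/* Initialise the lock to the unlocked (zero) state. */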
static ETHR_INLINE void
ethr_native_rwlock_init(ethr_native_rwlock_t *lock)
{
    lock->lock = 0;
}

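/*
 * Release a read lock: the barrier orders the critical section's accesses
 * before the atomic increment that removes this reader's -1 from the counter.
 */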
static ETHR_INLINE void
ethr_native_read_unlock(ethr_native_rwlock_t *lock)
{
    int tmp;

    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);

    /* this is ethr_native_atomic_inc() without the trailing isync */
    __asm__ __volatile__(
	"1:\t"
	"lwarx	%0,0,%1\n\t"
	"addic	%0,%0,1\n\t"
	"stwcx.	%0,0,%1\n\t"
	"bne-	1b"
	: "=&r"(tmp)
	: "r"(&lock->lock)
	: "cr0", "memory");
}

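/*
 * Try to acquire a read lock: decrement the counter unless a writer holds
 * the lock. Returns nonzero on success, zero if the lock is write-locked.
 */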
static ETHR_INLINE int
ethr_native_read_trylock(ethr_native_rwlock_t *lock)
{
    int counter;

    __asm__ __volatile__(
	"1:\t"
	"lwarx	%0,0,%1\n\t"	/* read lock to counter */
	"addic.	%0,%0,-1\n\t"	/* decrement counter */
	"bge-	2f\n\t"		/* bail if >= 0 (write-locked) */
	"stwcx.	%0,0,%1\n\t"	/* try to store decremented counter */
	"bne-	1b\n\t"		/* loop if lost reservation */
	"isync\n\t"		/* wait for previous insns to complete */
	"2:"
	: "=&r"(counter)
	: "r"(&lock->lock)
	: "cr0", "memory"
#if __GNUC__ > 2
	,"xer"
#endif
	);
    return counter < 0;
}

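/*
 * Nonzero while a writer holds the lock (lock > 0);
 * ethr_native_read_lock() spins on this until the writer is gone.
 */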
static ETHR_INLINE int
ethr_native_read_is_locked(ethr_native_rwlock_t *lock)
{
    return lock->lock > 0;
}

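/*
 * Acquire a read lock: on a failed trylock, spin with plain loads
 * (no reservations) until the writer releases, then retry.
 */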
static ETHR_INLINE void
ethr_native_read_lock(ethr_native_rwlock_t *lock)
{
    for(;;) {
	if (__builtin_expect(ethr_native_read_trylock(lock) != 0, 1))
	    break;
	do {
	    __asm__ __volatile__("":::"memory");
	} while (ethr_native_read_is_locked(lock));
    }
}

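/*
 * Release the write lock: the barrier orders the critical section's
 * accesses before the plain store that returns the lock to zero.
 */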
static ETHR_INLINE void
ethr_native_write_unlock(ethr_native_rwlock_t *lock)
{
    ETHR_MEMBAR(ETHR_LoadStore|ETHR_StoreStore);
    lock->lock = 0;
}

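/*
 * Try to acquire the write lock: succeeds only if the lock is completely
 * unlocked (zero). Returns nonzero on success, zero otherwise.
 */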
static ETHR_INLINE int
ethr_native_write_trylock(ethr_native_rwlock_t *lock)
{
    int prev;

    /* identical to ethr_native_spin_trylock() */
    __asm__ __volatile__(
	"1:\t"
	"lwarx	%0,0,%1\n\t"	/* read lock to prev */
	"cmpwi	0,%0,0\n\t"
	"bne-	2f\n\t"		/* bail if non-zero (any lock) */
	"stwcx.	%2,0,%1\n\t"	/* try to make the lock positive */
	"bne-	1b\n\t"		/* loop if lost reservation */
	"isync\n\t"		/* wait for previous insns to complete */
	"2:"
	: "=&r"(prev)
	: "r"(&lock->lock), "r"(1)
	: "cr0", "memory");
    return prev == 0;
}

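/* Nonzero if the lock is held at all, by readers or by a writer. */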
static ETHR_INLINE int
ethr_native_write_is_locked(ethr_native_rwlock_t *lock)
{
    return lock->lock != 0;
}

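/*
 * Acquire the write lock: on a failed trylock, spin with plain loads
 * until the lock looks free, then retry.
 */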
static ETHR_INLINE void
ethr_native_write_lock(ethr_native_rwlock_t *lock)
{
    for(;;) {
	if (__builtin_expect(ethr_native_write_trylock(lock) != 0, 1))
	    break;
	do {
	    __asm__ __volatile__("":::"memory");
	} while (ethr_native_write_is_locked(lock));
    }
}

#endif /* ETHR_TRY_INLINE_FUNCS || ETHR_AUX_IMPL__ */

#endif /* ETHREAD_PPC_RWLOCK_H */