/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 2006  Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/asm-eva.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>
#include <asm/sync.h>

#define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
#define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
#include <asm-generic/futex.h>

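/*
 * __futex_atomic_op() performs one read-modify-write step on the user-space
 * futex word.  Three paths are selected at run time: an LL/SC loop using the
 * branch-likely (beqzl) sequence required by the R10000 LL/SC workaround, a
 * plain LL/SC loop using the EVA-aware user_ll/user_sc accessors, and the
 * generic non-LLSC fallback.  "insn" places the new value in $1 (the
 * assembler temporary), typically combining the old value (%1) with the
 * operand (%z5).  Note that the fallback path refers to "oval" directly, so
 * the macro is only intended to be expanded inside
 * arch_futex_atomic_op_inuser(), where that pointer is in scope.
 */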
#define __futex_atomic_op(op, insn, ret, oldval, uaddr, oparg)		\
{									\
	if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {	\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	arch=r4000			\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqzl	$1, 1b				\n"	\
		__stringify(__WEAK_LLSC_MB) "			\n"	\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else if (cpu_has_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"2:	"user_sc("$1", "%2")"			\n"	\
		"	beqz	$1, 1b				\n"	\
		__stringify(__WEAK_LLSC_MB) "			\n"	\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else {							\
		/* fallback for non-SMP */				\
		ret = futex_atomic_op_inuser_local(op, oparg, oval, uaddr);	\
	}								\
}

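/*
 * Atomically perform the FUTEX_OP_* operation "op" with operand "oparg" on
 * the user futex word at "uaddr".  On success the previous value of the
 * word is returned through *oval.  Returns 0 on success, -EFAULT if the
 * user access faults, or -ENOSYS for an unknown operation.
 */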
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op(op, "move $1, %z5", ret, oldval, uaddr, oparg);
		break;

	case FUTEX_OP_ADD:
		__futex_atomic_op(op, "addu $1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op(op, "or	$1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op(op, "and	$1, %1, %z5",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op(op, "xor	$1, %1, %z5",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

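/*
 * Atomically compare the user futex word at "uaddr" with "oldval" and, if
 * they match, replace it with "newval".  The value actually read is
 * returned through *uval so the caller can detect a lost race.  Returns 0
 * unless the user access faults (-EFAULT); a failed comparison is reported
 * by *uval differing from oldval, not by the return value.
 */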
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %3					\n"
		"	bne	%1, %z4, 3f				\n"
		"	.set	pop					\n"
		"	move	$1, %z5					\n"
		"	.set	arch=r4000				\n"
		"2:	sc	$1, %2					\n"
		"	beqzl	$1, 1b					\n"
		__stringify(__WEAK_LLSC_MB) "				\n"
		"3:							\n"
		"	.insn						\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %6					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	push					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"	" __SYNC(full, loongson3_war) "			\n"
		"1:	"user_ll("%1", "%3")"				\n"
		"	bne	%1, %z4, 3f				\n"
		"	.set	pop					\n"
		"	move	$1, %z5					\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"			\n"
		"2:	"user_sc("$1", "%2")"				\n"
		"	beqz	$1, 1b					\n"
		"3:	" __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
		"	.insn						\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %6					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else {
		return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
	}

	*uval = val;
	return ret;
}

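/*
 * Illustrative call pattern (a hedged sketch, not part of this header; the
 * local variable names below are invented for the example).  The generic
 * futex code uses arch_futex_atomic_op_inuser() to carry out the encoded
 * FUTEX_WAKE_OP operations and futex_atomic_cmpxchg_inatomic() to
 * compare-and-exchange a futex word, e.g.:
 *
 *	u32 __user *uaddr;	// user-space futex word
 *	int old, ret;
 *	u32 cur;
 *
 *	ret = arch_futex_atomic_op_inuser(FUTEX_OP_ADD, 1, &old, uaddr);
 *	ret = futex_atomic_cmpxchg_inatomic(&cur, uaddr, old, old + 1);
 */
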
#endif
#endif /* _ASM_FUTEX_H */