/*	$OpenBSD: atomic.h,v 1.12 2019/10/28 09:41:37 visa Exp $	*/

/* Public Domain */

#ifndef _MIPS64_ATOMIC_H_
#define _MIPS64_ATOMIC_H_

#if defined(_KERNEL)

/*
 * Wait until the bits to set are clear, and set them.
 *
 * Spins until (*uip & v) == 0, then atomically ORs v into *uip using a
 * MIPS ll/sc (load-linked/store-conditional) retry loop.  The whole
 * test-and-set is atomic: if another CPU touches *uip between the ll
 * and the sc, the sc fails and the loop restarts from the load.
 */
static __inline void
atomic_wait_and_setbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int tmp0, tmp1;

	__asm__ volatile (
	"1:	ll	%0,	0(%2)\n"	/* tmp0 = *uip (linked load) */
	"	and	%1,	%0,	%3\n"	/* tmp1 = tmp0 & v */
	"	bnez	%1,	1b\n"		/* some bits still set: spin */
	"	or	%0,	%3,	%0\n"	/* tmp0 |= v (branch delay slot;
						 * harmless when spinning, as
						 * the retry reloads tmp0) */
	"	sc	%0,	0(%2)\n"	/* conditional store; tmp0 = ok? */
	"	beqz	%0,	1b\n"		/* lost the reservation: retry */
	"	 nop\n" :
		"=&r"(tmp0), "=&r"(tmp1) :
		"r"(uip), "r"(v) : "memory");
}
277d157f4aSsyuu 
/*
 * Atomically set the bits in v:  *uip |= v.
 * Implemented as an ll/sc retry loop; the "memory" clobber makes the
 * asm a compiler-level barrier as well.
 */
static __inline void
atomic_setbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int tmp;

	__asm__ volatile (
	"1:	ll	%0,	0(%1)\n"	/* tmp = *uip (linked load) */
	"	or	%0,	%2,	%0\n"	/* tmp |= v */
	"	sc	%0,	0(%1)\n"	/* conditional store; tmp = ok? */
	"	beqz	%0,	1b\n"		/* sc failed: retry */
	"	 nop\n" :
		"=&r"(tmp) :
		"r"(uip), "r"(v) : "memory");
}
42f57756c9Sart 
/*
 * Atomically clear the bits in v:  *uip &= ~v.
 * Note the complement is taken in C (the ~v input operand below), so
 * the assembly only needs a plain AND inside the ll/sc loop.
 */
static __inline void
atomic_clearbits_int(volatile unsigned int *uip, unsigned int v)
{
	unsigned int tmp;

	__asm__ volatile (
	"1:	ll	%0,	0(%1)\n"	/* tmp = *uip (linked load) */
	"	and	%0,	%2,	%0\n"	/* tmp &= ~v */
	"	sc	%0,	0(%1)\n"	/* conditional store; tmp = ok? */
	"	beqz	%0,	1b\n"		/* sc failed: retry */
	"	 nop\n" :
		"=&r"(tmp) :
		"r"(uip), "r"(~v) : "memory");
}
57f57756c9Sart 
#endif /* defined(_KERNEL) */

/*
 * Compare-and-swap on a 32-bit word: if *p == o, store n into *p.
 * Returns the value *p held before the operation; the swap took place
 * iff the return value equals o.
 *
 * No "memory" clobber here: only the "+m"(*p) operand constrains the
 * compiler, so this does not act as a full compiler barrier the way
 * the *bits_int routines above do.
 */
static inline unsigned int
_atomic_cas_uint(volatile unsigned int *p, unsigned int o, unsigned int n)
{
	unsigned int rv, wv;

	__asm__ volatile (
	"1:	ll	%0,	%1\n"		/* rv = *p (linked load) */
	"	bne	%0,	%4,	2f\n"	/* mismatch: bail out */
	"	move	%2,	%3\n"		/* wv = n (delay slot) */
	"	sc	%2,	%1\n"		/* conditional store; wv = ok? */
	"	beqz	%2,	1b\n"		/* sc failed: retry */
	"2:	nop\n"
	    : "=&r" (rv), "+m" (*p), "=&r" (wv)
	    : "r" (n), "Ir" (o));

	return (rv);
}
#define atomic_cas_uint(_p, _o, _n) _atomic_cas_uint((_p), (_o), (_n))
781b2dfb44Sjmatthew 
/*
 * Compare-and-swap on a 64-bit word (lld/scd are the doubleword forms
 * of ll/sc): if *p == o, store n.  Returns the previous value of *p;
 * the swap took place iff the return value equals o.
 */
static inline unsigned long
_atomic_cas_ulong(volatile unsigned long *p, unsigned long o, unsigned long n)
{
	unsigned long rv, wv;

	__asm__ volatile (
	"1:	lld	%0,	%1\n"		/* rv = *p (linked load) */
	"	bne	%0,	%4,	2f\n"	/* mismatch: bail out */
	"	move	%2,	%3\n"		/* wv = n (delay slot) */
	"	scd	%2,	%1\n"		/* conditional store; wv = ok? */
	"	beqz	%2,	1b\n"		/* scd failed: retry */
	"2:	nop\n"
	    : "=&r" (rv), "+m" (*p), "=&r" (wv)
	    : "r" (n), "Ir" (o));

	return (rv);
}
#define atomic_cas_ulong(_p, _o, _n) _atomic_cas_ulong((_p), (_o), (_n))
971b2dfb44Sjmatthew 
/*
 * Compare-and-swap on a pointer: if *pp == o, store n.  Returns the
 * previous pointer value; the swap took place iff it equals o.
 * Pointers are 64 bits on mips64, hence the lld/scd pair.
 */
static inline void *
_atomic_cas_ptr(volatile void *pp, void *o, void *n)
{
	/* Recover a properly typed pointer-to-volatile-pointer. */
	void * volatile *p = pp;
	void *rv, *wv;

	__asm__ volatile (
	"1:	lld	%0,	%1\n"		/* rv = *p (linked load) */
	"	bne	%0,	%4,	2f\n"	/* mismatch: bail out */
	"	move	%2,	%3\n"		/* wv = n (delay slot) */
	"	scd	%2,	%1\n"		/* conditional store; wv = ok? */
	"	beqz	%2,	1b\n"		/* scd failed: retry */
	"2:	nop\n"
	    : "=&r" (rv), "+m" (*p), "=&r" (wv)
	    : "r" (n), "Ir" (o));

	return (rv);
}
#define atomic_cas_ptr(_p, _o, _n) _atomic_cas_ptr((_p), (_o), (_n))
1171b2dfb44Sjmatthew 
1181b2dfb44Sjmatthew 
1191b2dfb44Sjmatthew 
/*
 * Atomically exchange *uip with v; returns the previous value of *uip.
 * Unconditional swap via an ll/sc retry loop.
 */
static inline unsigned int
_atomic_swap_uint(volatile unsigned int *uip, unsigned int v)
{
	unsigned int o, t;

	__asm__ volatile (
	"1:	ll	%0,	%1\n"		/* o = *uip (linked load) */
	"	move	%2,	%3\n"		/* t = v */
	"	sc	%2,	%1\n"		/* conditional store; t = ok? */
	"	beqz	%2,	1b\n"		/* sc failed: retry */
	"	nop\n"
	    : "=&r" (o), "+m" (*uip), "=&r" (t)
	    : "r" (v));

	return (o);
}
#define atomic_swap_uint(_p, _v) _atomic_swap_uint((_p), (_v))
1371b2dfb44Sjmatthew 
/*
 * Atomically exchange the 64-bit *uip with v; returns the previous
 * value of *uip.  Same structure as _atomic_swap_uint with lld/scd.
 */
static inline unsigned long
_atomic_swap_ulong(volatile unsigned long *uip, unsigned long v)
{
	unsigned long o, t;

	__asm__ volatile (
	"1:	lld	%0,	%1\n"		/* o = *uip (linked load) */
	"	move	%2,	%3\n"		/* t = v */
	"	scd	%2,	%1\n"		/* conditional store; t = ok? */
	"	beqz	%2,	1b\n"		/* scd failed: retry */
	"	nop\n"
	    : "=&r" (o), "+m" (*uip), "=&r" (t)
	    : "r" (v));

	return (o);
}
#define atomic_swap_ulong(_p, _v) _atomic_swap_ulong((_p), (_v))
1551b2dfb44Sjmatthew 
1561b2dfb44Sjmatthew 
/*
 * Atomically exchange the pointer at *uipp with n; returns the previous
 * pointer value.  Pointers are 64 bits on mips64, hence lld/scd.
 */
static inline void *
_atomic_swap_ptr(volatile void *uipp, void *n)
{
	/* Recover a properly typed pointer-to-volatile-pointer. */
	void * volatile *uip = uipp;
	void *o, *t;

	__asm__ volatile (
	"1:	lld	%0,	%1\n"		/* o = *uip (linked load) */
	"	move	%2,	%3\n"		/* t = n */
	"	scd	%2,	%1\n"		/* conditional store; t = ok? */
	"	beqz	%2,	1b\n"		/* scd failed: retry */
	"	nop\n"
	    : "=&r" (o), "+m" (*uip), "=&r" (t)
	    : "r" (n));

	return (o);
}
#define atomic_swap_ptr(_p, _n) _atomic_swap_ptr((_p), (_n))
1751b2dfb44Sjmatthew 
/*
 * Atomically add v to *uip; returns the new value.
 *
 * The new value is recomputed in C as rv + v because the asm's nv
 * register is clobbered by sc (it holds the success flag afterwards);
 * rv still holds the value loaded by the successful ll.
 * atomic_sub_int_nv relies on unsigned wraparound of 0 - v.
 */
static inline unsigned int
_atomic_add_int_nv(volatile unsigned int *uip, unsigned int v)
{
	unsigned int rv, nv;

	__asm__ volatile (
	"1:	ll	%0,	%1\n"		/* rv = *uip (linked load) */
	"	addu	%2,	%0,	%3\n"	/* nv = rv + v */
	"	sc	%2,	%1\n"		/* conditional store; nv = ok? */
	"	beqz	%2,	1b\n"		/* sc failed: retry */
	"	nop\n"
	    : "=&r" (rv), "+m" (*uip), "=&r" (nv)
	    : "Ir" (v));

	return (rv + v);
}
#define atomic_add_int_nv(_uip, _v) _atomic_add_int_nv((_uip), (_v))
#define atomic_sub_int_nv(_uip, _v) _atomic_add_int_nv((_uip), 0 - (_v))
1941b2dfb44Sjmatthew 
/*
 * Atomically add v to the 64-bit *uip; returns the new value.
 * Doubleword counterpart of _atomic_add_int_nv (lld/scd/daddu);
 * see that function for why the return value is recomputed in C.
 * atomic_sub_long_nv relies on unsigned wraparound of 0UL - v.
 */
static inline unsigned long
_atomic_add_long_nv(volatile unsigned long *uip, unsigned long v)
{
	unsigned long rv, nv;

	__asm__ volatile (
	"1:	lld	%0,	%1\n"		/* rv = *uip (linked load) */
	"	daddu	%2,	%0,	%3\n"	/* nv = rv + v */
	"	scd	%2,	%1\n"		/* conditional store; nv = ok? */
	"	beqz	%2,	1b\n"		/* scd failed: retry */
	"	nop\n"
	    : "=&r" (rv), "+m" (*uip), "=&r" (nv)
	    : "Ir" (v));

	return (rv + v);
}
#define atomic_add_long_nv(_uip, _v) _atomic_add_long_nv((_uip), (_v))
#define atomic_sub_long_nv(_uip, _v) _atomic_add_long_nv((_uip), 0UL - (_v))
2131b2dfb44Sjmatthew 
#endif /* _MIPS64_ATOMIC_H_ */