xref: /linux/arch/x86/include/asm/cmpxchg_64.h (revision db10cb9b)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

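/*
 * The 64-bit cmpxchg variants are trivial on x86-64: a 64-bit operand is the
 * native word size, so these simply forward to the generic arch_cmpxchg*()
 * helpers after a compile-time check that the operand really is 8 bytes wide.
 */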
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})

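/*
 * CMPXCHG16B operates on a 128-bit value held in RDX:RAX (the expected/old
 * value) and RCX:RBX (the new value). This union lets the helpers below view
 * a u128 as the two 64-bit halves that map onto those register pairs.
 */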
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};

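/*
 * 128-bit cmpxchg: CMPXCHG16B compares RDX:RAX against the 16-byte
 * (16-byte aligned) memory operand; on a match it stores RCX:RBX there,
 * otherwise it loads the current memory contents into RDX:RAX. Either way,
 * o.full ends up holding the value that was found in memory.
 */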
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})

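/*
 * Fully atomic 128-bit cmpxchg: LOCK_PREFIX makes the CMPXCHG16B atomic
 * against all CPUs (the lock prefix is patched out on non-SMP configurations).
 * Returns the value found at *ptr; the exchange happened iff that value
 * equals @old.
 */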
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
#define arch_cmpxchg128 arch_cmpxchg128

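/*
 * Like arch_cmpxchg128(), but without the lock prefix: still a single
 * instruction, so it cannot be torn by an interrupt on the local CPU, but it
 * is not atomic against other CPUs. Suitable for per-CPU data only.
 */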
static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
#define arch_cmpxchg128_local arch_cmpxchg128_local

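/*
 * try_cmpxchg flavour: evaluates to true if the exchange happened. On failure
 * the value currently in memory is written back into *_oldp, which is what
 * makes compare-and-swap retry loops cheap (no separate re-read needed).
 * CC_SET()/CC_OUT() pick up CMPXCHG16B's ZF result directly as the boolean.
 */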
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

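/*
 * Fully atomic (LOCK'd) try_cmpxchg128. Typical use is a retry loop; the
 * sketch below is illustrative only (not part of this header) and assumes a
 * hypothetical 16-byte aligned u128 counter. The initial read need not be
 * atomic: a stale value simply makes the first attempt fail, and the failed
 * attempt refreshes 'old' with the value actually seen in memory.
 *
 *	u128 old = *counter, new;
 *
 *	do {
 *		new = old + 1;
 *	} while (!arch_try_cmpxchg128(counter, &old, new));
 */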
static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
#define arch_try_cmpxchg128 arch_try_cmpxchg128

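/*
 * Unlocked try_cmpxchg128 for per-CPU use: safe against interrupts on the
 * local CPU, but not atomic against concurrent access from other CPUs.
 */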
static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local

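/*
 * CMPXCHG16B is an optional extension (some early 64-bit CPUs lack it), so
 * callers that want the 128-bit primitives at runtime must check the CPUID
 * CX16 feature bit first.
 */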
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */