1 /* SPDX-License-Identifier: LGPL-2.1 OR MIT */
2 /*
3  * rseq-arm64.h
4  *
5  * (C) Copyright 2016-2022 - Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
6  * (C) Copyright 2018 - Will Deacon <will.deacon@arm.com>
7  */
8 
9 /*
10  * aarch64 -mbig-endian generates mixed endianness code vs data:
11  * little-endian code and big-endian data. Ensure the RSEQ_SIG signature
12  * matches code endianness.
13  */
/*
 * Signature as encoded in the instruction stream; emitted via ".inst"
 * in RSEQ_ASM_DEFINE_ABORT below.
 */
#define RSEQ_SIG_CODE	0xd428bc00	/* BRK #0x45E0.  */

#ifdef __AARCH64EB__
/* Big-endian data representation: byte-swapped RSEQ_SIG_CODE. */
#define RSEQ_SIG_DATA	0x00bc28d4	/* BRK #0x45E0.  */
#else
/* Little-endian: code and data representations coincide. */
#define RSEQ_SIG_DATA	RSEQ_SIG_CODE
#endif

/* RSEQ_SIG is the signature in data endianness (see comment above). */
#define RSEQ_SIG	RSEQ_SIG_DATA
23 
/* Full memory barrier, inner-shareable domain. */
#define rseq_smp_mb()	__asm__ __volatile__ ("dmb ish" ::: "memory")
/* Read barrier: DMB ISHLD orders prior loads against later accesses. */
#define rseq_smp_rmb()	__asm__ __volatile__ ("dmb ishld" ::: "memory")
/* Write barrier: DMB ISHST orders prior stores against later stores. */
#define rseq_smp_wmb()	__asm__ __volatile__ ("dmb ishst" ::: "memory")
27 
/*
 * Load-acquire of *(p): dispatches on sizeof(*(p)) to the matching
 * ldarb/ldarh/ldar instruction.  The union type-puns the loaded bytes
 * back to the unqualified scalar type of *(p), avoiding aliasing
 * violations.  An unsupported size falls through the switch and yields
 * the (uninitialized) union value; callers must pass 1/2/4/8-byte
 * scalars.
 */
#define rseq_smp_load_acquire(p)						\
__extension__ ({								\
	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
	switch (sizeof(*(p))) {							\
	case 1:									\
		__asm__ __volatile__ ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)				\
			: "Q" (*(p)) : "memory");				\
		break;								\
	case 2:									\
		__asm__ __volatile__ ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)				\
			: "Q" (*(p)) : "memory");				\
		break;								\
	case 4:									\
		__asm__ __volatile__ ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)				\
			: "Q" (*(p)) : "memory");				\
		break;								\
	case 8:									\
		__asm__ __volatile__ ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)				\
			: "Q" (*(p)) : "memory");				\
		break;								\
	}									\
	(rseq_unqual_scalar_typeof(*(p)))__u.__val;				\
})
55 
/* Promote a prior control dependency to acquire ordering: a read barrier. */
#define rseq_smp_acquire__after_ctrl_dep()	rseq_smp_rmb()
57 
/*
 * Store-release of v into *(p): dispatches on sizeof(*(p)) to the
 * matching stlrb/stlrh/stlr instruction.  The union converts v to the
 * unqualified scalar type of *(p) before handing its bytes to the asm.
 * Unsupported sizes fall through the switch and store nothing; callers
 * must pass 1/2/4/8-byte scalars.
 */
#define rseq_smp_store_release(p, v)						\
do {										\
	union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
		{ .__val = (rseq_unqual_scalar_typeof(*(p))) (v) };		\
	switch (sizeof(*(p))) {							\
	case 1:									\
		__asm__ __volatile__ ("stlrb %w1, %0"				\
				: "=Q" (*(p))					\
				: "r" (*(__u8 *)__u.__c)			\
				: "memory");					\
		break;								\
	case 2:									\
		__asm__ __volatile__ ("stlrh %w1, %0"				\
				: "=Q" (*(p))					\
				: "r" (*(__u16 *)__u.__c)			\
				: "memory");					\
		break;								\
	case 4:									\
		__asm__ __volatile__ ("stlr %w1, %0"				\
				: "=Q" (*(p))					\
				: "r" (*(__u32 *)__u.__c)			\
				: "memory");					\
		break;								\
	case 8:									\
		__asm__ __volatile__ ("stlr %1, %0"				\
				: "=Q" (*(p))					\
				: "r" (*(__u64 *)__u.__c)			\
				: "memory");					\
		break;								\
	}									\
} while (0)
89 
/* Scratch registers hard-coded into the asm templates below. */
#define RSEQ_ASM_TMP_REG32	"w15"	/* 32-bit view of x15 */
#define RSEQ_ASM_TMP_REG	"x15"
#define RSEQ_ASM_TMP_REG_2	"x14"	/* second scratch; memcpy loop counter */
93 
/*
 * Emit a critical section descriptor into the __rseq_cs section:
 * 32-bit version and flags words, then 64-bit start_ip,
 * post_commit_offset and abort_ip fields, 32-byte aligned.  Also
 * record a pointer to the descriptor in the __rseq_cs_ptr_array
 * section.
 */
#define __RSEQ_ASM_DEFINE_TABLE(label, version, flags, start_ip,		\
				post_commit_offset, abort_ip)			\
	"	.pushsection	__rseq_cs, \"aw\"\n"				\
	"	.balign	32\n"							\
	__rseq_str(label) ":\n"							\
	"	.long	" __rseq_str(version) ", " __rseq_str(flags) "\n"	\
	"	.quad	" __rseq_str(start_ip) ", "				\
			  __rseq_str(post_commit_offset) ", "			\
			  __rseq_str(abort_ip) "\n"				\
	"	.popsection\n\t"						\
	"	.pushsection __rseq_cs_ptr_array, \"aw\"\n"				\
	"	.quad " __rseq_str(label) "b\n"					\
	"	.popsection\n"

/* Common case: version 0, no flags, offset computed from the two IPs. */
#define RSEQ_ASM_DEFINE_TABLE(label, start_ip, post_commit_ip, abort_ip)	\
	__RSEQ_ASM_DEFINE_TABLE(label, 0x0, 0x0, start_ip,			\
				(post_commit_ip - start_ip), abort_ip)
111 
112 /*
113  * Exit points of a rseq critical section consist of all instructions outside
114  * of the critical section where a critical section can either branch to or
115  * reach through the normal course of its execution. The abort IP and the
116  * post-commit IP are already part of the __rseq_cs section and should not be
117  * explicitly defined as additional exit points. Knowing all exit points is
118  * useful to assist debuggers stepping over the critical section.
119  */
120 #define RSEQ_ASM_DEFINE_EXIT_POINT(start_ip, exit_ip)				\
121 	"	.pushsection __rseq_exit_point_array, \"aw\"\n"			\
122 	"	.quad " __rseq_str(start_ip) ", " __rseq_str(exit_ip) "\n"	\
123 	"	.popsection\n"
124 
/*
 * Arm the critical section: build the address of descriptor cs_label
 * with adrp+add (page base + low 12 bits), store it into the asm
 * operand named rseq_cs, and place "label" marking the start IP.
 */
#define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs)			\
	RSEQ_INJECT_ASM(1)							\
	"	adrp	" RSEQ_ASM_TMP_REG ", " __rseq_str(cs_label) "\n"	\
	"	add	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", :lo12:" __rseq_str(cs_label) "\n"			\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(rseq_cs) "]\n"	\
	__rseq_str(label) ":\n"
132 
/*
 * Define the abort handler at "label": the signature word
 * (RSEQ_SIG_CODE, emitted as an instruction via .inst) immediately
 * precedes the abort IP; the handler branches to the C label
 * abort_label.  Normal (fall-through) execution branches over both
 * to local label 222.
 */
#define RSEQ_ASM_DEFINE_ABORT(label, abort_label)				\
	"	b	222f\n"							\
	"	.inst 	"	__rseq_str(RSEQ_SIG_CODE) "\n"			\
	__rseq_str(label) ":\n"							\
	"	b	%l[" __rseq_str(abort_label) "]\n"			\
	"222:\n"
139 
/* Plain store of asm operand "value" into memory operand "var". */
#define RSEQ_ASM_OP_STORE(value, var)						\
	"	str	%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"

/* Store-release (stlr) of "value" into "var". */
#define RSEQ_ASM_OP_STORE_RELEASE(value, var)					\
	"	stlr	%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"

/* Commit store; post_commit_label marks the post-commit IP. */
#define RSEQ_ASM_OP_FINAL_STORE(value, var, post_commit_label)			\
	RSEQ_ASM_OP_STORE(value, var)						\
	__rseq_str(post_commit_label) ":\n"

/* Commit store with release semantics; post_commit_label marks the
 * post-commit IP. */
#define RSEQ_ASM_OP_FINAL_STORE_RELEASE(value, var, post_commit_label)		\
	RSEQ_ASM_OP_STORE_RELEASE(value, var)					\
	__rseq_str(post_commit_label) ":\n"
153 
/* Expect 64-bit *var == expect: branch to label on mismatch
 * (compare implemented as sub + cbnz on the scratch register). */
#define RSEQ_ASM_OP_CMPEQ(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	"	sub	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(expect) "]\n"				\
	"	cbnz	" RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"

/* 32-bit variant of RSEQ_ASM_OP_CMPEQ. */
#define RSEQ_ASM_OP_CMPEQ32(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG32 ", %[" __rseq_str(var) "]\n"	\
	"	sub	" RSEQ_ASM_TMP_REG32 ", " RSEQ_ASM_TMP_REG32		\
			", %w[" __rseq_str(expect) "]\n"			\
	"	cbnz	" RSEQ_ASM_TMP_REG32 ", " __rseq_str(label) "\n"

/* Expect 64-bit *var != expect: branch to label if they are equal. */
#define RSEQ_ASM_OP_CMPNE(var, expect, label)					\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	"	sub	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(expect) "]\n"				\
	"	cbz	" RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"

/* Branch to label unless the 32-bit current_cpu_id field equals cpu_id. */
#define RSEQ_ASM_CMP_CPU_ID(cpu_id, current_cpu_id, label)			\
	RSEQ_INJECT_ASM(2)							\
	RSEQ_ASM_OP_CMPEQ32(current_cpu_id, cpu_id, label)
175 
/* Load "var" into the scratch register. */
#define RSEQ_ASM_OP_R_LOAD(var)							\
	"	ldr	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"

/* Store the scratch register into "var". */
#define RSEQ_ASM_OP_R_STORE(var)						\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"

/* Indirect load: scratch = *(scratch + offset). */
#define RSEQ_ASM_OP_R_LOAD_OFF(offset)						\
	"	ldr	" RSEQ_ASM_TMP_REG ", [" RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(offset) "]]\n"

/* Add "count" to the scratch register. */
#define RSEQ_ASM_OP_R_ADD(count)						\
	"	add	" RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG		\
			", %[" __rseq_str(count) "]\n"

/* Commit store of the scratch register into "var";
 * post_commit_label marks the post-commit IP. */
#define RSEQ_ASM_OP_R_FINAL_STORE(var, post_commit_label)			\
	"	str	" RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"		\
	__rseq_str(post_commit_label) ":\n"
193 
/*
 * Byte-wise copy of "len" bytes from "src" to "dst", iterating from the
 * last byte down to the first; len == 0 skips the loop entirely.
 * NOTE(review): presumably "BAD" because an abort mid-loop leaves a
 * partial copy in dst — confirm against users in rseq-arm64-bits.h.
 */
#define RSEQ_ASM_OP_R_BAD_MEMCPY(dst, src, len)					\
	"	cbz	%[" __rseq_str(len) "], 333f\n"				\
	"	mov	" RSEQ_ASM_TMP_REG_2 ", %[" __rseq_str(len) "]\n"	\
	"222:	sub	" RSEQ_ASM_TMP_REG_2 ", " RSEQ_ASM_TMP_REG_2 ", #1\n"	\
	"	ldrb	" RSEQ_ASM_TMP_REG32 ", [%[" __rseq_str(src) "]"	\
			", " RSEQ_ASM_TMP_REG_2 "]\n"				\
	"	strb	" RSEQ_ASM_TMP_REG32 ", [%[" __rseq_str(dst) "]"	\
			", " RSEQ_ASM_TMP_REG_2 "]\n"				\
	"	cbnz	" RSEQ_ASM_TMP_REG_2 ", 222b\n"				\
	"333:\n"
204 
/*
 * Instantiate the rseq operation templates: rseq-arm64-bits.h is
 * expanded once per (indexing mode, memory ordering) combination,
 * selected by the RSEQ_TEMPLATE_* macros defined around each include.
 */

/* Per-cpu-id indexing. */

#define RSEQ_TEMPLATE_CPU_ID
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED

#define RSEQ_TEMPLATE_MO_RELEASE
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELEASE
#undef RSEQ_TEMPLATE_CPU_ID

/* Per-mm-cid indexing. */

#define RSEQ_TEMPLATE_MM_CID
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED

#define RSEQ_TEMPLATE_MO_RELEASE
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELEASE
#undef RSEQ_TEMPLATE_MM_CID

/* APIs which are not based on cpu ids. */

#define RSEQ_TEMPLATE_CPU_ID_NONE
#define RSEQ_TEMPLATE_MO_RELAXED
#include "rseq-arm64-bits.h"
#undef RSEQ_TEMPLATE_MO_RELAXED
#undef RSEQ_TEMPLATE_CPU_ID_NONE
236