/* SPDX-License-Identifier: GPL-2.0 */
/* arch/arm64/include/asm/kvm_ptrauth.h: Guest/host ptrauth save/restore
 * Copyright 2019 Arm Limited
 * Authors: Mark Rutland <mark.rutland@arm.com>
 *         Amit Daniel Kachhap <amit.kachhap@arm.com>
 */

#ifndef __ASM_KVM_PTRAUTH_H
#define __ASM_KVM_PTRAUTH_H

#ifdef __ASSEMBLY__

#include <asm/sysreg.h>

#ifdef	CONFIG_ARM64_PTR_AUTH

#define PTRAUTH_REG_OFFSET(x)	(x - CPU_APIAKEYLO_EL1)

/*
 * The CPU_AP*_EL1 byte offsets exceed the immediate offset range (512)
 * of the stp instruction, so the macros below take CPU_APIAKEYLO_EL1 as
 * a base and compute each key's offset relative to it, avoiding an
 * extra add instruction. These macros assume the key offsets follow the
 * order of the sysreg enum in kvm_host.h.
 */
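/*
 * For example, assuming the sysreg enum in kvm_host.h places
 * APIAKEYLO_EL1, APIAKEYHI_EL1, APIBKEYLO_EL1, ... contiguously
 * (8 bytes per entry), PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1)
 * evaluates to 16, so the APIB key pair is stored with a single
 * stp at [\base, #16] and no extra address arithmetic.
 */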
.macro	ptrauth_save_state base, reg1, reg2
	mrs_s	\reg1, SYS_APIAKEYLO_EL1
	mrs_s	\reg2, SYS_APIAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1)]
	mrs_s	\reg1, SYS_APIBKEYLO_EL1
	mrs_s	\reg2, SYS_APIBKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1)]
	mrs_s	\reg1, SYS_APDAKEYLO_EL1
	mrs_s	\reg2, SYS_APDAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDAKEYLO_EL1)]
	mrs_s	\reg1, SYS_APDBKEYLO_EL1
	mrs_s	\reg2, SYS_APDBKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDBKEYLO_EL1)]
	mrs_s	\reg1, SYS_APGAKEYLO_EL1
	mrs_s	\reg2, SYS_APGAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APGAKEYLO_EL1)]
.endm
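
/*
 * As an illustration, invoking ptrauth_save_state with base=x0,
 * reg1=x1, reg2=x2 expands the first key pair to:
 *
 *	mrs_s	x1, SYS_APIAKEYLO_EL1
 *	mrs_s	x2, SYS_APIAKEYHI_EL1
 *	stp	x1, x2, [x0, #PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1)]
 */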

.macro	ptrauth_restore_state base, reg1, reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1)]
	msr_s	SYS_APIAKEYLO_EL1, \reg1
	msr_s	SYS_APIAKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1)]
	msr_s	SYS_APIBKEYLO_EL1, \reg1
	msr_s	SYS_APIBKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDAKEYLO_EL1)]
	msr_s	SYS_APDAKEYLO_EL1, \reg1
	msr_s	SYS_APDAKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDBKEYLO_EL1)]
	msr_s	SYS_APDBKEYLO_EL1, \reg1
	msr_s	SYS_APDBKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APGAKEYLO_EL1)]
	msr_s	SYS_APGAKEYLO_EL1, \reg1
	msr_s	SYS_APGAKEYHI_EL1, \reg2
.endm

/*
 * Both the ptrauth_switch_to_guest and ptrauth_switch_to_host macros
 * check for the presence of one of the cpufeature flags
 * ARM64_HAS_ADDRESS_AUTH_ARCH or ARM64_HAS_ADDRESS_AUTH_IMP_DEF and
 * only then proceed with the save/restore of the Pointer Authentication
 * key registers. See the example invocation after the macro
 * definitions.
 */
.macro ptrauth_switch_to_guest g_ctxt, reg1, reg2, reg3
alternative_if ARM64_HAS_ADDRESS_AUTH_ARCH
	b	1000f
alternative_else_nop_endif
alternative_if_not ARM64_HAS_ADDRESS_AUTH_IMP_DEF
	b	1001f
alternative_else_nop_endif
1000:
	ldr	\reg1, [\g_ctxt, #(VCPU_HCR_EL2 - VCPU_CONTEXT)]
	and	\reg1, \reg1, #(HCR_API | HCR_APK)
	cbz	\reg1, 1001f		// skip if ptrauth is not enabled for the guest
	add	\reg1, \g_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_restore_state	\reg1, \reg2, \reg3
1001:
.endm

.macro ptrauth_switch_to_host g_ctxt, h_ctxt, reg1, reg2, reg3
alternative_if ARM64_HAS_ADDRESS_AUTH_ARCH
	b	2000f
alternative_else_nop_endif
alternative_if_not ARM64_HAS_ADDRESS_AUTH_IMP_DEF
	b	2001f
alternative_else_nop_endif
2000:
	ldr	\reg1, [\g_ctxt, #(VCPU_HCR_EL2 - VCPU_CONTEXT)]
	and	\reg1, \reg1, #(HCR_API | HCR_APK)
	cbz	\reg1, 2001f		// skip if the guest was not using ptrauth
	add	\reg1, \g_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_save_state	\reg1, \reg2, \reg3
	add	\reg1, \h_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_restore_state	\reg1, \reg2, \reg3
	isb				// ensure the restored host keys take effect
2001:
.endm
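
/*
 * A sketch of a typical invocation from the world-switch code, assuming
 * the register conventions used by __guest_enter/__guest_exit in
 * arch/arm64/kvm/hyp/entry.S (x0 holds the vcpu pointer on guest entry,
 * x1/x2 the guest/host context pointers on guest exit, and the
 * remaining registers are scratch):
 *
 *	add	x29, x0, #VCPU_CONTEXT
 *	ptrauth_switch_to_guest x29, x0, x1, x2
 *
 *	ptrauth_switch_to_host x1, x2, x3, x4, x5
 */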

#else /* !CONFIG_ARM64_PTR_AUTH */
.macro ptrauth_switch_to_guest g_ctxt, reg1, reg2, reg3
.endm
.macro ptrauth_switch_to_host g_ctxt, h_ctxt, reg1, reg2, reg3
.endm
#endif /* CONFIG_ARM64_PTR_AUTH */
#endif /* __ASSEMBLY__ */
#endif /* __ASM_KVM_PTRAUTH_H */