/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/traps.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_TRAP_H
#define __ASM_TRAP_H

#include <linux/list.h>
#include <asm/esr.h>
#include <asm/ptrace.h>
#include <asm/sections.h>

#ifdef CONFIG_ARMV8_DEPRECATED
bool try_emulate_armv8_deprecated(struct pt_regs *regs, u32 insn);
#else
static inline bool
try_emulate_armv8_deprecated(struct pt_regs *regs, u32 insn)
{
	return false;
}
#endif /* CONFIG_ARMV8_DEPRECATED */

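/*
 * Helpers for injecting fault signals into user tasks; 'far' is the fault
 * address reported to userspace.
 */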
void force_signal_inject(int signal, int code, unsigned long address, unsigned long err);
void arm64_notify_segfault(unsigned long addr);
void arm64_force_sig_fault(int signo, int code, unsigned long far, const char *str);
void arm64_force_sig_fault_pkey(unsigned long far, const char *str, int pkey);
void arm64_force_sig_mceerr(int code, unsigned long far, short lsb, const char *str);
void arm64_force_sig_ptrace_errno_trap(int errno, unsigned long far, const char *str);

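/*
 * BRK64 handler used early in boot, before the full break-hook handling is
 * registered.
 */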
int early_brk64(unsigned long addr, unsigned long esr, struct pt_regs *regs);

/*
 * Move regs->pc to the next instruction and do any necessary setup before it
 * is executed.
 */
void arm64_skip_faulting_instruction(struct pt_regs *regs, unsigned long size);

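/*
 * Return whether 'ptr' lies within the .irqentry.text section, delimited by
 * the linker-provided __irqentry_text_{start,end} symbols.
 */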
static inline int __in_irqentry_text(unsigned long ptr)
{
	return ptr >= (unsigned long)&__irqentry_text_start &&
	       ptr < (unsigned long)&__irqentry_text_end;
}

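/*
 * Return whether 'ptr' lies within the exception entry text, delimited by
 * the linker-provided __entry_text_{start,end} symbols.
 */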
static inline int in_entry_text(unsigned long ptr)
{
	return ptr >= (unsigned long)&__entry_text_start &&
	       ptr < (unsigned long)&__entry_text_end;
}

/*
 * CPUs with the RAS extensions have an Implementation-Defined-Syndrome bit
 * to indicate whether this ESR has a RAS encoding. CPUs without this feature
 * have an ISS-Valid bit in the same position.
 * If this bit is set, we know it's not a RAS SError.
 * If it's clear, we need to know if the CPU supports RAS. Uncategorized RAS
 * errors share the same encoding as an all-zeros encoding from a CPU that
 * doesn't support RAS.
 */
static inline bool arm64_is_ras_serror(unsigned long esr)
{
	WARN_ON(preemptible());

	if (esr & ESR_ELx_IDS)
		return false;

	return this_cpu_has_cap(ARM64_HAS_RAS_EXTN);
}

/*
 * Return the AET bits from a RAS SError's ESR.
 *
 * It is implementation defined whether Uncategorized errors are containable.
 * We treat them as Uncontainable.
 * Non-RAS SErrors are reported as Uncontained/Uncategorized.
 */
static inline unsigned long arm64_ras_serror_get_severity(unsigned long esr)
{
	unsigned long aet = esr & ESR_ELx_AET;

	if (!arm64_is_ras_serror(esr)) {
		/* Not a RAS error, we can't interpret the ESR. */
		return ESR_ELx_AET_UC;
	}

	/*
	 * AET is RES0 if 'the value returned in the DFSC field is not
	 * [ESR_ELx_FSC_SERROR]'
	 */
	if ((esr & ESR_ELx_FSC) != ESR_ELx_FSC_SERROR) {
		/* No severity information: Uncategorized */
		return ESR_ELx_AET_UC;
	}

	return aet;
}

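/*
 * arm64_is_fatal_ras_serror() reports whether a RAS SError should be treated
 * as fatal to the interrupted context; arm64_serror_panic() never returns.
 */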
bool arm64_is_fatal_ras_serror(struct pt_regs *regs, unsigned long esr);
void __noreturn arm64_serror_panic(struct pt_regs *regs, unsigned long esr);

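/*
 * Rewind the registers of an interrupted MOPS (CPY* and SET*) sequence to the
 * state expected by the prologue instruction, so that the whole sequence can
 * be restarted, possibly on a CPU implementing the other option (A/B) of the
 * instructions.
 */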
static inline void arm64_mops_reset_regs(struct user_pt_regs *regs, unsigned long esr)
{
	bool wrong_option = esr & ESR_ELx_MOPS_ISS_WRONG_OPTION;
	bool option_a = esr & ESR_ELx_MOPS_ISS_OPTION_A;
	int dstreg = ESR_ELx_MOPS_ISS_DESTREG(esr);
	int srcreg = ESR_ELx_MOPS_ISS_SRCREG(esr);
	int sizereg = ESR_ELx_MOPS_ISS_SIZEREG(esr);
	unsigned long dst, src, size;

	dst = regs->regs[dstreg];
	src = regs->regs[srcreg];
	size = regs->regs[sizereg];

	/*
	 * Put the registers back in the original format suitable for a
	 * prologue instruction, using the generic return routine from the
	 * Arm ARM (DDI 0487I.a) rules CNTMJ and MWFQH.
	 */
	if (esr & ESR_ELx_MOPS_ISS_MEM_INST) {
		/* SET* instruction */
		if (option_a ^ wrong_option) {
			/* Format is from Option A; forward set */
			regs->regs[dstreg] = dst + size;
			regs->regs[sizereg] = -size;
		}
	} else {
		/* CPY* instruction */
		if (!(option_a ^ wrong_option)) {
			/* Format is from Option B */
			if (regs->pstate & PSR_N_BIT) {
				/* Backward copy */
				regs->regs[dstreg] = dst - size;
				regs->regs[srcreg] = src - size;
			}
		} else {
			/* Format is from Option A */
			if (size & BIT(63)) {
				/* Forward copy */
				regs->regs[dstreg] = dst + size;
				regs->regs[srcreg] = src + size;
				regs->regs[sizereg] = -size;
			}
		}
	}

	if (esr & ESR_ELx_MOPS_ISS_FROM_EPILOGUE)
		regs->pc -= 8;
	else
		regs->pc -= 4;
}
#endif