/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_S390_STACKTRACE_H
#define _ASM_S390_STACKTRACE_H

#include <linux/uaccess.h>
#include <linux/ptrace.h>
#include <asm/switch_to.h>

enum stack_type {
	STACK_TYPE_UNKNOWN,
	STACK_TYPE_TASK,
	STACK_TYPE_IRQ,
	STACK_TYPE_NODAT,
	STACK_TYPE_RESTART,
	STACK_TYPE_MCCK,
};

struct stack_info {
	enum stack_type type;
	unsigned long begin, end;
};

const char *stack_type_name(enum stack_type type);
int get_stack_info(unsigned long sp, struct task_struct *task,
		   struct stack_info *info, unsigned long *visit_mask);

static inline bool on_stack(struct stack_info *info,
			    unsigned long addr, size_t len)
{
	if (info->type == STACK_TYPE_UNKNOWN)
		return false;
	if (addr + len < addr)
		return false;
	return addr >= info->begin && addr + len <= info->end;
}
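
/*
 * A minimal usage sketch (illustration only): once get_stack_info() has
 * filled in @info for a given stack pointer, on_stack() tells the caller
 * whether the range [addr, addr + len) lies entirely within that stack
 * before it is dereferenced. This assumes get_stack_info() follows the
 * usual 0-on-success convention:
 *
 *	struct stack_info info;
 *	unsigned long mask = 0;
 *
 *	if (!get_stack_info(sp, task, &info, &mask) &&
 *	    on_stack(&info, sp, sizeof(struct stack_frame)))
 *		;	// a struct stack_frame at sp fits on this stack
 */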

/*
 * Stack layout of a C stack frame.
 * The kernel uses the packed stack layout (-mpacked-stack).
 */
struct stack_frame {
	union {
		unsigned long empty[9];
		struct {
			unsigned long sie_control_block;
			unsigned long sie_savearea;
			unsigned long sie_reason;
			unsigned long sie_flags;
		};
	};
	unsigned long gprs[10];
	unsigned long back_chain;
};
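
/*
 * A minimal sketch of how the back chain is meant to be followed
 * (illustration only; sp stands for a known-good stack pointer, and a real
 * unwinder would validate every step, e.g. with get_stack_info() and
 * on_stack() declared above):
 *
 *	struct stack_frame *sf = (struct stack_frame *)sp;
 *
 *	while (sf && sf->back_chain)
 *		sf = (struct stack_frame *)sf->back_chain;
 */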

/*
 * Unlike current_stack_pointer, which simply contains the current value of
 * %r15, current_frame_address() returns the function's stack frame address,
 * which matches %r15 upon function invocation. It may differ from %r15 later
 * if the function allocates stack for local variables or a new stack frame
 * to call other functions.
 */
#define current_frame_address()						\
	((unsigned long)__builtin_frame_address(0) -			\
	 offsetof(struct stack_frame, back_chain))
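
/*
 * A minimal sketch of the difference described above (current_stack_pointer
 * is assumed to be provided elsewhere as the live value of %r15):
 *
 *	unsigned long frame = current_frame_address();
 *	unsigned long sp = current_stack_pointer;
 *
 *	// frame is fixed at the value %r15 had on entry to this function,
 *	// while sp may already be lower if this function allocated its own
 *	// stack frame for locals or further calls.
 */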

static __always_inline unsigned long get_stack_pointer(struct task_struct *task,
						       struct pt_regs *regs)
{
	if (regs)
		return (unsigned long)kernel_stack_pointer(regs);
	if (task == current)
		return current_frame_address();
	return (unsigned long)task->thread.ksp;
}

/*
 * To keep this simple, mark registers 2-6 as being changed (volatile)
 * by the called function, even though register 6 is saved/nonvolatile.
 */
#define CALL_FMT_0 "=&d" (r2)
#define CALL_FMT_1 "+&d" (r2)
#define CALL_FMT_2 CALL_FMT_1, "+&d" (r3)
#define CALL_FMT_3 CALL_FMT_2, "+&d" (r4)
#define CALL_FMT_4 CALL_FMT_3, "+&d" (r5)
#define CALL_FMT_5 CALL_FMT_4, "+&d" (r6)

#define CALL_CLOBBER_5 "0", "1", "14", "cc", "memory"
#define CALL_CLOBBER_4 CALL_CLOBBER_5
#define CALL_CLOBBER_3 CALL_CLOBBER_4, "5"
#define CALL_CLOBBER_2 CALL_CLOBBER_3, "4"
#define CALL_CLOBBER_1 CALL_CLOBBER_2, "3"
#define CALL_CLOBBER_0 CALL_CLOBBER_1

#define CALL_LARGS_0(...)						\
	long dummy = 0
#define CALL_LARGS_1(t1, a1)						\
	long arg1 = (long)(t1)(a1)
#define CALL_LARGS_2(t1, a1, t2, a2)					\
	CALL_LARGS_1(t1, a1);						\
	long arg2 = (long)(t2)(a2)
#define CALL_LARGS_3(t1, a1, t2, a2, t3, a3)				\
	CALL_LARGS_2(t1, a1, t2, a2);					\
	long arg3 = (long)(t3)(a3)
#define CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4)			\
	CALL_LARGS_3(t1, a1, t2, a2, t3, a3);				\
	long arg4 = (long)(t4)(a4)
#define CALL_LARGS_5(t1, a1, t2, a2, t3, a3, t4, a4, t5, a5)		\
	CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4);			\
	long arg5 = (long)(t5)(a5)

#define CALL_REGS_0							\
	register long r2 asm("2") = dummy
#define CALL_REGS_1							\
	register long r2 asm("2") = arg1
#define CALL_REGS_2							\
	CALL_REGS_1;							\
	register long r3 asm("3") = arg2
#define CALL_REGS_3							\
	CALL_REGS_2;							\
	register long r4 asm("4") = arg3
#define CALL_REGS_4							\
	CALL_REGS_3;							\
	register long r5 asm("5") = arg4
#define CALL_REGS_5							\
	CALL_REGS_4;							\
	register long r6 asm("6") = arg5

#define CALL_TYPECHECK_0(...)
#define CALL_TYPECHECK_1(t, a, ...)					\
	typecheck(t, a)
#define CALL_TYPECHECK_2(t, a, ...)					\
	CALL_TYPECHECK_1(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_3(t, a, ...)					\
	CALL_TYPECHECK_2(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_4(t, a, ...)					\
	CALL_TYPECHECK_3(__VA_ARGS__);					\
	typecheck(t, a)
#define CALL_TYPECHECK_5(t, a, ...)					\
	CALL_TYPECHECK_4(__VA_ARGS__);					\
	typecheck(t, a)

#define CALL_PARM_0(...) void
#define CALL_PARM_1(t, a, ...) t
#define CALL_PARM_2(t, a, ...) t, CALL_PARM_1(__VA_ARGS__)
#define CALL_PARM_3(t, a, ...) t, CALL_PARM_2(__VA_ARGS__)
#define CALL_PARM_4(t, a, ...) t, CALL_PARM_3(__VA_ARGS__)
#define CALL_PARM_5(t, a, ...) t, CALL_PARM_4(__VA_ARGS__)
#define CALL_PARM_6(t, a, ...) t, CALL_PARM_5(__VA_ARGS__)
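
/*
 * Expansion sketch for a two argument call (illustration only; t1/a1 and
 * t2/a2 stand for the type/argument pairs passed to call_on_stack()):
 *
 *	CALL_PARM_2(t1, a1, t2, a2)  -> t1, t2
 *	CALL_LARGS_2(t1, a1, t2, a2) -> long arg1 = (long)(t1)(a1);
 *	                                long arg2 = (long)(t2)(a2)
 *	CALL_REGS_2                  -> register long r2 asm("2") = arg1;
 *	                                register long r3 asm("3") = arg2
 *	CALL_FMT_2                   -> "+&d" (r2), "+&d" (r3)
 *	CALL_CLOBBER_2               -> "0", "1", "14", "cc", "memory", "5", "4"
 */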

/*
 * Use call_on_stack() to call a function switching to a specified
 * stack. Proper sign and zero extension of function arguments is
 * done. Usage:
 *
 * rc = call_on_stack(nr, stack, rettype, fn, t1, a1, t2, a2, ...)
 *
 * - nr specifies the number of function arguments of fn.
 * - stack specifies the stack to be used.
 * - rettype is the return type of fn.
 * - fn is the function to be called.
 * - t1, a1, ... are pairs, where t1 must match the type of the first
 *   argument of fn, t2 the second, etc. a1 is the corresponding
 *   first function argument (not its name), etc.
 */
#define call_on_stack(nr, stack, rettype, fn, ...)			\
({									\
	rettype (*__fn)(CALL_PARM_##nr(__VA_ARGS__)) = fn;		\
	unsigned long frame = current_frame_address();			\
	unsigned long __stack = stack;					\
	unsigned long prev;						\
	CALL_LARGS_##nr(__VA_ARGS__);					\
	CALL_REGS_##nr;							\
									\
	CALL_TYPECHECK_##nr(__VA_ARGS__);				\
	asm volatile(							\
		"	lgr	%[_prev],15\n"				\
		"	lg	15,%[_stack]\n"				\
		"	stg	%[_frame],%[_bc](15)\n"			\
		"	brasl	14,%[_fn]\n"				\
		"	lgr	15,%[_prev]\n"				\
		: [_prev] "=&d" (prev), CALL_FMT_##nr			\
		: [_stack] "R" (__stack),				\
		  [_bc] "i" (offsetof(struct stack_frame, back_chain)),	\
		  [_frame] "d" (frame),					\
		  [_fn] "X" (__fn) : CALL_CLOBBER_##nr);		\
	(rettype)r2;							\
})
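
/*
 * Hypothetical usage example (do_work() and its arguments are made up for
 * illustration; stack may be any valid pre-allocated kernel stack):
 *
 *	static int do_work(unsigned long addr, int flags);
 *
 *	rc = call_on_stack(2, stack, int, do_work,
 *			   unsigned long, addr, int, flags);
 *
 * The macro expands to an inline asm sequence that saves %r15, loads the
 * new stack pointer, stores the old frame address into the new frame's
 * back_chain slot and branches to do_work() via %r14, so a back-chain
 * unwinder can still walk across the stack switch.
 */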

#define call_on_stack_noreturn(fn, stack)				\
({									\
	void (*__fn)(void) = fn;					\
									\
	asm volatile(							\
		"	la	15,0(%[_stack])\n"			\
		"	xc	%[_bc](8,15),%[_bc](15)\n"		\
		"	brasl	14,%[_fn]\n"				\
		::[_bc] "i" (offsetof(struct stack_frame, back_chain)),	\
		  [_stack] "a" (stack), [_fn] "X" (__fn));		\
	BUG();								\
})
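
/*
 * Hypothetical usage example (the function name is made up for
 * illustration): switch to the given stack and never come back. The xc
 * instruction above clears the back_chain slot of the new frame, so a
 * back-chain unwinder terminates at the stack switch, and BUG() fires if
 * the called function ever returns.
 *
 *	static void __noreturn startup_on_new_stack(void);
 *
 *	call_on_stack_noreturn(startup_on_new_stack, stack);
 */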

#endif /* _ASM_S390_STACKTRACE_H */