xref: /linux/tools/objtool/check.c (revision 9a6b55ac)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
19 #define FAKE_JUMP_OFFSET -1
20 
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
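
/*
 * For reference, C jump tables are expected to land in this section via an
 * annotation roughly like the following (the exact macro lives in
 * include/linux/compiler.h and may differ between trees; SOME_OP and the
 * label names below are placeholders):
 *
 *   #define __annotate_jump_table __section(.rodata..c_jump_table)
 *
 *   static const void * const jumptable[] __annotate_jump_table = {
 *           [0 ... 255] = &&default_label,
 *           [SOME_OP]   = &&some_op_label,
 *   };
 *
 * The computed-goto dispatch ("goto *jumptable[op];") then appears to objtool
 * as an indirect jump whose table lives in C_JUMP_TABLE_SECTION.
 */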
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
43 
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 #define func_for_each_insn_all(file, func, insn)			\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define func_for_each_insn(file, func, insn)				\
81 	for (insn = find_insn(file, func->sec, func->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == func->sec &&				\
84 		insn->offset < func->offset + func->len;		\
85 	     insn = list_next_entry(insn, list))
86 
87 #define func_for_each_insn_continue_reverse(file, func, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == func->sec && insn->offset >= func->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
100 static bool is_sibling_call(struct instruction *insn)
101 {
102 	/* An indirect jump is either a sibling call or a jump to a table. */
103 	if (insn->type == INSN_JUMP_DYNAMIC)
104 		return list_empty(&insn->alts);
105 
106 	if (insn->type != INSN_JUMP_CONDITIONAL &&
107 	    insn->type != INSN_JUMP_UNCONDITIONAL)
108 		return false;
109 
110 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
111 	return !!insn->call_dest;
112 }
113 
114 /*
115  * This checks to see if the given function is a "noreturn" function.
116  *
117  * For global functions which are outside the scope of this object file, we
118  * have to keep a manual list of them.
119  *
120  * For local functions, we have to detect them manually by simply looking for
121  * the lack of a return instruction.
122  */
123 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
124 				int recursion)
125 {
126 	int i;
127 	struct instruction *insn;
128 	bool empty = true;
129 
130 	/*
131 	 * Unfortunately these have to be hard coded because the noreturn
132 	 * attribute isn't provided in ELF data.
133 	 */
134 	static const char * const global_noreturns[] = {
135 		"__stack_chk_fail",
136 		"panic",
137 		"do_exit",
138 		"do_task_dead",
139 		"__module_put_and_exit",
140 		"complete_and_exit",
141 		"__reiserfs_panic",
142 		"lbug_with_loc",
143 		"fortify_panic",
144 		"usercopy_abort",
145 		"machine_real_restart",
146 		"rewind_stack_do_exit",
147 		"kunit_try_catch_throw",
148 	};
149 
150 	if (!func)
151 		return false;
152 
153 	if (func->bind == STB_WEAK)
154 		return false;
155 
156 	if (func->bind == STB_GLOBAL)
157 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
158 			if (!strcmp(func->name, global_noreturns[i]))
159 				return true;
160 
161 	if (!func->len)
162 		return false;
163 
164 	insn = find_insn(file, func->sec, func->offset);
165 	if (!insn || !insn->func)
166 		return false;
167 
168 	func_for_each_insn_all(file, func, insn) {
169 		empty = false;
170 
171 		if (insn->type == INSN_RETURN)
172 			return false;
173 	}
174 
175 	if (empty)
176 		return false;
177 
178 	/*
179 	 * A function can have a sibling call instead of a return.  In that
180 	 * case, the function's dead-end status depends on whether the target
181 	 * of the sibling call returns.
182 	 */
183 	func_for_each_insn_all(file, func, insn) {
184 		if (is_sibling_call(insn)) {
185 			struct instruction *dest = insn->jump_dest;
186 
187 			if (!dest)
188 				/* sibling call to another file */
189 				return false;
190 
191 			/* local sibling call */
192 			if (recursion == 5) {
193 				/*
194 				 * Infinite recursion: two functions have
195 				 * sibling calls to each other.  This is a very
196 				 * rare case.  It means they aren't dead ends.
197 				 */
198 				return false;
199 			}
200 
201 			return __dead_end_function(file, dest->func, recursion+1);
202 		}
203 	}
204 
205 	return true;
206 }
207 
208 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
209 {
210 	return __dead_end_function(file, func, 0);
211 }
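
/*
 * As a sketch (my_abort() is a made-up example), a local function like
 *
 *   static void my_abort(void)
 *   {
 *           panic("giving up");
 *   }
 *
 * typically compiles to a "call panic" with no return instruction after it,
 * so the loops above find neither an INSN_RETURN nor a sibling call and the
 * function is reported as a dead end.  If the compiler instead emits a tail
 * jump to a target in another object file, jump_dest is NULL and we
 * conservatively assume it returns ("sibling call to another file" above).
 */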
212 
213 static void clear_insn_state(struct insn_state *state)
214 {
215 	int i;
216 
217 	memset(state, 0, sizeof(*state));
218 	state->cfa.base = CFI_UNDEFINED;
219 	for (i = 0; i < CFI_NUM_REGS; i++) {
220 		state->regs[i].base = CFI_UNDEFINED;
221 		state->vals[i].base = CFI_UNDEFINED;
222 	}
223 	state->drap_reg = CFI_UNDEFINED;
224 	state->drap_offset = -1;
225 }
226 
227 /*
228  * Call the arch-specific instruction decoder for all the instructions and add
229  * them to the global instruction list.
230  */
231 static int decode_instructions(struct objtool_file *file)
232 {
233 	struct section *sec;
234 	struct symbol *func;
235 	unsigned long offset;
236 	struct instruction *insn;
237 	int ret;
238 
239 	for_each_sec(file, sec) {
240 
241 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
242 			continue;
243 
244 		if (strcmp(sec->name, ".altinstr_replacement") &&
245 		    strcmp(sec->name, ".altinstr_aux") &&
246 		    strncmp(sec->name, ".discard.", 9))
247 			sec->text = true;
248 
249 		for (offset = 0; offset < sec->len; offset += insn->len) {
250 			insn = malloc(sizeof(*insn));
251 			if (!insn) {
252 				WARN("malloc failed");
253 				return -1;
254 			}
255 			memset(insn, 0, sizeof(*insn));
256 			INIT_LIST_HEAD(&insn->alts);
257 			clear_insn_state(&insn->state);
258 
259 			insn->sec = sec;
260 			insn->offset = offset;
261 
262 			ret = arch_decode_instruction(file->elf, sec, offset,
263 						      sec->len - offset,
264 						      &insn->len, &insn->type,
265 						      &insn->immediate,
266 						      &insn->stack_op);
267 			if (ret)
268 				goto err;
269 
270 			hash_add(file->insn_hash, &insn->hash, insn->offset);
271 			list_add_tail(&insn->list, &file->insn_list);
272 		}
273 
274 		list_for_each_entry(func, &sec->symbol_list, list) {
275 			if (func->type != STT_FUNC || func->alias != func)
276 				continue;
277 
278 			if (!find_insn(file, sec, func->offset)) {
279 				WARN("%s(): can't find starting instruction",
280 				     func->name);
281 				return -1;
282 			}
283 
284 			func_for_each_insn(file, func, insn)
285 				insn->func = func;
286 		}
287 	}
288 
289 	return 0;
290 
291 err:
292 	free(insn);
293 	return ret;
294 }
295 
296 /*
297  * Mark "ud2" instructions and manually annotated dead ends.
298  */
299 static int add_dead_ends(struct objtool_file *file)
300 {
301 	struct section *sec;
302 	struct rela *rela;
303 	struct instruction *insn;
304 	bool found;
305 
306 	/*
307 	 * By default, "ud2" is a dead end unless otherwise annotated, because
308 	 * GCC 7 inserts it for certain divide-by-zero cases.
309 	 */
310 	for_each_insn(file, insn)
311 		if (insn->type == INSN_BUG)
312 			insn->dead_end = true;
313 
314 	/*
315 	 * Check for manually annotated dead ends.
316 	 */
317 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
318 	if (!sec)
319 		goto reachable;
320 
321 	list_for_each_entry(rela, &sec->rela_list, list) {
322 		if (rela->sym->type != STT_SECTION) {
323 			WARN("unexpected relocation symbol type in %s", sec->name);
324 			return -1;
325 		}
326 		insn = find_insn(file, rela->sym->sec, rela->addend);
327 		if (insn)
328 			insn = list_prev_entry(insn, list);
329 		else if (rela->addend == rela->sym->sec->len) {
330 			found = false;
331 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
332 				if (insn->sec == rela->sym->sec) {
333 					found = true;
334 					break;
335 				}
336 			}
337 
338 			if (!found) {
339 				WARN("can't find unreachable insn at %s+0x%x",
340 				     rela->sym->sec->name, rela->addend);
341 				return -1;
342 			}
343 		} else {
344 			WARN("can't find unreachable insn at %s+0x%x",
345 			     rela->sym->sec->name, rela->addend);
346 			return -1;
347 		}
348 
349 		insn->dead_end = true;
350 	}
351 
352 reachable:
353 	/*
354 	 * These manually annotated reachable checks are needed for GCC 4.4,
355 	 * where the Linux unreachable() macro isn't supported.  In that case
356 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
357 	 * not a dead end.
358 	 */
359 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
360 	if (!sec)
361 		return 0;
362 
363 	list_for_each_entry(rela, &sec->rela_list, list) {
364 		if (rela->sym->type != STT_SECTION) {
365 			WARN("unexpected relocation symbol type in %s", sec->name);
366 			return -1;
367 		}
368 		insn = find_insn(file, rela->sym->sec, rela->addend);
369 		if (insn)
370 			insn = list_prev_entry(insn, list);
371 		else if (rela->addend == rela->sym->sec->len) {
372 			found = false;
373 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
374 				if (insn->sec == rela->sym->sec) {
375 					found = true;
376 					break;
377 				}
378 			}
379 
380 			if (!found) {
381 				WARN("can't find reachable insn at %s+0x%x",
382 				     rela->sym->sec->name, rela->addend);
383 				return -1;
384 			}
385 		} else {
386 			WARN("can't find reachable insn at %s+0x%x",
387 			     rela->sym->sec->name, rela->addend);
388 			return -1;
389 		}
390 
391 		insn->dead_end = false;
392 	}
393 
394 	return 0;
395 }
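
/*
 * The .discard.unreachable/.discard.reachable entries consumed above come
 * from the annotate_unreachable()/annotate_reachable() macros in
 * include/linux/compiler.h, roughly of this form (simplified; details vary
 * between kernel versions):
 *
 *   asm volatile("%c0:\n\t"
 *                ".pushsection .discard.unreachable\n\t"
 *                ".long %c0b - .\n\t"
 *                ".popsection\n\t" : : "i" (__COUNTER__));
 *
 * Each use records the annotated instruction's address as a section-relative
 * entry, which is what the find_insn() lookups above resolve.
 */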
396 
397 /*
398  * Warnings shouldn't be reported for ignored functions.
399  */
400 static void add_ignores(struct objtool_file *file)
401 {
402 	struct instruction *insn;
403 	struct section *sec;
404 	struct symbol *func;
405 	struct rela *rela;
406 
407 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
408 	if (!sec)
409 		return;
410 
411 	list_for_each_entry(rela, &sec->rela_list, list) {
412 		switch (rela->sym->type) {
413 		case STT_FUNC:
414 			func = rela->sym;
415 			break;
416 
417 		case STT_SECTION:
418 			func = find_symbol_by_offset(rela->sym->sec, rela->addend);
419 			if (!func || func->type != STT_FUNC)
420 				continue;
421 			break;
422 
423 		default:
424 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
425 			continue;
426 		}
427 
428 		func_for_each_insn_all(file, func, insn)
429 			insn->ignore = true;
430 	}
431 }
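
/*
 * The section read above is populated by STACK_FRAME_NON_STANDARD() from
 * include/linux/frame.h, which does roughly:
 *
 *   #define STACK_FRAME_NON_STANDARD(func)				\
 *	static void __used __section(.discard.func_stack_frame_non_standard) \
 *		*__func_stack_frame_non_standard_##func = func
 *
 * i.e. each use drops a pointer to the function into the section, and the
 * rela for that pointer is what the loop above resolves back to a symbol.
 */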
432 
433 /*
434  * This is a whitelist of functions that are allowed to be called with AC set.
435  * The list is meant to be minimal and only contains compiler instrumentation
436  * ABI and a few functions used to implement *_{to,from}_user() functions.
437  *
438  * These functions must not directly change AC, but may PUSHF/POPF.
439  */
440 static const char *uaccess_safe_builtin[] = {
441 	/* KASAN */
442 	"kasan_report",
443 	"check_memory_region",
444 	/* KASAN out-of-line */
445 	"__asan_loadN_noabort",
446 	"__asan_load1_noabort",
447 	"__asan_load2_noabort",
448 	"__asan_load4_noabort",
449 	"__asan_load8_noabort",
450 	"__asan_load16_noabort",
451 	"__asan_storeN_noabort",
452 	"__asan_store1_noabort",
453 	"__asan_store2_noabort",
454 	"__asan_store4_noabort",
455 	"__asan_store8_noabort",
456 	"__asan_store16_noabort",
457 	/* KASAN in-line */
458 	"__asan_report_load_n_noabort",
459 	"__asan_report_load1_noabort",
460 	"__asan_report_load2_noabort",
461 	"__asan_report_load4_noabort",
462 	"__asan_report_load8_noabort",
463 	"__asan_report_load16_noabort",
464 	"__asan_report_store_n_noabort",
465 	"__asan_report_store1_noabort",
466 	"__asan_report_store2_noabort",
467 	"__asan_report_store4_noabort",
468 	"__asan_report_store8_noabort",
469 	"__asan_report_store16_noabort",
470 	/* KCOV */
471 	"write_comp_data",
472 	"__sanitizer_cov_trace_pc",
473 	"__sanitizer_cov_trace_const_cmp1",
474 	"__sanitizer_cov_trace_const_cmp2",
475 	"__sanitizer_cov_trace_const_cmp4",
476 	"__sanitizer_cov_trace_const_cmp8",
477 	"__sanitizer_cov_trace_cmp1",
478 	"__sanitizer_cov_trace_cmp2",
479 	"__sanitizer_cov_trace_cmp4",
480 	"__sanitizer_cov_trace_cmp8",
481 	/* UBSAN */
482 	"ubsan_type_mismatch_common",
483 	"__ubsan_handle_type_mismatch",
484 	"__ubsan_handle_type_mismatch_v1",
485 	"__ubsan_handle_shift_out_of_bounds",
486 	/* misc */
487 	"csum_partial_copy_generic",
488 	"__memcpy_mcsafe",
489 	"mcsafe_handle_tail",
490 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
491 	NULL
492 };
493 
494 static void add_uaccess_safe(struct objtool_file *file)
495 {
496 	struct symbol *func;
497 	const char **name;
498 
499 	if (!uaccess)
500 		return;
501 
502 	for (name = uaccess_safe_builtin; *name; name++) {
503 		func = find_symbol_by_name(file->elf, *name);
504 		if (!func)
505 			continue;
506 
507 		func->uaccess_safe = true;
508 	}
509 }
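
/*
 * Context, as a sketch: on x86, user_access_begin()/user_access_end() bracket
 * raw user memory accesses with STAC/CLAC, e.g.:
 *
 *   if (!user_access_begin(ptr, size))	// sets AC (STAC) on success
 *           return -EFAULT;
 *   unsafe_get_user(val, ptr, efault);	// raw access, no fault wrapper
 *   user_access_end();			// clears AC (CLAC)
 *
 * While AC is set, calling out to arbitrary functions is unsafe because the
 * callee would also run with uaccess enabled, which is why only the builtins
 * above are exempted from the "call with UACCESS enabled" warnings.
 */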
510 
511 /*
512  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
513  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
514  * But it at least allows objtool to understand the control flow *around* the
515  * retpoline.
516  */
517 static int add_ignore_alternatives(struct objtool_file *file)
518 {
519 	struct section *sec;
520 	struct rela *rela;
521 	struct instruction *insn;
522 
523 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
524 	if (!sec)
525 		return 0;
526 
527 	list_for_each_entry(rela, &sec->rela_list, list) {
528 		if (rela->sym->type != STT_SECTION) {
529 			WARN("unexpected relocation symbol type in %s", sec->name);
530 			return -1;
531 		}
532 
533 		insn = find_insn(file, rela->sym->sec, rela->addend);
534 		if (!insn) {
535 			WARN("bad .discard.ignore_alts entry");
536 			return -1;
537 		}
538 
539 		insn->ignore_alts = true;
540 	}
541 
542 	return 0;
543 }
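
/*
 * The .discard.ignore_alts entries come from an annotation macro such as
 * ANNOTATE_IGNORE_ALTERNATIVE (arch/x86/include/asm/alternative.h in recent
 * trees), which emits roughly:
 *
 *   999:
 *   .pushsection .discard.ignore_alts
 *   .long 999b - .
 *   .popsection
 *
 * recording the address of the annotated instruction so that the loop above
 * can set insn->ignore_alts on it.
 */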
544 
545 /*
546  * Find the destination instructions for all jumps.
547  */
548 static int add_jump_destinations(struct objtool_file *file)
549 {
550 	struct instruction *insn;
551 	struct rela *rela;
552 	struct section *dest_sec;
553 	unsigned long dest_off;
554 
555 	for_each_insn(file, insn) {
556 		if (insn->type != INSN_JUMP_CONDITIONAL &&
557 		    insn->type != INSN_JUMP_UNCONDITIONAL)
558 			continue;
559 
560 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
561 			continue;
562 
563 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
564 					       insn->len);
565 		if (!rela) {
566 			dest_sec = insn->sec;
567 			dest_off = insn->offset + insn->len + insn->immediate;
568 		} else if (rela->sym->type == STT_SECTION) {
569 			dest_sec = rela->sym->sec;
570 			dest_off = rela->addend + 4;
571 		} else if (rela->sym->sec->idx) {
572 			dest_sec = rela->sym->sec;
573 			dest_off = rela->sym->sym.st_value + rela->addend + 4;
574 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
575 			/*
576 			 * Retpoline jumps are really dynamic jumps in
577 			 * disguise, so convert them accordingly.
578 			 */
579 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
580 				insn->type = INSN_JUMP_DYNAMIC;
581 			else
582 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
583 
584 			insn->retpoline_safe = true;
585 			continue;
586 		} else {
587 			/* external sibling call */
588 			insn->call_dest = rela->sym;
589 			continue;
590 		}
591 
592 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
593 		if (!insn->jump_dest) {
594 
595 			/*
596 			 * This is a special case where an alt instruction
597 			 * jumps past the end of the section.  These are
598 			 * handled later in handle_group_alt().
599 			 */
600 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
601 				continue;
602 
603 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
604 				  insn->sec, insn->offset, dest_sec->name,
605 				  dest_off);
606 			return -1;
607 		}
608 
609 		/*
610 		 * Cross-function jump.
611 		 */
612 		if (insn->func && insn->jump_dest->func &&
613 		    insn->func != insn->jump_dest->func) {
614 
615 			/*
616 			 * For GCC 8+, create parent/child links for any cold
617 			 * subfunctions.  This is _mostly_ redundant with a
618 			 * similar initialization in read_symbols().
619 			 *
620 			 * If a function has aliases, we want the *first* such
621 			 * function in the symbol table to be the subfunction's
622 			 * parent.  In that case we overwrite the
623 			 * initialization done in read_symbols().
624 			 *
625 			 * However this code can't completely replace the
626 			 * read_symbols() code because this doesn't detect the
627 			 * case where the parent function's only reference to a
628 			 * subfunction is through a jump table.
629 			 */
630 			if (!strstr(insn->func->name, ".cold.") &&
631 			    strstr(insn->jump_dest->func->name, ".cold.")) {
632 				insn->func->cfunc = insn->jump_dest->func;
633 				insn->jump_dest->func->pfunc = insn->func;
634 
635 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
636 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
637 
638 				/* internal sibling call */
639 				insn->call_dest = insn->jump_dest->func;
640 			}
641 		}
642 	}
643 
644 	return 0;
645 }
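
/*
 * For example, a tail-call-optimized function looks roughly like:
 *
 *   foo:
 *           ...
 *           jmp  bar	# sibling call: bar returns directly to foo's caller
 *
 * If bar lives in another object file, the rela points at an undefined symbol
 * (section index 0) and we only set insn->call_dest ("external sibling call"
 * above).  If bar is local, the jump lands on the first instruction of a
 * different function and is marked as an internal sibling call instead.
 */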
646 
647 /*
648  * Find the destination instructions for all calls.
649  */
650 static int add_call_destinations(struct objtool_file *file)
651 {
652 	struct instruction *insn;
653 	unsigned long dest_off;
654 	struct rela *rela;
655 
656 	for_each_insn(file, insn) {
657 		if (insn->type != INSN_CALL)
658 			continue;
659 
660 		rela = find_rela_by_dest_range(insn->sec, insn->offset,
661 					       insn->len);
662 		if (!rela) {
663 			dest_off = insn->offset + insn->len + insn->immediate;
664 			insn->call_dest = find_symbol_by_offset(insn->sec,
665 								dest_off);
666 
667 			if (!insn->call_dest && !insn->ignore) {
668 				WARN_FUNC("unsupported intra-function call",
669 					  insn->sec, insn->offset);
670 				if (retpoline)
671 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
672 				return -1;
673 			}
674 
675 		} else if (rela->sym->type == STT_SECTION) {
676 			insn->call_dest = find_symbol_by_offset(rela->sym->sec,
677 								rela->addend+4);
678 			if (!insn->call_dest ||
679 			    insn->call_dest->type != STT_FUNC) {
680 				WARN_FUNC("can't find call dest symbol at %s+0x%x",
681 					  insn->sec, insn->offset,
682 					  rela->sym->sec->name,
683 					  rela->addend + 4);
684 				return -1;
685 			}
686 		} else
687 			insn->call_dest = rela->sym;
688 	}
689 
690 	return 0;
691 }
692 
693 /*
694  * The .alternatives section requires some extra special care, over and above
695  * what other special sections require:
696  *
697  * 1. Because alternatives are patched in-place, we need to insert a fake jump
698  *    instruction at the end so that validate_branch() skips all the original
699  *    replaced instructions when validating the new instruction path.
700  *
701  * 2. An added wrinkle is that the new instruction length might be zero.  In
702  *    that case the old instructions are replaced with noops.  We simulate that
703  *    by creating a fake jump as the only new instruction.
704  *
705  * 3. In some cases, the alternative section includes an instruction which
706  *    conditionally jumps to the _end_ of the entry.  We have to modify these
707  *    jumps' destinations to point back to .text rather than the end of the
708  *    entry in .altinstr_replacement.
709  */
710 static int handle_group_alt(struct objtool_file *file,
711 			    struct special_alt *special_alt,
712 			    struct instruction *orig_insn,
713 			    struct instruction **new_insn)
714 {
715 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
716 	unsigned long dest_off;
717 
718 	last_orig_insn = NULL;
719 	insn = orig_insn;
720 	sec_for_each_insn_from(file, insn) {
721 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
722 			break;
723 
724 		insn->alt_group = true;
725 		last_orig_insn = insn;
726 	}
727 
728 	if (next_insn_same_sec(file, last_orig_insn)) {
729 		fake_jump = malloc(sizeof(*fake_jump));
730 		if (!fake_jump) {
731 			WARN("malloc failed");
732 			return -1;
733 		}
734 		memset(fake_jump, 0, sizeof(*fake_jump));
735 		INIT_LIST_HEAD(&fake_jump->alts);
736 		clear_insn_state(&fake_jump->state);
737 
738 		fake_jump->sec = special_alt->new_sec;
739 		fake_jump->offset = FAKE_JUMP_OFFSET;
740 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
741 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
742 		fake_jump->func = orig_insn->func;
743 	}
744 
745 	if (!special_alt->new_len) {
746 		if (!fake_jump) {
747 			WARN("%s: empty alternative at end of section",
748 			     special_alt->orig_sec->name);
749 			return -1;
750 		}
751 
752 		*new_insn = fake_jump;
753 		return 0;
754 	}
755 
756 	last_new_insn = NULL;
757 	insn = *new_insn;
758 	sec_for_each_insn_from(file, insn) {
759 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
760 			break;
761 
762 		last_new_insn = insn;
763 
764 		insn->ignore = orig_insn->ignore_alts;
765 		insn->func = orig_insn->func;
766 
767 		if (insn->type != INSN_JUMP_CONDITIONAL &&
768 		    insn->type != INSN_JUMP_UNCONDITIONAL)
769 			continue;
770 
771 		if (!insn->immediate)
772 			continue;
773 
774 		dest_off = insn->offset + insn->len + insn->immediate;
775 		if (dest_off == special_alt->new_off + special_alt->new_len) {
776 			if (!fake_jump) {
777 				WARN("%s: alternative jump to end of section",
778 				     special_alt->orig_sec->name);
779 				return -1;
780 			}
781 			insn->jump_dest = fake_jump;
782 		}
783 
784 		if (!insn->jump_dest) {
785 			WARN_FUNC("can't find alternative jump destination",
786 				  insn->sec, insn->offset);
787 			return -1;
788 		}
789 	}
790 
791 	if (!last_new_insn) {
792 		WARN_FUNC("can't find last new alternative instruction",
793 			  special_alt->new_sec, special_alt->new_off);
794 		return -1;
795 	}
796 
797 	if (fake_jump)
798 		list_add(&fake_jump->list, &last_new_insn->list);
799 
800 	return 0;
801 }
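
/*
 * For reference, an alternatives site is generated by the ALTERNATIVE()
 * family of macros (arch/x86/include/asm/alternative.h), which emit roughly
 * (label names and entry layout are a sketch and vary between versions):
 *
 *   661:	<oldinstr>	# original bytes, patched in place at boot
 *   662:
 *   .pushsection .altinstructions, "a"
 *	<struct alt_instr: orig/repl offsets, CPU feature bit, lengths>
 *   .popsection
 *   .pushsection .altinstr_replacement, "ax"
 *   6641:	<newinstr>	# replacement bytes
 *   .popsection
 *
 * special_get_alts() parses the .altinstructions entries into special_alt,
 * which supplies the orig_off/orig_len/new_off/new_len used above.
 */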
802 
803 /*
804  * A jump table entry can either convert a nop to a jump or a jump to a nop.
805  * If the original instruction is a jump, make the alt entry an effective nop
806  * by just skipping the original instruction.
807  */
808 static int handle_jump_alt(struct objtool_file *file,
809 			   struct special_alt *special_alt,
810 			   struct instruction *orig_insn,
811 			   struct instruction **new_insn)
812 {
813 	if (orig_insn->type == INSN_NOP)
814 		return 0;
815 
816 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
817 		WARN_FUNC("unsupported instruction at jump label",
818 			  orig_insn->sec, orig_insn->offset);
819 		return -1;
820 	}
821 
822 	*new_insn = list_next_entry(orig_insn, list);
823 	return 0;
824 }
825 
826 /*
827  * Read all the special sections which have alternate instructions which can be
828  * patched in or redirected to at runtime.  Each instruction having alternate
829  * instruction(s) has them added to its insn->alts list, which will be
830  * traversed in validate_branch().
831  */
832 static int add_special_section_alts(struct objtool_file *file)
833 {
834 	struct list_head special_alts;
835 	struct instruction *orig_insn, *new_insn;
836 	struct special_alt *special_alt, *tmp;
837 	struct alternative *alt;
838 	int ret;
839 
840 	ret = special_get_alts(file->elf, &special_alts);
841 	if (ret)
842 		return ret;
843 
844 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
845 
846 		orig_insn = find_insn(file, special_alt->orig_sec,
847 				      special_alt->orig_off);
848 		if (!orig_insn) {
849 			WARN_FUNC("special: can't find orig instruction",
850 				  special_alt->orig_sec, special_alt->orig_off);
851 			ret = -1;
852 			goto out;
853 		}
854 
855 		new_insn = NULL;
856 		if (!special_alt->group || special_alt->new_len) {
857 			new_insn = find_insn(file, special_alt->new_sec,
858 					     special_alt->new_off);
859 			if (!new_insn) {
860 				WARN_FUNC("special: can't find new instruction",
861 					  special_alt->new_sec,
862 					  special_alt->new_off);
863 				ret = -1;
864 				goto out;
865 			}
866 		}
867 
868 		if (special_alt->group) {
869 			ret = handle_group_alt(file, special_alt, orig_insn,
870 					       &new_insn);
871 			if (ret)
872 				goto out;
873 		} else if (special_alt->jump_or_nop) {
874 			ret = handle_jump_alt(file, special_alt, orig_insn,
875 					      &new_insn);
876 			if (ret)
877 				goto out;
878 		}
879 
880 		alt = malloc(sizeof(*alt));
881 		if (!alt) {
882 			WARN("malloc failed");
883 			ret = -1;
884 			goto out;
885 		}
886 
887 		alt->insn = new_insn;
888 		alt->skip_orig = special_alt->skip_orig;
889 		orig_insn->ignore_alts |= special_alt->skip_alt;
890 		list_add_tail(&alt->list, &orig_insn->alts);
891 
892 		list_del(&special_alt->list);
893 		free(special_alt);
894 	}
895 
896 out:
897 	return ret;
898 }
899 
900 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
901 			    struct rela *table)
902 {
903 	struct rela *rela = table;
904 	struct instruction *dest_insn;
905 	struct alternative *alt;
906 	struct symbol *pfunc = insn->func->pfunc;
907 	unsigned int prev_offset = 0;
908 
909 	/*
910 	 * Each @rela is a switch table relocation which points to the target
911 	 * instruction.
912 	 */
913 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
914 
915 		/* Check for the end of the table: */
916 		if (rela != table && rela->jump_table_start)
917 			break;
918 
919 		/* Make sure the table entries are consecutive: */
920 		if (prev_offset && rela->offset != prev_offset + 8)
921 			break;
922 
923 		/* Detect function pointers from contiguous objects: */
924 		if (rela->sym->sec == pfunc->sec &&
925 		    rela->addend == pfunc->offset)
926 			break;
927 
928 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
929 		if (!dest_insn)
930 			break;
931 
932 		/* Make sure the destination is in the same function: */
933 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
934 			break;
935 
936 		alt = malloc(sizeof(*alt));
937 		if (!alt) {
938 			WARN("malloc failed");
939 			return -1;
940 		}
941 
942 		alt->insn = dest_insn;
943 		list_add_tail(&alt->list, &insn->alts);
944 		prev_offset = rela->offset;
945 	}
946 
947 	if (!prev_offset) {
948 		WARN_FUNC("can't find switch jump table",
949 			  insn->sec, insn->offset);
950 		return -1;
951 	}
952 
953 	return 0;
954 }
955 
956 /*
957  * find_jump_table() - Given a dynamic jump, find the switch jump table in
958  * .rodata associated with it.
959  *
960  * There are 3 basic patterns:
961  *
962  * 1. jmpq *[rodata addr](,%reg,8)
963  *
964  *    This is the most common case by far.  It jumps to an address in a simple
965  *    jump table which is stored in .rodata.
966  *
967  * 2. jmpq *[rodata addr](%rip)
968  *
969  *    This is caused by a rare GCC quirk, currently only seen in three driver
970  *    functions in the kernel, only with certain obscure non-distro configs.
971  *
972  *    As part of an optimization, GCC makes a copy of an existing switch jump
973  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
974  *    jump) to use a single entry in the table.  The rest of the jump table and
975  *    some of its jump targets remain as dead code.
976  *
977  *    In such a case we can just crudely ignore all unreachable instruction
978  *    warnings for the entire object file.  Ideally we would just ignore them
979  *    for the function, but that would require redesigning the code quite a
980  *    bit.  And honestly that's just not worth doing: unreachable instruction
981  *    warnings are of questionable value anyway, and this is such a rare issue.
982  *
983  * 3. mov [rodata addr],%reg1
984  *    ... some instructions ...
985  *    jmpq *(%reg1,%reg2,8)
986  *
987  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
988  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
989  *
990  *    As of GCC 7 there are quite a few more of these and the 'in between' code
991  *    is significant. Esp. with KASAN enabled some of the code between the mov
992  *    and jmpq uses .rodata itself, which can confuse things.
993  *
994  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
995  *    ensure the same register is used in the mov and jump instructions.
996  *
997  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
998  */
999 static struct rela *find_jump_table(struct objtool_file *file,
1000 				      struct symbol *func,
1001 				      struct instruction *insn)
1002 {
1003 	struct rela *text_rela, *table_rela;
1004 	struct instruction *orig_insn = insn;
1005 	struct section *table_sec;
1006 	unsigned long table_offset;
1007 
1008 	/*
1009 	 * Backward search using the @first_jump_src links; these help avoid
1010 	 * much of the 'in between' code, which could otherwise confuse
1011 	 * us.
1012 	 */
1013 	for (;
1014 	     &insn->list != &file->insn_list &&
1015 	     insn->sec == func->sec &&
1016 	     insn->offset >= func->offset;
1017 
1018 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1019 
1020 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1021 			break;
1022 
1023 		/* allow small jumps within the range */
1024 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1025 		    insn->jump_dest &&
1026 		    (insn->jump_dest->offset <= insn->offset ||
1027 		     insn->jump_dest->offset > orig_insn->offset))
1028 			break;
1029 
1030 		/* look for a relocation which references .rodata */
1031 		text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
1032 						    insn->len);
1033 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1034 		    !text_rela->sym->sec->rodata)
1035 			continue;
1036 
1037 		table_offset = text_rela->addend;
1038 		table_sec = text_rela->sym->sec;
1039 
1040 		if (text_rela->type == R_X86_64_PC32)
1041 			table_offset += 4;
1042 
1043 		/*
1044 		 * Make sure the .rodata address isn't associated with a
1045 		 * symbol.  GCC jump tables are anonymous data.
1046 		 *
1047 		 * Also support C jump tables which are in the same format as
1048 		 * switch jump tables.  For objtool to recognize them, they
1049 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1050 		 * have symbols associated with them.
1051 		 */
1052 		if (find_symbol_containing(table_sec, table_offset) &&
1053 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1054 			continue;
1055 
1056 		/* Each table entry has a rela associated with it. */
1057 		table_rela = find_rela_by_dest(table_sec, table_offset);
1058 		if (!table_rela)
1059 			continue;
1060 
1061 		/*
1062 		 * Use of RIP-relative switch jumps is quite rare, and
1063 		 * indicates a rare GCC quirk/bug which can leave dead code
1064 		 * behind.
1065 		 */
1066 		if (text_rela->type == R_X86_64_PC32)
1067 			file->ignore_unreachables = true;
1068 
1069 		return table_rela;
1070 	}
1071 
1072 	return NULL;
1073 }
1074 
1075 /*
1076  * First pass: Mark the head of each jump table so that in the next pass,
1077  * we know when a given jump table ends and the next one starts.
1078  */
1079 static void mark_func_jump_tables(struct objtool_file *file,
1080 				    struct symbol *func)
1081 {
1082 	struct instruction *insn, *last = NULL;
1083 	struct rela *rela;
1084 
1085 	func_for_each_insn_all(file, func, insn) {
1086 		if (!last)
1087 			last = insn;
1088 
1089 		/*
1090 		 * Store back-pointers for unconditional forward jumps such
1091 		 * that find_jump_table() can back-track using those and
1092 		 * avoid some potentially confusing code.
1093 		 */
1094 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1095 		    insn->offset > last->offset &&
1096 		    insn->jump_dest->offset > insn->offset &&
1097 		    !insn->jump_dest->first_jump_src) {
1098 
1099 			insn->jump_dest->first_jump_src = insn;
1100 			last = insn->jump_dest;
1101 		}
1102 
1103 		if (insn->type != INSN_JUMP_DYNAMIC)
1104 			continue;
1105 
1106 		rela = find_jump_table(file, func, insn);
1107 		if (rela) {
1108 			rela->jump_table_start = true;
1109 			insn->jump_table = rela;
1110 		}
1111 	}
1112 }
1113 
1114 static int add_func_jump_tables(struct objtool_file *file,
1115 				  struct symbol *func)
1116 {
1117 	struct instruction *insn;
1118 	int ret;
1119 
1120 	func_for_each_insn_all(file, func, insn) {
1121 		if (!insn->jump_table)
1122 			continue;
1123 
1124 		ret = add_jump_table(file, insn, insn->jump_table);
1125 		if (ret)
1126 			return ret;
1127 	}
1128 
1129 	return 0;
1130 }
1131 
1132 /*
1133  * For some switch statements, gcc generates a jump table in the .rodata
1134  * section which contains a list of addresses within the function to jump to.
1135  * This finds these jump tables and adds them to the insn->alts lists.
1136  */
1137 static int add_jump_table_alts(struct objtool_file *file)
1138 {
1139 	struct section *sec;
1140 	struct symbol *func;
1141 	int ret;
1142 
1143 	if (!file->rodata)
1144 		return 0;
1145 
1146 	for_each_sec(file, sec) {
1147 		list_for_each_entry(func, &sec->symbol_list, list) {
1148 			if (func->type != STT_FUNC)
1149 				continue;
1150 
1151 			mark_func_jump_tables(file, func);
1152 			ret = add_func_jump_tables(file, func);
1153 			if (ret)
1154 				return ret;
1155 		}
1156 	}
1157 
1158 	return 0;
1159 }
1160 
1161 static int read_unwind_hints(struct objtool_file *file)
1162 {
1163 	struct section *sec, *relasec;
1164 	struct rela *rela;
1165 	struct unwind_hint *hint;
1166 	struct instruction *insn;
1167 	struct cfi_reg *cfa;
1168 	int i;
1169 
1170 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1171 	if (!sec)
1172 		return 0;
1173 
1174 	relasec = sec->rela;
1175 	if (!relasec) {
1176 		WARN("missing .rela.discard.unwind_hints section");
1177 		return -1;
1178 	}
1179 
1180 	if (sec->len % sizeof(struct unwind_hint)) {
1181 		WARN("struct unwind_hint size mismatch");
1182 		return -1;
1183 	}
1184 
1185 	file->hints = true;
1186 
1187 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1188 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1189 
1190 		rela = find_rela_by_dest(sec, i * sizeof(*hint));
1191 		if (!rela) {
1192 			WARN("can't find rela for unwind_hints[%d]", i);
1193 			return -1;
1194 		}
1195 
1196 		insn = find_insn(file, rela->sym->sec, rela->addend);
1197 		if (!insn) {
1198 			WARN("can't find insn for unwind_hints[%d]", i);
1199 			return -1;
1200 		}
1201 
1202 		cfa = &insn->state.cfa;
1203 
1204 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1205 			insn->save = true;
1206 			continue;
1207 
1208 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1209 			insn->restore = true;
1210 			insn->hint = true;
1211 			continue;
1212 		}
1213 
1214 		insn->hint = true;
1215 
1216 		switch (hint->sp_reg) {
1217 		case ORC_REG_UNDEFINED:
1218 			cfa->base = CFI_UNDEFINED;
1219 			break;
1220 		case ORC_REG_SP:
1221 			cfa->base = CFI_SP;
1222 			break;
1223 		case ORC_REG_BP:
1224 			cfa->base = CFI_BP;
1225 			break;
1226 		case ORC_REG_SP_INDIRECT:
1227 			cfa->base = CFI_SP_INDIRECT;
1228 			break;
1229 		case ORC_REG_R10:
1230 			cfa->base = CFI_R10;
1231 			break;
1232 		case ORC_REG_R13:
1233 			cfa->base = CFI_R13;
1234 			break;
1235 		case ORC_REG_DI:
1236 			cfa->base = CFI_DI;
1237 			break;
1238 		case ORC_REG_DX:
1239 			cfa->base = CFI_DX;
1240 			break;
1241 		default:
1242 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1243 				  insn->sec, insn->offset, hint->sp_reg);
1244 			return -1;
1245 		}
1246 
1247 		cfa->offset = hint->sp_offset;
1248 		insn->state.type = hint->type;
1249 		insn->state.end = hint->end;
1250 	}
1251 
1252 	return 0;
1253 }
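
/*
 * For reference, each entry parsed above corresponds to struct unwind_hint
 * (arch/x86/include/asm/orc_types.h), roughly:
 *
 *   struct unwind_hint {
 *	u32	ip;		// carries the rela to the hinted instruction
 *	s16	sp_offset;
 *	u8	sp_reg;
 *	u8	type;
 *	u8	end;
 *   };
 *
 * Asm code creates these via the UNWIND_HINT*() macros (e.g.
 * UNWIND_HINT_EMPTY, UNWIND_HINT_REGS), each of which pushes one record into
 * .discard.unwind_hints.
 */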
1254 
1255 static int read_retpoline_hints(struct objtool_file *file)
1256 {
1257 	struct section *sec;
1258 	struct instruction *insn;
1259 	struct rela *rela;
1260 
1261 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1262 	if (!sec)
1263 		return 0;
1264 
1265 	list_for_each_entry(rela, &sec->rela_list, list) {
1266 		if (rela->sym->type != STT_SECTION) {
1267 			WARN("unexpected relocation symbol type in %s", sec->name);
1268 			return -1;
1269 		}
1270 
1271 		insn = find_insn(file, rela->sym->sec, rela->addend);
1272 		if (!insn) {
1273 			WARN("bad .discard.retpoline_safe entry");
1274 			return -1;
1275 		}
1276 
1277 		if (insn->type != INSN_JUMP_DYNAMIC &&
1278 		    insn->type != INSN_CALL_DYNAMIC) {
1279 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1280 				  insn->sec, insn->offset);
1281 			return -1;
1282 		}
1283 
1284 		insn->retpoline_safe = true;
1285 	}
1286 
1287 	return 0;
1288 }
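
/*
 * The .discard.retpoline_safe entries are produced by ANNOTATE_RETPOLINE_SAFE
 * (arch/x86/include/asm/nospec-branch.h), roughly:
 *
 *   999:
 *   .pushsection .discard.retpoline_safe
 *   _ASM_PTR 999b
 *   .popsection
 *
 * i.e. the address of an indirect jump/call that was deliberately left
 * unretpolined, which is why the check above insists the annotated
 * instruction really is INSN_JUMP_DYNAMIC or INSN_CALL_DYNAMIC.
 */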
1289 
1290 static void mark_rodata(struct objtool_file *file)
1291 {
1292 	struct section *sec;
1293 	bool found = false;
1294 
1295 	/*
1296 	 * Search for the following rodata sections, each of which can
1297 	 * potentially contain jump tables:
1298 	 *
1299 	 * - .rodata: can contain GCC switch tables
1300 	 * - .rodata.<func>: same, if -fdata-sections is being used
1301 	 * - .rodata..c_jump_table: contains C annotated jump tables
1302 	 *
1303 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1304 	 */
1305 	for_each_sec(file, sec) {
1306 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1307 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1308 			sec->rodata = true;
1309 			found = true;
1310 		}
1311 	}
1312 
1313 	file->rodata = found;
1314 }
1315 
1316 static int decode_sections(struct objtool_file *file)
1317 {
1318 	int ret;
1319 
1320 	mark_rodata(file);
1321 
1322 	ret = decode_instructions(file);
1323 	if (ret)
1324 		return ret;
1325 
1326 	ret = add_dead_ends(file);
1327 	if (ret)
1328 		return ret;
1329 
1330 	add_ignores(file);
1331 	add_uaccess_safe(file);
1332 
1333 	ret = add_ignore_alternatives(file);
1334 	if (ret)
1335 		return ret;
1336 
1337 	ret = add_jump_destinations(file);
1338 	if (ret)
1339 		return ret;
1340 
1341 	ret = add_special_section_alts(file);
1342 	if (ret)
1343 		return ret;
1344 
1345 	ret = add_call_destinations(file);
1346 	if (ret)
1347 		return ret;
1348 
1349 	ret = add_jump_table_alts(file);
1350 	if (ret)
1351 		return ret;
1352 
1353 	ret = read_unwind_hints(file);
1354 	if (ret)
1355 		return ret;
1356 
1357 	ret = read_retpoline_hints(file);
1358 	if (ret)
1359 		return ret;
1360 
1361 	return 0;
1362 }
1363 
1364 static bool is_fentry_call(struct instruction *insn)
1365 {
1366 	if (insn->type == INSN_CALL &&
1367 	    insn->call_dest->type == STT_NOTYPE &&
1368 	    !strcmp(insn->call_dest->name, "__fentry__"))
1369 		return true;
1370 
1371 	return false;
1372 }
1373 
1374 static bool has_modified_stack_frame(struct insn_state *state)
1375 {
1376 	int i;
1377 
1378 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1379 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1380 	    state->stack_size != initial_func_cfi.cfa.offset ||
1381 	    state->drap)
1382 		return true;
1383 
1384 	for (i = 0; i < CFI_NUM_REGS; i++)
1385 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1386 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1387 			return true;
1388 
1389 	return false;
1390 }
1391 
1392 static bool has_valid_stack_frame(struct insn_state *state)
1393 {
1394 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1395 	    state->regs[CFI_BP].offset == -16)
1396 		return true;
1397 
1398 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1399 		return true;
1400 
1401 	return false;
1402 }
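
/*
 * For example, the standard frame pointer prologue
 *
 *   55		push   %rbp	# saved rbp now at CFA-16 (retaddr is at CFA-8)
 *   48 89 e5	mov    %rsp,%rbp	# CFA becomes %rbp + 16
 *
 * produces exactly the state accepted above: cfa.base == CFI_BP and
 * regs[CFI_BP] == (CFI_CFA, -16).  The DRAP case instead accepts %rbp having
 * been re-based on itself, per the DRAP pattern documented further down.
 */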
1403 
1404 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1405 {
1406 	struct cfi_reg *cfa = &state->cfa;
1407 	struct stack_op *op = &insn->stack_op;
1408 
1409 	if (cfa->base != CFI_SP)
1410 		return 0;
1411 
1412 	/* push */
1413 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1414 		cfa->offset += 8;
1415 
1416 	/* pop */
1417 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1418 		cfa->offset -= 8;
1419 
1420 	/* add immediate to sp */
1421 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1422 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1423 		cfa->offset -= op->src.offset;
1424 
1425 	return 0;
1426 }
1427 
1428 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1429 		     int offset)
1430 {
1431 	if (arch_callee_saved_reg(reg) &&
1432 	    state->regs[reg].base == CFI_UNDEFINED) {
1433 		state->regs[reg].base = base;
1434 		state->regs[reg].offset = offset;
1435 	}
1436 }
1437 
1438 static void restore_reg(struct insn_state *state, unsigned char reg)
1439 {
1440 	state->regs[reg].base = CFI_UNDEFINED;
1441 	state->regs[reg].offset = 0;
1442 }
1443 
1444 /*
1445  * A note about DRAP stack alignment:
1446  *
1447  * GCC has the concept of a DRAP register, which is used to help keep track of
1448  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1449  * register.  The typical DRAP pattern is:
1450  *
1451  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1452  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1453  *   41 ff 72 f8		pushq  -0x8(%r10)
1454  *   55				push   %rbp
1455  *   48 89 e5			mov    %rsp,%rbp
1456  *				(more pushes)
1457  *   41 52			push   %r10
1458  *				...
1459  *   41 5a			pop    %r10
1460  *				(more pops)
1461  *   5d				pop    %rbp
1462  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1463  *   c3				retq
1464  *
1465  * There are some variations in the epilogues, like:
1466  *
1467  *   5b				pop    %rbx
1468  *   41 5a			pop    %r10
1469  *   41 5c			pop    %r12
1470  *   41 5d			pop    %r13
1471  *   41 5e			pop    %r14
1472  *   c9				leaveq
1473  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1474  *   c3				retq
1475  *
1476  * and:
1477  *
1478  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1479  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1480  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1481  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1482  *   c9				leaveq
1483  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1484  *   c3				retq
1485  *
1486  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1487  * restored beforehand:
1488  *
1489  *   41 55			push   %r13
1490  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1491  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1492  *				...
1493  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1494  *   41 5d			pop    %r13
1495  *   c3				retq
1496  */
1497 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1498 {
1499 	struct stack_op *op = &insn->stack_op;
1500 	struct cfi_reg *cfa = &state->cfa;
1501 	struct cfi_reg *regs = state->regs;
1502 
1503 	/* stack operations don't make sense with an undefined CFA */
1504 	if (cfa->base == CFI_UNDEFINED) {
1505 		if (insn->func) {
1506 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1507 			return -1;
1508 		}
1509 		return 0;
1510 	}
1511 
1512 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1513 		return update_insn_state_regs(insn, state);
1514 
1515 	switch (op->dest.type) {
1516 
1517 	case OP_DEST_REG:
1518 		switch (op->src.type) {
1519 
1520 		case OP_SRC_REG:
1521 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1522 			    cfa->base == CFI_SP &&
1523 			    regs[CFI_BP].base == CFI_CFA &&
1524 			    regs[CFI_BP].offset == -cfa->offset) {
1525 
1526 				/* mov %rsp, %rbp */
1527 				cfa->base = op->dest.reg;
1528 				state->bp_scratch = false;
1529 			}
1530 
1531 			else if (op->src.reg == CFI_SP &&
1532 				 op->dest.reg == CFI_BP && state->drap) {
1533 
1534 				/* drap: mov %rsp, %rbp */
1535 				regs[CFI_BP].base = CFI_BP;
1536 				regs[CFI_BP].offset = -state->stack_size;
1537 				state->bp_scratch = false;
1538 			}
1539 
1540 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1541 
1542 				/*
1543 				 * mov %rsp, %reg
1544 				 *
1545 				 * This is needed for the rare case where GCC
1546 				 * does:
1547 				 *
1548 				 *   mov    %rsp, %rax
1549 				 *   ...
1550 				 *   mov    %rax, %rsp
1551 				 */
1552 				state->vals[op->dest.reg].base = CFI_CFA;
1553 				state->vals[op->dest.reg].offset = -state->stack_size;
1554 			}
1555 
1556 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1557 				 cfa->base == CFI_BP) {
1558 
1559 				/*
1560 				 * mov %rbp, %rsp
1561 				 *
1562 				 * Restore the original stack pointer (Clang).
1563 				 */
1564 				state->stack_size = -state->regs[CFI_BP].offset;
1565 			}
1566 
1567 			else if (op->dest.reg == cfa->base) {
1568 
1569 				/* mov %reg, %rsp */
1570 				if (cfa->base == CFI_SP &&
1571 				    state->vals[op->src.reg].base == CFI_CFA) {
1572 
1573 					/*
1574 					 * This is needed for the rare case
1575 					 * where GCC does something dumb like:
1576 					 *
1577 					 *   lea    0x8(%rsp), %rcx
1578 					 *   ...
1579 					 *   mov    %rcx, %rsp
1580 					 */
1581 					cfa->offset = -state->vals[op->src.reg].offset;
1582 					state->stack_size = cfa->offset;
1583 
1584 				} else {
1585 					cfa->base = CFI_UNDEFINED;
1586 					cfa->offset = 0;
1587 				}
1588 			}
1589 
1590 			break;
1591 
1592 		case OP_SRC_ADD:
1593 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1594 
1595 				/* add imm, %rsp */
1596 				state->stack_size -= op->src.offset;
1597 				if (cfa->base == CFI_SP)
1598 					cfa->offset -= op->src.offset;
1599 				break;
1600 			}
1601 
1602 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1603 
1604 				/* lea disp(%rbp), %rsp */
1605 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1606 				break;
1607 			}
1608 
1609 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1610 
1611 				/* drap: lea disp(%rsp), %drap */
1612 				state->drap_reg = op->dest.reg;
1613 
1614 				/*
1615 				 * lea disp(%rsp), %reg
1616 				 *
1617 				 * This is needed for the rare case where GCC
1618 				 * does something dumb like:
1619 				 *
1620 				 *   lea    0x8(%rsp), %rcx
1621 				 *   ...
1622 				 *   mov    %rcx, %rsp
1623 				 */
1624 				state->vals[op->dest.reg].base = CFI_CFA;
1625 				state->vals[op->dest.reg].offset =
1626 					-state->stack_size + op->src.offset;
1627 
1628 				break;
1629 			}
1630 
1631 			if (state->drap && op->dest.reg == CFI_SP &&
1632 			    op->src.reg == state->drap_reg) {
1633 
1634 				 /* drap: lea disp(%drap), %rsp */
1635 				cfa->base = CFI_SP;
1636 				cfa->offset = state->stack_size = -op->src.offset;
1637 				state->drap_reg = CFI_UNDEFINED;
1638 				state->drap = false;
1639 				break;
1640 			}
1641 
1642 			if (op->dest.reg == state->cfa.base) {
1643 				WARN_FUNC("unsupported stack register modification",
1644 					  insn->sec, insn->offset);
1645 				return -1;
1646 			}
1647 
1648 			break;
1649 
1650 		case OP_SRC_AND:
1651 			if (op->dest.reg != CFI_SP ||
1652 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1653 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1654 				WARN_FUNC("unsupported stack pointer realignment",
1655 					  insn->sec, insn->offset);
1656 				return -1;
1657 			}
1658 
1659 			if (state->drap_reg != CFI_UNDEFINED) {
1660 				/* drap: and imm, %rsp */
1661 				cfa->base = state->drap_reg;
1662 				cfa->offset = state->stack_size = 0;
1663 				state->drap = true;
1664 			}
1665 
1666 			/*
1667 			 * Older versions of GCC (4.8ish) realign the stack
1668 			 * without DRAP, with a frame pointer.
1669 			 */
1670 
1671 			break;
1672 
1673 		case OP_SRC_POP:
1674 		case OP_SRC_POPF:
1675 			if (!state->drap && op->dest.type == OP_DEST_REG &&
1676 			    op->dest.reg == cfa->base) {
1677 
1678 				/* pop %rbp */
1679 				cfa->base = CFI_SP;
1680 			}
1681 
1682 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1683 			    op->dest.type == OP_DEST_REG &&
1684 			    op->dest.reg == state->drap_reg &&
1685 			    state->drap_offset == -state->stack_size) {
1686 
1687 				/* drap: pop %drap */
1688 				cfa->base = state->drap_reg;
1689 				cfa->offset = 0;
1690 				state->drap_offset = -1;
1691 
1692 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1693 
1694 				/* pop %reg */
1695 				restore_reg(state, op->dest.reg);
1696 			}
1697 
1698 			state->stack_size -= 8;
1699 			if (cfa->base == CFI_SP)
1700 				cfa->offset -= 8;
1701 
1702 			break;
1703 
1704 		case OP_SRC_REG_INDIRECT:
1705 			if (state->drap && op->src.reg == CFI_BP &&
1706 			    op->src.offset == state->drap_offset) {
1707 
1708 				/* drap: mov disp(%rbp), %drap */
1709 				cfa->base = state->drap_reg;
1710 				cfa->offset = 0;
1711 				state->drap_offset = -1;
1712 			}
1713 
1714 			if (state->drap && op->src.reg == CFI_BP &&
1715 			    op->src.offset == regs[op->dest.reg].offset) {
1716 
1717 				/* drap: mov disp(%rbp), %reg */
1718 				restore_reg(state, op->dest.reg);
1719 
1720 			} else if (op->src.reg == cfa->base &&
1721 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1722 
1723 				/* mov disp(%rbp), %reg */
1724 				/* mov disp(%rsp), %reg */
1725 				restore_reg(state, op->dest.reg);
1726 			}
1727 
1728 			break;
1729 
1730 		default:
1731 			WARN_FUNC("unknown stack-related instruction",
1732 				  insn->sec, insn->offset);
1733 			return -1;
1734 		}
1735 
1736 		break;
1737 
1738 	case OP_DEST_PUSH:
1739 	case OP_DEST_PUSHF:
1740 		state->stack_size += 8;
1741 		if (cfa->base == CFI_SP)
1742 			cfa->offset += 8;
1743 
1744 		if (op->src.type != OP_SRC_REG)
1745 			break;
1746 
1747 		if (state->drap) {
1748 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1749 
1750 				/* drap: push %drap */
1751 				cfa->base = CFI_BP_INDIRECT;
1752 				cfa->offset = -state->stack_size;
1753 
1754 				/* save drap so we know when to restore it */
1755 				state->drap_offset = -state->stack_size;
1756 
1757 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1758 
1759 				/* drap: push %rbp */
1760 				state->stack_size = 0;
1761 
1762 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1763 
1764 				/* drap: push %reg */
1765 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1766 			}
1767 
1768 		} else {
1769 
1770 			/* push %reg */
1771 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1772 		}
1773 
1774 		/* detect when asm code uses rbp as a scratch register */
1775 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1776 		    cfa->base != CFI_BP)
1777 			state->bp_scratch = true;
1778 		break;
1779 
1780 	case OP_DEST_REG_INDIRECT:
1781 
1782 		if (state->drap) {
1783 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1784 
1785 				/* drap: mov %drap, disp(%rbp) */
1786 				cfa->base = CFI_BP_INDIRECT;
1787 				cfa->offset = op->dest.offset;
1788 
1789 				/* save drap offset so we know when to restore it */
1790 				state->drap_offset = op->dest.offset;
1791 			}
1792 
1793 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1794 
1795 				/* drap: mov reg, disp(%rbp) */
1796 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1797 			}
1798 
1799 		} else if (op->dest.reg == cfa->base) {
1800 
1801 			/* mov reg, disp(%rbp) */
1802 			/* mov reg, disp(%rsp) */
1803 			save_reg(state, op->src.reg, CFI_CFA,
1804 				 op->dest.offset - state->cfa.offset);
1805 		}
1806 
1807 		break;
1808 
1809 	case OP_DEST_LEAVE:
1810 		if ((!state->drap && cfa->base != CFI_BP) ||
1811 		    (state->drap && cfa->base != state->drap_reg)) {
1812 			WARN_FUNC("leave instruction with modified stack frame",
1813 				  insn->sec, insn->offset);
1814 			return -1;
1815 		}
1816 
1817 		/* leave (mov %rbp, %rsp; pop %rbp) */
1818 
1819 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1820 		restore_reg(state, CFI_BP);
1821 
1822 		if (!state->drap) {
1823 			cfa->base = CFI_SP;
1824 			cfa->offset -= 8;
1825 		}
1826 
1827 		break;
1828 
1829 	case OP_DEST_MEM:
1830 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1831 			WARN_FUNC("unknown stack-related memory operation",
1832 				  insn->sec, insn->offset);
1833 			return -1;
1834 		}
1835 
1836 		/* pop mem */
1837 		state->stack_size -= 8;
1838 		if (cfa->base == CFI_SP)
1839 			cfa->offset -= 8;
1840 
1841 		break;
1842 
1843 	default:
1844 		WARN_FUNC("unknown stack-related instruction",
1845 			  insn->sec, insn->offset);
1846 		return -1;
1847 	}
1848 
1849 	return 0;
1850 }
1851 
1852 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1853 {
1854 	struct insn_state *state1 = &insn->state, *state2 = state;
1855 	int i;
1856 
1857 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1858 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1859 			  insn->sec, insn->offset,
1860 			  state1->cfa.base, state1->cfa.offset,
1861 			  state2->cfa.base, state2->cfa.offset);
1862 
1863 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1864 		for (i = 0; i < CFI_NUM_REGS; i++) {
1865 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1866 				    sizeof(struct cfi_reg)))
1867 				continue;
1868 
1869 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1870 				  insn->sec, insn->offset,
1871 				  i, state1->regs[i].base, state1->regs[i].offset,
1872 				  i, state2->regs[i].base, state2->regs[i].offset);
1873 			break;
1874 		}
1875 
1876 	} else if (state1->type != state2->type) {
1877 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1878 			  insn->sec, insn->offset, state1->type, state2->type);
1879 
1880 	} else if (state1->drap != state2->drap ||
1881 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1882 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1883 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1884 			  insn->sec, insn->offset,
1885 			  state1->drap, state1->drap_reg, state1->drap_offset,
1886 			  state2->drap, state2->drap_reg, state2->drap_offset);
1887 
1888 	} else
1889 		return true;
1890 
1891 	return false;
1892 }
1893 
1894 static inline bool func_uaccess_safe(struct symbol *func)
1895 {
1896 	if (func)
1897 		return func->uaccess_safe;
1898 
1899 	return false;
1900 }
1901 
1902 static inline const char *call_dest_name(struct instruction *insn)
1903 {
1904 	if (insn->call_dest)
1905 		return insn->call_dest->name;
1906 
1907 	return "{dynamic}";
1908 }
1909 
1910 static int validate_call(struct instruction *insn, struct insn_state *state)
1911 {
1912 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1913 		WARN_FUNC("call to %s() with UACCESS enabled",
1914 				insn->sec, insn->offset, call_dest_name(insn));
1915 		return 1;
1916 	}
1917 
1918 	if (state->df) {
1919 		WARN_FUNC("call to %s() with DF set",
1920 				insn->sec, insn->offset, call_dest_name(insn));
1921 		return 1;
1922 	}
1923 
1924 	return 0;
1925 }
1926 
1927 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1928 {
1929 	if (has_modified_stack_frame(state)) {
1930 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1931 				insn->sec, insn->offset);
1932 		return 1;
1933 	}
1934 
1935 	return validate_call(insn, state);
1936 }
1937 
1938 /*
1939  * Follow the branch starting at the given instruction, and recursively follow
1940  * any other branches (jumps).  Meanwhile, track the frame pointer state at
1941  * each instruction and validate all the rules described in
1942  * tools/objtool/Documentation/stack-validation.txt.
1943  */
1944 static int validate_branch(struct objtool_file *file, struct symbol *func,
1945 			   struct instruction *first, struct insn_state state)
1946 {
1947 	struct alternative *alt;
1948 	struct instruction *insn, *next_insn;
1949 	struct section *sec;
1950 	u8 visited;
1951 	int ret;
1952 
1953 	insn = first;
1954 	sec = insn->sec;
1955 
1956 	if (insn->alt_group && list_empty(&insn->alts)) {
1957 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1958 			  sec, insn->offset);
1959 		return 1;
1960 	}
1961 
1962 	while (1) {
1963 		next_insn = next_insn_same_sec(file, insn);
1964 
1965 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
1966 			WARN("%s() falls through to next function %s()",
1967 			     func->name, insn->func->name);
1968 			return 1;
1969 		}
1970 
1971 		if (func && insn->ignore) {
1972 			WARN_FUNC("BUG: why am I validating an ignored function?",
1973 				  sec, insn->offset);
1974 			return 1;
1975 		}
1976 
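		/*
		 * Visits are tracked per UACCESS state so that a previously
		 * seen instruction is re-validated when reached with the
		 * other state.
		 */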
1977 		visited = 1 << state.uaccess;
1978 		if (insn->visited) {
1979 			if (!insn->hint && !insn_state_match(insn, &state))
1980 				return 1;
1981 
1982 			if (insn->visited & visited)
1983 				return 0;
1984 		}
1985 
1986 		if (insn->hint) {
1987 			if (insn->restore) {
1988 				struct instruction *save_insn, *i;
1989 
1990 				i = insn;
1991 				save_insn = NULL;
1992 				func_for_each_insn_continue_reverse(file, func, i) {
1993 					if (i->save) {
1994 						save_insn = i;
1995 						break;
1996 					}
1997 				}
1998 
1999 				if (!save_insn) {
2000 					WARN_FUNC("no corresponding CFI save for CFI restore",
2001 						  sec, insn->offset);
2002 					return 1;
2003 				}
2004 
2005 				if (!save_insn->visited) {
2006 					/*
2007 					 * Oops, no state to copy yet.
2008 					 * Hopefully we can reach this
2009 					 * instruction from another branch
2010 					 * after the save insn has been
2011 					 * visited.
2012 					 */
2013 					if (insn == first)
2014 						return 0;
2015 
2016 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2017 						  sec, insn->offset);
2018 					return 1;
2019 				}
2020 
2021 				insn->state = save_insn->state;
2022 			}
2023 
2024 			state = insn->state;
2025 
2026 		} else
2027 			insn->state = state;
2028 
2029 		insn->visited |= visited;
2030 
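		/*
		 * Validate each alternative replacement with a copy of the
		 * current state.  If any alternative skips the original
		 * instruction, stop following the original stream here.
		 */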
2031 		if (!insn->ignore_alts) {
2032 			bool skip_orig = false;
2033 
2034 			list_for_each_entry(alt, &insn->alts, list) {
2035 				if (alt->skip_orig)
2036 					skip_orig = true;
2037 
2038 				ret = validate_branch(file, func, alt->insn, state);
2039 				if (ret) {
2040 					if (backtrace)
2041 						BT_FUNC("(alt)", insn);
2042 					return ret;
2043 				}
2044 			}
2045 
2046 			if (skip_orig)
2047 				return 0;
2048 		}
2049 
2050 		switch (insn->type) {
2051 
2052 		case INSN_RETURN:
2053 			if (state.uaccess && !func_uaccess_safe(func)) {
2054 				WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
2055 				return 1;
2056 			}
2057 
2058 			if (!state.uaccess && func_uaccess_safe(func)) {
2059 				WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
2060 				return 1;
2061 			}
2062 
2063 			if (state.df) {
2064 				WARN_FUNC("return with DF set", sec, insn->offset);
2065 				return 1;
2066 			}
2067 
2068 			if (func && has_modified_stack_frame(&state)) {
2069 				WARN_FUNC("return with modified stack frame",
2070 					  sec, insn->offset);
2071 				return 1;
2072 			}
2073 
2074 			if (state.bp_scratch) {
2075 				WARN("%s uses BP as a scratch register",
2076 				     func->name);
2077 				return 1;
2078 			}
2079 
2080 			return 0;
2081 
2082 		case INSN_CALL:
2083 		case INSN_CALL_DYNAMIC:
2084 			ret = validate_call(insn, &state);
2085 			if (ret)
2086 				return ret;
2087 
2088 			if (!no_fp && func && !is_fentry_call(insn) &&
2089 			    !has_valid_stack_frame(&state)) {
2090 				WARN_FUNC("call without frame pointer save/setup",
2091 					  sec, insn->offset);
2092 				return 1;
2093 			}
2094 
2095 			if (dead_end_function(file, insn->call_dest))
2096 				return 0;
2097 
2098 			break;
2099 
2100 		case INSN_JUMP_CONDITIONAL:
2101 		case INSN_JUMP_UNCONDITIONAL:
2102 			if (func && is_sibling_call(insn)) {
2103 				ret = validate_sibling_call(insn, &state);
2104 				if (ret)
2105 					return ret;
2106 
2107 			} else if (insn->jump_dest) {
2108 				ret = validate_branch(file, func,
2109 						      insn->jump_dest, state);
2110 				if (ret) {
2111 					if (backtrace)
2112 						BT_FUNC("(branch)", insn);
2113 					return ret;
2114 				}
2115 			}
2116 
2117 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2118 				return 0;
2119 
2120 			break;
2121 
2122 		case INSN_JUMP_DYNAMIC:
2123 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2124 			if (func && is_sibling_call(insn)) {
2125 				ret = validate_sibling_call(insn, &state);
2126 				if (ret)
2127 					return ret;
2128 			}
2129 
2130 			if (insn->type == INSN_JUMP_DYNAMIC)
2131 				return 0;
2132 
2133 			break;
2134 
2135 		case INSN_CONTEXT_SWITCH:
2136 			if (func && (!next_insn || !next_insn->hint)) {
2137 				WARN_FUNC("unsupported instruction in callable function",
2138 					  sec, insn->offset);
2139 				return 1;
2140 			}
2141 			return 0;
2142 
2143 		case INSN_STACK:
2144 			if (update_insn_state(insn, &state))
2145 				return 1;
2146 
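			/*
			 * PUSHF/POPF save and restore the UACCESS flag in a
			 * shift-register style bitmask whose bottom is marked
			 * by the value 1, e.g. two PUSHFs with uaccess = 1
			 * then 0 leave uaccess_stack == 0b110.
			 */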
2147 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2148 				if (!state.uaccess_stack) {
2149 					state.uaccess_stack = 1;
2150 				} else if (state.uaccess_stack >> 31) {
2151 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2152 					return 1;
2153 				}
2154 				state.uaccess_stack <<= 1;
2155 				state.uaccess_stack  |= state.uaccess;
2156 			}
2157 
2158 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2159 				if (state.uaccess_stack) {
2160 					state.uaccess = state.uaccess_stack & 1;
2161 					state.uaccess_stack >>= 1;
2162 					if (state.uaccess_stack == 1)
2163 						state.uaccess_stack = 0;
2164 				}
2165 			}
2166 
2167 			break;
2168 
2169 		case INSN_STAC:
2170 			if (state.uaccess) {
2171 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2172 				return 1;
2173 			}
2174 
2175 			state.uaccess = true;
2176 			break;
2177 
2178 		case INSN_CLAC:
2179 			if (!state.uaccess && func) {
2180 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2181 				return 1;
2182 			}
2183 
2184 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2185 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2186 				return 1;
2187 			}
2188 
2189 			state.uaccess = false;
2190 			break;
2191 
2192 		case INSN_STD:
2193 			if (state.df)
2194 				WARN_FUNC("recursive STD", sec, insn->offset);
2195 
2196 			state.df = true;
2197 			break;
2198 
2199 		case INSN_CLD:
2200 			if (!state.df && func)
2201 				WARN_FUNC("redundant CLD", sec, insn->offset);
2202 
2203 			state.df = false;
2204 			break;
2205 
2206 		default:
2207 			break;
2208 		}
2209 
2210 		if (insn->dead_end)
2211 			return 0;
2212 
2213 		if (!next_insn) {
2214 			if (state.cfa.base == CFI_UNDEFINED)
2215 				return 0;
2216 			WARN("%s: unexpected end of section", sec->name);
2217 			return 1;
2218 		}
2219 
2220 		insn = next_insn;
2221 	}
2222 
2223 	return 0;
2224 }
2225 
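/*
 * Validate code which is only reachable via UNWIND_HINT annotations (e.g.
 * entry and exception code) and thus wasn't covered by validate_functions().
 */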
2226 static int validate_unwind_hints(struct objtool_file *file)
2227 {
2228 	struct instruction *insn;
2229 	int ret, warnings = 0;
2230 	struct insn_state state;
2231 
2232 	if (!file->hints)
2233 		return 0;
2234 
2235 	clear_insn_state(&state);
2236 
2237 	for_each_insn(file, insn) {
2238 		if (insn->hint && !insn->visited) {
2239 			ret = validate_branch(file, insn->func, insn, state);
2240 			if (ret && backtrace)
2241 				BT_FUNC("<=== (hint)", insn);
2242 			warnings += ret;
2243 		}
2244 	}
2245 
2246 	return warnings;
2247 }
2248 
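/*
 * When objtool is run with the retpoline option, every indirect jump or call
 * must have been converted to a retpoline unless it is explicitly annotated
 * as retpoline-safe.
 */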
2249 static int validate_retpoline(struct objtool_file *file)
2250 {
2251 	struct instruction *insn;
2252 	int warnings = 0;
2253 
2254 	for_each_insn(file, insn) {
2255 		if (insn->type != INSN_JUMP_DYNAMIC &&
2256 		    insn->type != INSN_CALL_DYNAMIC)
2257 			continue;
2258 
2259 		if (insn->retpoline_safe)
2260 			continue;
2261 
2262 		/*
2263 		 * .init.text code is run before userspace and thus doesn't
2264 		 * strictly need retpolines, except for modules which are
2265 		 * loaded late; they very much do need retpolines in their
2266 		 * .init.text.
2267 		 */
2268 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2269 			continue;
2270 
2271 		WARN_FUNC("indirect %s found in RETPOLINE build",
2272 			  insn->sec, insn->offset,
2273 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2274 
2275 		warnings++;
2276 	}
2277 
2278 	return warnings;
2279 }
2280 
2281 static bool is_kasan_insn(struct instruction *insn)
2282 {
2283 	return (insn->type == INSN_CALL &&
2284 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2285 }
2286 
2287 static bool is_ubsan_insn(struct instruction *insn)
2288 {
2289 	return (insn->type == INSN_CALL &&
2290 		!strcmp(insn->call_dest->name,
2291 			"__ubsan_handle_builtin_unreachable"));
2292 }
2293 
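/*
 * Decide whether an unreachable instruction can be silently ignored: NOPs
 * and ignored instructions, whitelisted sections, and instructions near a
 * KASAN/UBSAN "unreachable" handler call.
 */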
2294 static bool ignore_unreachable_insn(struct instruction *insn)
2295 {
2296 	int i;
2297 
2298 	if (insn->ignore || insn->type == INSN_NOP)
2299 		return true;
2300 
2301 	/*
2302 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2303 	 * function has an exception table entry.
2304 	 *
2305 	 * Also ignore alternative replacement instructions.  This can happen
2306 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2307 	 */
2308 	if (!strcmp(insn->sec->name, ".fixup") ||
2309 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2310 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2311 		return true;
2312 
2313 	/*
2314 	 * Check if this (or a subsequent) instruction is related to
2315 	 * CONFIG_UBSAN or CONFIG_KASAN.
2316 	 *
2317 	 * End the search at 5 instructions to avoid going into the weeds.
2318 	 */
2319 	if (!insn->func)
2320 		return false;
2321 	for (i = 0; i < 5; i++) {
2322 
2323 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2324 			return true;
2325 
2326 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2327 			if (insn->jump_dest &&
2328 			    insn->jump_dest->func == insn->func) {
2329 				insn = insn->jump_dest;
2330 				continue;
2331 			}
2332 
2333 			break;
2334 		}
2335 
2336 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2337 			break;
2338 
2339 		insn = list_next_entry(insn, list);
2340 	}
2341 
2342 	return false;
2343 }
2344 
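/*
 * Validate every callable function in the file, starting each one from the
 * architecture's initial frame state (initial_func_cfi).
 */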
2345 static int validate_functions(struct objtool_file *file)
2346 {
2347 	struct section *sec;
2348 	struct symbol *func;
2349 	struct instruction *insn;
2350 	struct insn_state state;
2351 	int ret, warnings = 0;
2352 
2353 	clear_insn_state(&state);
2354 
2355 	state.cfa = initial_func_cfi.cfa;
2356 	memcpy(&state.regs, &initial_func_cfi.regs,
2357 	       CFI_NUM_REGS * sizeof(struct cfi_reg));
2358 	state.stack_size = initial_func_cfi.cfa.offset;
2359 
2360 	for_each_sec(file, sec) {
2361 		list_for_each_entry(func, &sec->symbol_list, list) {
2362 			if (func->type != STT_FUNC)
2363 				continue;
2364 
2365 			if (!func->len) {
2366 				WARN("%s() is missing an ELF size annotation",
2367 				     func->name);
2368 				warnings++;
2369 			}
2370 
2371 			if (func->pfunc != func || func->alias != func)
2372 				continue;
2373 
2374 			insn = find_insn(file, sec, func->offset);
2375 			if (!insn || insn->ignore || insn->visited)
2376 				continue;
2377 
2378 			state.uaccess = func->uaccess_safe;
2379 
2380 			ret = validate_branch(file, func, insn, state);
2381 			if (ret && backtrace)
2382 				BT_FUNC("<=== (func)", insn);
2383 			warnings += ret;
2384 		}
2385 	}
2386 
2387 	return warnings;
2388 }
2389 
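/*
 * After all code paths have been followed, any instruction which was never
 * visited and can't be ignored is reported as unreachable.
 */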
2390 static int validate_reachable_instructions(struct objtool_file *file)
2391 {
2392 	struct instruction *insn;
2393 
2394 	if (file->ignore_unreachables)
2395 		return 0;
2396 
2397 	for_each_insn(file, insn) {
2398 		if (insn->visited || ignore_unreachable_insn(insn))
2399 			continue;
2400 
2401 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2402 		return 1;
2403 	}
2404 
2405 	return 0;
2406 }
2407 
2408 static void cleanup(struct objtool_file *file)
2409 {
2410 	struct instruction *insn, *tmpinsn;
2411 	struct alternative *alt, *tmpalt;
2412 
2413 	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
2414 		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
2415 			list_del(&alt->list);
2416 			free(alt);
2417 		}
2418 		list_del(&insn->list);
2419 		hash_del(&insn->hash);
2420 		free(insn);
2421 	}
2422 	elf_close(file->elf);
2423 }
2424 
2425 static struct objtool_file file;
2426 
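/*
 * Main entry point: decode the object file, run the validation passes and,
 * if requested, generate ORC unwind data and write it back to the ELF file.
 */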
2427 int check(const char *_objname, bool orc)
2428 {
2429 	int ret, warnings = 0;
2430 
2431 	objname = _objname;
2432 
2433 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2434 	if (!file.elf)
2435 		return 1;
2436 
2437 	INIT_LIST_HEAD(&file.insn_list);
2438 	hash_init(file.insn_hash);
2439 	file.c_file = find_section_by_name(file.elf, ".comment");
2440 	file.ignore_unreachables = no_unreachable;
2441 	file.hints = false;
2442 
2443 	arch_initial_func_cfi_state(&initial_func_cfi);
2444 
2445 	ret = decode_sections(&file);
2446 	if (ret < 0)
2447 		goto out;
2448 	warnings += ret;
2449 
2450 	if (list_empty(&file.insn_list))
2451 		goto out;
2452 
2453 	if (retpoline) {
2454 		ret = validate_retpoline(&file);
2455 		if (ret < 0)
2456 			goto out;
2457 		warnings += ret;
2458 	}
2459 
2460 	ret = validate_functions(&file);
2461 	if (ret < 0)
2462 		goto out;
2463 	warnings += ret;
2464 
2465 	ret = validate_unwind_hints(&file);
2466 	if (ret < 0)
2467 		goto out;
2468 	warnings += ret;
2469 
2470 	if (!warnings) {
2471 		ret = validate_reachable_instructions(&file);
2472 		if (ret < 0)
2473 			goto out;
2474 		warnings += ret;
2475 	}
2476 
2477 	if (orc) {
2478 		ret = create_orc(&file);
2479 		if (ret < 0)
2480 			goto out;
2481 
2482 		ret = create_orc_sections(&file);
2483 		if (ret < 0)
2484 			goto out;
2485 
2486 		ret = elf_write(file.elf);
2487 		if (ret < 0)
2488 			goto out;
2489 	}
2490 
2491 out:
2492 	cleanup(&file);
2493 
2494 	/* ignore warnings for now until we get all the code cleaned up */
2495 	if (ret || warnings)
2496 		return 0;
2497 	return 0;
2498 }
2499