xref: /linux-6.15/tools/objtool/check.c (revision e378fa17)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5 
6 #include <string.h>
7 #include <stdlib.h>
8 
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15 
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18 
19 #define FAKE_JUMP_OFFSET -1
20 
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22 
23 struct alternative {
24 	struct list_head list;
25 	struct instruction *insn;
26 	bool skip_orig;
27 };
28 
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31 
32 struct instruction *find_insn(struct objtool_file *file,
33 			      struct section *sec, unsigned long offset)
34 {
35 	struct instruction *insn;
36 
37 	hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 		if (insn->sec == sec && insn->offset == offset)
39 			return insn;
40 
41 	return NULL;
42 }
43 
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 					      struct instruction *insn)
46 {
47 	struct instruction *next = list_next_entry(insn, list);
48 
49 	if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 		return NULL;
51 
52 	return next;
53 }
54 
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 					       struct instruction *insn)
57 {
58 	struct instruction *next = list_next_entry(insn, list);
59 	struct symbol *func = insn->func;
60 
61 	if (!func)
62 		return NULL;
63 
64 	if (&next->list != &file->insn_list && next->func == func)
65 		return next;
66 
67 	/* Check if we're already in the subfunction: */
68 	if (func == func->cfunc)
69 		return NULL;
70 
71 	/* Move to the subfunction: */
72 	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74 
75 #define func_for_each_insn(file, func, insn)				\
76 	for (insn = find_insn(file, func->sec, func->offset);		\
77 	     insn;							\
78 	     insn = next_insn_same_func(file, insn))
79 
80 #define sym_for_each_insn(file, sym, insn)				\
81 	for (insn = find_insn(file, sym->sec, sym->offset);		\
82 	     insn && &insn->list != &file->insn_list &&			\
83 		insn->sec == sym->sec &&				\
84 		insn->offset < sym->offset + sym->len;			\
85 	     insn = list_next_entry(insn, list))
86 
87 #define sym_for_each_insn_continue_reverse(file, sym, insn)		\
88 	for (insn = list_prev_entry(insn, list);			\
89 	     &insn->list != &file->insn_list &&				\
90 		insn->sec == sym->sec && insn->offset >= sym->offset;	\
91 	     insn = list_prev_entry(insn, list))
92 
93 #define sec_for_each_insn_from(file, insn)				\
94 	for (; insn; insn = next_insn_same_sec(file, insn))
95 
96 #define sec_for_each_insn_continue(file, insn)				\
97 	for (insn = next_insn_same_sec(file, insn); insn;		\
98 	     insn = next_insn_same_sec(file, insn))
99 
100 static bool is_static_jump(struct instruction *insn)
101 {
102 	return insn->type == INSN_JUMP_CONDITIONAL ||
103 	       insn->type == INSN_JUMP_UNCONDITIONAL;
104 }
105 
106 static bool is_sibling_call(struct instruction *insn)
107 {
108 	/* An indirect jump is either a sibling call or a jump to a table. */
109 	if (insn->type == INSN_JUMP_DYNAMIC)
110 		return list_empty(&insn->alts);
111 
112 	if (!is_static_jump(insn))
113 		return false;
114 
115 	/* add_jump_destinations() sets insn->call_dest for sibling calls. */
116 	return !!insn->call_dest;
117 }
118 
119 /*
120  * This checks to see if the given function is a "noreturn" function.
121  *
122  * For global functions which are outside the scope of this object file, we
123  * have to keep a manual list of them.
124  *
125  * For local functions, we have to detect them manually by simply looking for
126  * the lack of a return instruction.
127  */
128 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
129 				int recursion)
130 {
131 	int i;
132 	struct instruction *insn;
133 	bool empty = true;
134 
135 	/*
136 	 * Unfortunately these have to be hard coded because the noreturn
137 	 * attribute isn't provided in ELF data.
138 	 */
139 	static const char * const global_noreturns[] = {
140 		"__stack_chk_fail",
141 		"panic",
142 		"do_exit",
143 		"do_task_dead",
144 		"__module_put_and_exit",
145 		"complete_and_exit",
146 		"__reiserfs_panic",
147 		"lbug_with_loc",
148 		"fortify_panic",
149 		"usercopy_abort",
150 		"machine_real_restart",
151 		"rewind_stack_do_exit",
152 		"kunit_try_catch_throw",
153 	};
154 
155 	if (!func)
156 		return false;
157 
158 	if (func->bind == STB_WEAK)
159 		return false;
160 
161 	if (func->bind == STB_GLOBAL)
162 		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
163 			if (!strcmp(func->name, global_noreturns[i]))
164 				return true;
165 
166 	if (!func->len)
167 		return false;
168 
169 	insn = find_insn(file, func->sec, func->offset);
170 	if (!insn->func)
171 		return false;
172 
173 	func_for_each_insn(file, func, insn) {
174 		empty = false;
175 
176 		if (insn->type == INSN_RETURN)
177 			return false;
178 	}
179 
180 	if (empty)
181 		return false;
182 
183 	/*
184 	 * A function can have a sibling call instead of a return.  In that
185 	 * case, the function's dead-end status depends on whether the target
186 	 * of the sibling call returns.
187 	 */
188 	func_for_each_insn(file, func, insn) {
189 		if (is_sibling_call(insn)) {
190 			struct instruction *dest = insn->jump_dest;
191 
192 			if (!dest)
193 				/* sibling call to another file */
194 				return false;
195 
196 			/* local sibling call */
197 			if (recursion == 5) {
198 				/*
199 				 * Infinite recursion: two functions have
200 				 * sibling calls to each other.  This is a very
201 				 * rare case.  It means they aren't dead ends.
202 				 */
203 				return false;
204 			}
205 
206 			return __dead_end_function(file, dest->func, recursion+1);
207 		}
208 	}
209 
210 	return true;
211 }
212 
213 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
214 {
215 	return __dead_end_function(file, func, 0);
216 }
217 
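/*
 * Example (sketch, hypothetical function): a local helper whose only exit is
 *
 *	static void my_abort(void)
 *	{
 *		...
 *		panic("...");
 *	}
 *
 * has no INSN_RETURN and no sibling call, so the loops above fall through and
 * it's treated as a dead end even though it isn't in global_noreturns.
 */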
218 static void clear_insn_state(struct insn_state *state)
219 {
220 	int i;
221 
222 	memset(state, 0, sizeof(*state));
223 	state->cfa.base = CFI_UNDEFINED;
224 	for (i = 0; i < CFI_NUM_REGS; i++) {
225 		state->regs[i].base = CFI_UNDEFINED;
226 		state->vals[i].base = CFI_UNDEFINED;
227 	}
228 	state->drap_reg = CFI_UNDEFINED;
229 	state->drap_offset = -1;
230 }
231 
232 /*
233  * Call the arch-specific instruction decoder for all the instructions and add
234  * them to the global instruction list.
235  */
236 static int decode_instructions(struct objtool_file *file)
237 {
238 	struct section *sec;
239 	struct symbol *func;
240 	unsigned long offset;
241 	struct instruction *insn;
242 	unsigned long nr_insns = 0;
243 	int ret;
244 
245 	for_each_sec(file, sec) {
246 
247 		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
248 			continue;
249 
250 		if (strcmp(sec->name, ".altinstr_replacement") &&
251 		    strcmp(sec->name, ".altinstr_aux") &&
252 		    strncmp(sec->name, ".discard.", 9))
253 			sec->text = true;
254 
255 		for (offset = 0; offset < sec->len; offset += insn->len) {
256 			insn = malloc(sizeof(*insn));
257 			if (!insn) {
258 				WARN("malloc failed");
259 				return -1;
260 			}
261 			memset(insn, 0, sizeof(*insn));
262 			INIT_LIST_HEAD(&insn->alts);
263 			clear_insn_state(&insn->state);
264 
265 			insn->sec = sec;
266 			insn->offset = offset;
267 
268 			ret = arch_decode_instruction(file->elf, sec, offset,
269 						      sec->len - offset,
270 						      &insn->len, &insn->type,
271 						      &insn->immediate,
272 						      &insn->stack_op);
273 			if (ret)
274 				goto err;
275 
276 			hash_add(file->insn_hash, &insn->hash, insn->offset);
277 			list_add_tail(&insn->list, &file->insn_list);
278 			nr_insns++;
279 		}
280 
281 		list_for_each_entry(func, &sec->symbol_list, list) {
282 			if (func->type != STT_FUNC || func->alias != func)
283 				continue;
284 
285 			if (!find_insn(file, sec, func->offset)) {
286 				WARN("%s(): can't find starting instruction",
287 				     func->name);
288 				return -1;
289 			}
290 
291 			sym_for_each_insn(file, func, insn)
292 				insn->func = func;
293 		}
294 	}
295 
296 	if (stats)
297 		printf("nr_insns: %lu\n", nr_insns);
298 
299 	return 0;
300 
301 err:
302 	free(insn);
303 	return ret;
304 }
305 
306 /*
307  * Mark "ud2" instructions and manually annotated dead ends.
308  */
309 static int add_dead_ends(struct objtool_file *file)
310 {
311 	struct section *sec;
312 	struct rela *rela;
313 	struct instruction *insn;
314 	bool found;
315 
316 	/*
317 	 * By default, "ud2" is a dead end unless otherwise annotated, because
318 	 * GCC 7 inserts it for certain divide-by-zero cases.
319 	 */
320 	for_each_insn(file, insn)
321 		if (insn->type == INSN_BUG)
322 			insn->dead_end = true;
323 
324 	/*
325 	 * Check for manually annotated dead ends.
326 	 */
327 	sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
328 	if (!sec)
329 		goto reachable;
330 
331 	list_for_each_entry(rela, &sec->rela_list, list) {
332 		if (rela->sym->type != STT_SECTION) {
333 			WARN("unexpected relocation symbol type in %s", sec->name);
334 			return -1;
335 		}
336 		insn = find_insn(file, rela->sym->sec, rela->addend);
337 		if (insn)
338 			insn = list_prev_entry(insn, list);
339 		else if (rela->addend == rela->sym->sec->len) {
340 			found = false;
341 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
342 				if (insn->sec == rela->sym->sec) {
343 					found = true;
344 					break;
345 				}
346 			}
347 
348 			if (!found) {
349 				WARN("can't find unreachable insn at %s+0x%x",
350 				     rela->sym->sec->name, rela->addend);
351 				return -1;
352 			}
353 		} else {
354 			WARN("can't find unreachable insn at %s+0x%x",
355 			     rela->sym->sec->name, rela->addend);
356 			return -1;
357 		}
358 
359 		insn->dead_end = true;
360 	}
361 
362 reachable:
363 	/*
364 	 * These manually annotated reachable checks are needed for GCC 4.4,
365 	 * where the Linux unreachable() macro isn't supported.  In that case
366 	 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
367 	 * not a dead end.
368 	 */
369 	sec = find_section_by_name(file->elf, ".rela.discard.reachable");
370 	if (!sec)
371 		return 0;
372 
373 	list_for_each_entry(rela, &sec->rela_list, list) {
374 		if (rela->sym->type != STT_SECTION) {
375 			WARN("unexpected relocation symbol type in %s", sec->name);
376 			return -1;
377 		}
378 		insn = find_insn(file, rela->sym->sec, rela->addend);
379 		if (insn)
380 			insn = list_prev_entry(insn, list);
381 		else if (rela->addend == rela->sym->sec->len) {
382 			found = false;
383 			list_for_each_entry_reverse(insn, &file->insn_list, list) {
384 				if (insn->sec == rela->sym->sec) {
385 					found = true;
386 					break;
387 				}
388 			}
389 
390 			if (!found) {
391 				WARN("can't find reachable insn at %s+0x%x",
392 				     rela->sym->sec->name, rela->addend);
393 				return -1;
394 			}
395 		} else {
396 			WARN("can't find reachable insn at %s+0x%x",
397 			     rela->sym->sec->name, rela->addend);
398 			return -1;
399 		}
400 
401 		insn->dead_end = false;
402 	}
403 
404 	return 0;
405 }
406 
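/*
 * For reference, the annotations above come from the kernel's
 * annotate_unreachable()/annotate_reachable() helpers, roughly (sketch):
 *
 *	asm volatile("999:\n\t"
 *		     ".pushsection .discard.unreachable\n\t"
 *		     ".long 999b - .\n\t"
 *		     ".popsection\n\t");
 *
 * which is what produces the .rela.discard.unreachable (and .reachable)
 * entries walked above.
 */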
407 /*
408  * Warnings shouldn't be reported for ignored functions.
409  */
410 static void add_ignores(struct objtool_file *file)
411 {
412 	struct instruction *insn;
413 	struct section *sec;
414 	struct symbol *func;
415 	struct rela *rela;
416 
417 	sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
418 	if (!sec)
419 		return;
420 
421 	list_for_each_entry(rela, &sec->rela_list, list) {
422 		switch (rela->sym->type) {
423 		case STT_FUNC:
424 			func = rela->sym;
425 			break;
426 
427 		case STT_SECTION:
428 			func = find_func_by_offset(rela->sym->sec, rela->addend);
429 			if (!func)
430 				continue;
431 			break;
432 
433 		default:
434 			WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
435 			continue;
436 		}
437 
438 		func_for_each_insn(file, func, insn)
439 			insn->ignore = true;
440 	}
441 }
442 
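/*
 * The entries above are emitted by the STACK_FRAME_NON_STANDARD() macro,
 * roughly (sketch):
 *
 *	#define STACK_FRAME_NON_STANDARD(func)				\
 *		static void __used					\
 *		__section(.discard.func_stack_frame_non_standard)	\
 *			*__func_stack_frame_non_standard_##func = func
 *
 * i.e. a pointer to the whitelisted function dropped into the
 * .discard.func_stack_frame_non_standard section, whose relocations are
 * walked above.
 */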
443 /*
444  * This is a whitelist of functions that are allowed to be called with AC set.
445  * The list is meant to be minimal and only contains compiler instrumentation
446  * ABI and a few functions used to implement *_{to,from}_user() functions.
447  *
448  * These functions must not directly change AC, but may PUSHF/POPF.
449  */
450 static const char *uaccess_safe_builtin[] = {
451 	/* KASAN */
452 	"kasan_report",
453 	"check_memory_region",
454 	/* KASAN out-of-line */
455 	"__asan_loadN_noabort",
456 	"__asan_load1_noabort",
457 	"__asan_load2_noabort",
458 	"__asan_load4_noabort",
459 	"__asan_load8_noabort",
460 	"__asan_load16_noabort",
461 	"__asan_storeN_noabort",
462 	"__asan_store1_noabort",
463 	"__asan_store2_noabort",
464 	"__asan_store4_noabort",
465 	"__asan_store8_noabort",
466 	"__asan_store16_noabort",
467 	/* KASAN in-line */
468 	"__asan_report_load_n_noabort",
469 	"__asan_report_load1_noabort",
470 	"__asan_report_load2_noabort",
471 	"__asan_report_load4_noabort",
472 	"__asan_report_load8_noabort",
473 	"__asan_report_load16_noabort",
474 	"__asan_report_store_n_noabort",
475 	"__asan_report_store1_noabort",
476 	"__asan_report_store2_noabort",
477 	"__asan_report_store4_noabort",
478 	"__asan_report_store8_noabort",
479 	"__asan_report_store16_noabort",
480 	/* KCOV */
481 	"write_comp_data",
482 	"__sanitizer_cov_trace_pc",
483 	"__sanitizer_cov_trace_const_cmp1",
484 	"__sanitizer_cov_trace_const_cmp2",
485 	"__sanitizer_cov_trace_const_cmp4",
486 	"__sanitizer_cov_trace_const_cmp8",
487 	"__sanitizer_cov_trace_cmp1",
488 	"__sanitizer_cov_trace_cmp2",
489 	"__sanitizer_cov_trace_cmp4",
490 	"__sanitizer_cov_trace_cmp8",
491 	"__sanitizer_cov_trace_switch",
492 	/* UBSAN */
493 	"ubsan_type_mismatch_common",
494 	"__ubsan_handle_type_mismatch",
495 	"__ubsan_handle_type_mismatch_v1",
496 	"__ubsan_handle_shift_out_of_bounds",
497 	/* misc */
498 	"csum_partial_copy_generic",
499 	"__memcpy_mcsafe",
500 	"mcsafe_handle_tail",
501 	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
502 	NULL
503 };
504 
505 static void add_uaccess_safe(struct objtool_file *file)
506 {
507 	struct symbol *func;
508 	const char **name;
509 
510 	if (!uaccess)
511 		return;
512 
513 	for (name = uaccess_safe_builtin; *name; name++) {
514 		func = find_symbol_by_name(file->elf, *name);
515 		if (!func)
516 			continue;
517 
518 		func->uaccess_safe = true;
519 	}
520 }
521 
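/*
 * Example (sketch): code between user_access_begin() and user_access_end()
 * runs with EFLAGS.AC set (STAC).  A call from that window to a function in
 * uaccess_safe_builtin[] (e.g. KASAN's check_memory_region()) is tolerated,
 * while a call to anything else triggers the "call to ... with UACCESS
 * enabled" warning from validate_call().
 */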
522 /*
523  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
524  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
525  * But it at least allows objtool to understand the control flow *around* the
526  * retpoline.
527  */
528 static int add_ignore_alternatives(struct objtool_file *file)
529 {
530 	struct section *sec;
531 	struct rela *rela;
532 	struct instruction *insn;
533 
534 	sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
535 	if (!sec)
536 		return 0;
537 
538 	list_for_each_entry(rela, &sec->rela_list, list) {
539 		if (rela->sym->type != STT_SECTION) {
540 			WARN("unexpected relocation symbol type in %s", sec->name);
541 			return -1;
542 		}
543 
544 		insn = find_insn(file, rela->sym->sec, rela->addend);
545 		if (!insn) {
546 			WARN("bad .discard.ignore_alts entry");
547 			return -1;
548 		}
549 
550 		insn->ignore_alts = true;
551 	}
552 
553 	return 0;
554 }
555 
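/*
 * These entries are generated by the ANNOTATE_IGNORE_ALTERNATIVE asm macro,
 * roughly (sketch):
 *
 *	.macro ANNOTATE_IGNORE_ALTERNATIVE
 *	999:
 *		.pushsection .discard.ignore_alts
 *		.long 999b - .
 *		.popsection
 *	.endm
 *
 * which the retpoline ALTERNATIVE sites use so objtool skips validating the
 * alternative bodies.
 */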
556 /*
557  * Find the destination instructions for all jumps.
558  */
559 static int add_jump_destinations(struct objtool_file *file)
560 {
561 	struct instruction *insn;
562 	struct rela *rela;
563 	struct section *dest_sec;
564 	unsigned long dest_off;
565 
566 	for_each_insn(file, insn) {
567 		if (!is_static_jump(insn))
568 			continue;
569 
570 		if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
571 			continue;
572 
573 		rela = find_rela_by_dest_range(file->elf, insn->sec,
574 					       insn->offset, insn->len);
575 		if (!rela) {
576 			dest_sec = insn->sec;
577 			dest_off = arch_jump_destination(insn);
578 		} else if (rela->sym->type == STT_SECTION) {
579 			dest_sec = rela->sym->sec;
580 			dest_off = arch_dest_rela_offset(rela->addend);
581 		} else if (rela->sym->sec->idx) {
582 			dest_sec = rela->sym->sec;
583 			dest_off = rela->sym->sym.st_value +
584 				   arch_dest_rela_offset(rela->addend);
585 		} else if (strstr(rela->sym->name, "_indirect_thunk_")) {
586 			/*
587 			 * Retpoline jumps are really dynamic jumps in
588 			 * disguise, so convert them accordingly.
589 			 */
590 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
591 				insn->type = INSN_JUMP_DYNAMIC;
592 			else
593 				insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
594 
595 			insn->retpoline_safe = true;
596 			continue;
597 		} else {
598 			/* external sibling call */
599 			insn->call_dest = rela->sym;
600 			continue;
601 		}
602 
603 		insn->jump_dest = find_insn(file, dest_sec, dest_off);
604 		if (!insn->jump_dest) {
605 
606 			/*
607 			 * This is a special case where an alt instruction
608 			 * jumps past the end of the section.  These are
609 			 * handled later in handle_group_alt().
610 			 */
611 			if (!strcmp(insn->sec->name, ".altinstr_replacement"))
612 				continue;
613 
614 			WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
615 				  insn->sec, insn->offset, dest_sec->name,
616 				  dest_off);
617 			return -1;
618 		}
619 
620 		/*
621 		 * Cross-function jump.
622 		 */
623 		if (insn->func && insn->jump_dest->func &&
624 		    insn->func != insn->jump_dest->func) {
625 
626 			/*
627 			 * For GCC 8+, create parent/child links for any cold
628 			 * subfunctions.  This is _mostly_ redundant with a
629 			 * similar initialization in read_symbols().
630 			 *
631 			 * If a function has aliases, we want the *first* such
632 			 * function in the symbol table to be the subfunction's
633 			 * parent.  In that case we overwrite the
634 			 * initialization done in read_symbols().
635 			 *
636 			 * However this code can't completely replace the
637 			 * read_symbols() code because this doesn't detect the
638 			 * case where the parent function's only reference to a
639 			 * subfunction is through a jump table.
640 			 */
641 			if (!strstr(insn->func->name, ".cold.") &&
642 			    strstr(insn->jump_dest->func->name, ".cold.")) {
643 				insn->func->cfunc = insn->jump_dest->func;
644 				insn->jump_dest->func->pfunc = insn->func;
645 
646 			} else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
647 				   insn->jump_dest->offset == insn->jump_dest->func->offset) {
648 
649 				/* internal sibling call */
650 				insn->call_dest = insn->jump_dest->func;
651 			}
652 		}
653 	}
654 
655 	return 0;
656 }
657 
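/*
 * Example (sketch): with CONFIG_RETPOLINE an indirect jump is emitted as
 *
 *	jmp __x86_indirect_thunk_rax
 *
 * The thunk symbol is typically undefined in the object being checked
 * (section index 0), so it falls into the "_indirect_thunk_" case above and
 * the jump is reclassified as INSN_JUMP_DYNAMIC and marked retpoline_safe.
 */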
658 /*
659  * Find the destination instructions for all calls.
660  */
661 static int add_call_destinations(struct objtool_file *file)
662 {
663 	struct instruction *insn;
664 	unsigned long dest_off;
665 	struct rela *rela;
666 
667 	for_each_insn(file, insn) {
668 		if (insn->type != INSN_CALL)
669 			continue;
670 
671 		rela = find_rela_by_dest_range(file->elf, insn->sec,
672 					       insn->offset, insn->len);
673 		if (!rela) {
674 			dest_off = arch_jump_destination(insn);
675 			insn->call_dest = find_func_by_offset(insn->sec, dest_off);
676 			if (!insn->call_dest)
677 				insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);
678 
679 			if (insn->ignore)
680 				continue;
681 
682 			if (!insn->call_dest) {
683 				WARN_FUNC("unsupported intra-function call",
684 					  insn->sec, insn->offset);
685 				if (retpoline)
686 					WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
687 				return -1;
688 			}
689 
690 			if (insn->func && insn->call_dest->type != STT_FUNC) {
691 				WARN_FUNC("unsupported call to non-function",
692 					  insn->sec, insn->offset);
693 				return -1;
694 			}
695 
696 		} else if (rela->sym->type == STT_SECTION) {
697 			dest_off = arch_dest_rela_offset(rela->addend);
698 			insn->call_dest = find_func_by_offset(rela->sym->sec,
699 							      dest_off);
700 			if (!insn->call_dest) {
701 				WARN_FUNC("can't find call dest symbol at %s+0x%lx",
702 					  insn->sec, insn->offset,
703 					  rela->sym->sec->name,
704 					  dest_off);
705 				return -1;
706 			}
707 		} else
708 			insn->call_dest = rela->sym;
709 	}
710 
711 	return 0;
712 }
713 
714 /*
715  * The .alternatives section requires some extra special care, over and above
716  * what other special sections require:
717  *
718  * 1. Because alternatives are patched in-place, we need to insert a fake jump
719  *    instruction at the end so that validate_branch() skips all the original
720  *    replaced instructions when validating the new instruction path.
721  *
722  * 2. An added wrinkle is that the new instruction length might be zero.  In
723  *    that case the old instructions are replaced with noops.  We simulate that
724  *    by creating a fake jump as the only new instruction.
725  *
726  * 3. In some cases, the alternative section includes an instruction which
727  *    conditionally jumps to the _end_ of the entry.  We have to modify these
728  *    jumps' destinations to point back to .text rather than the end of the
729  *    entry in .altinstr_replacement.
730  */
731 static int handle_group_alt(struct objtool_file *file,
732 			    struct special_alt *special_alt,
733 			    struct instruction *orig_insn,
734 			    struct instruction **new_insn)
735 {
736 	struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
737 	unsigned long dest_off;
738 
739 	last_orig_insn = NULL;
740 	insn = orig_insn;
741 	sec_for_each_insn_from(file, insn) {
742 		if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
743 			break;
744 
745 		insn->alt_group = true;
746 		last_orig_insn = insn;
747 	}
748 
749 	if (next_insn_same_sec(file, last_orig_insn)) {
750 		fake_jump = malloc(sizeof(*fake_jump));
751 		if (!fake_jump) {
752 			WARN("malloc failed");
753 			return -1;
754 		}
755 		memset(fake_jump, 0, sizeof(*fake_jump));
756 		INIT_LIST_HEAD(&fake_jump->alts);
757 		clear_insn_state(&fake_jump->state);
758 
759 		fake_jump->sec = special_alt->new_sec;
760 		fake_jump->offset = FAKE_JUMP_OFFSET;
761 		fake_jump->type = INSN_JUMP_UNCONDITIONAL;
762 		fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
763 		fake_jump->func = orig_insn->func;
764 	}
765 
766 	if (!special_alt->new_len) {
767 		if (!fake_jump) {
768 			WARN("%s: empty alternative at end of section",
769 			     special_alt->orig_sec->name);
770 			return -1;
771 		}
772 
773 		*new_insn = fake_jump;
774 		return 0;
775 	}
776 
777 	last_new_insn = NULL;
778 	insn = *new_insn;
779 	sec_for_each_insn_from(file, insn) {
780 		if (insn->offset >= special_alt->new_off + special_alt->new_len)
781 			break;
782 
783 		last_new_insn = insn;
784 
785 		insn->ignore = orig_insn->ignore_alts;
786 		insn->func = orig_insn->func;
787 
788 		/*
789 		 * Since alternative replacement code is copy/pasted by the
790 		 * kernel after applying relocations, generally such code can't
791 		 * have relative-address relocation references to outside the
792 		 * .altinstr_replacement section, unless the arch's
793 		 * alternatives code can adjust the relative offsets
794 		 * accordingly.
795 		 *
796 		 * The x86 alternatives code adjusts the offsets only when it
797 		 * encounters a branch instruction at the very beginning of the
798 		 * replacement group.
799 		 */
800 		if ((insn->offset != special_alt->new_off ||
801 		    (insn->type != INSN_CALL && !is_static_jump(insn))) &&
802 		    find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) {
803 
804 			WARN_FUNC("unsupported relocation in alternatives section",
805 				  insn->sec, insn->offset);
806 			return -1;
807 		}
808 
809 		if (!is_static_jump(insn))
810 			continue;
811 
812 		if (!insn->immediate)
813 			continue;
814 
815 		dest_off = arch_jump_destination(insn);
816 		if (dest_off == special_alt->new_off + special_alt->new_len) {
817 			if (!fake_jump) {
818 				WARN("%s: alternative jump to end of section",
819 				     special_alt->orig_sec->name);
820 				return -1;
821 			}
822 			insn->jump_dest = fake_jump;
823 		}
824 
825 		if (!insn->jump_dest) {
826 			WARN_FUNC("can't find alternative jump destination",
827 				  insn->sec, insn->offset);
828 			return -1;
829 		}
830 	}
831 
832 	if (!last_new_insn) {
833 		WARN_FUNC("can't find last new alternative instruction",
834 			  special_alt->new_sec, special_alt->new_off);
835 		return -1;
836 	}
837 
838 	if (fake_jump)
839 		list_add(&fake_jump->list, &last_new_insn->list);
840 
841 	return 0;
842 }
843 
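/*
 * Example (sketch; names and feature bit are made up): a kernel site like
 *
 *	alternative("call old_func", "call new_func", X86_FEATURE_FOO)
 *
 * puts "call old_func" in .text and "call new_func" in .altinstr_replacement,
 * with a struct alt_instr entry in .altinstructions tying them together.
 * handle_group_alt() appends the fake jump so that validate_branch(), after
 * walking the replacement, resumes at the instruction following the original
 * group instead of falling back into the replaced bytes.
 */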
844 /*
845  * A jump table entry can either convert a nop to a jump or a jump to a nop.
846  * If the original instruction is a jump, make the alt entry an effective nop
847  * by just skipping the original instruction.
848  */
849 static int handle_jump_alt(struct objtool_file *file,
850 			   struct special_alt *special_alt,
851 			   struct instruction *orig_insn,
852 			   struct instruction **new_insn)
853 {
854 	if (orig_insn->type == INSN_NOP)
855 		return 0;
856 
857 	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
858 		WARN_FUNC("unsupported instruction at jump label",
859 			  orig_insn->sec, orig_insn->offset);
860 		return -1;
861 	}
862 
863 	*new_insn = list_next_entry(orig_insn, list);
864 	return 0;
865 }
866 
867 /*
868  * Read all the special sections which have alternate instructions which can be
869  * patched in or redirected to at runtime.  Each instruction having alternate
870  * instruction(s) has them added to its insn->alts list, which will be
871  * traversed in validate_branch().
872  */
873 static int add_special_section_alts(struct objtool_file *file)
874 {
875 	struct list_head special_alts;
876 	struct instruction *orig_insn, *new_insn;
877 	struct special_alt *special_alt, *tmp;
878 	struct alternative *alt;
879 	int ret;
880 
881 	ret = special_get_alts(file->elf, &special_alts);
882 	if (ret)
883 		return ret;
884 
885 	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
886 
887 		orig_insn = find_insn(file, special_alt->orig_sec,
888 				      special_alt->orig_off);
889 		if (!orig_insn) {
890 			WARN_FUNC("special: can't find orig instruction",
891 				  special_alt->orig_sec, special_alt->orig_off);
892 			ret = -1;
893 			goto out;
894 		}
895 
896 		new_insn = NULL;
897 		if (!special_alt->group || special_alt->new_len) {
898 			new_insn = find_insn(file, special_alt->new_sec,
899 					     special_alt->new_off);
900 			if (!new_insn) {
901 				WARN_FUNC("special: can't find new instruction",
902 					  special_alt->new_sec,
903 					  special_alt->new_off);
904 				ret = -1;
905 				goto out;
906 			}
907 		}
908 
909 		if (special_alt->group) {
910 			if (!special_alt->orig_len) {
911 				WARN_FUNC("empty alternative entry",
912 					  orig_insn->sec, orig_insn->offset);
913 				continue;
914 			}
915 
916 			ret = handle_group_alt(file, special_alt, orig_insn,
917 					       &new_insn);
918 			if (ret)
919 				goto out;
920 		} else if (special_alt->jump_or_nop) {
921 			ret = handle_jump_alt(file, special_alt, orig_insn,
922 					      &new_insn);
923 			if (ret)
924 				goto out;
925 		}
926 
927 		alt = malloc(sizeof(*alt));
928 		if (!alt) {
929 			WARN("malloc failed");
930 			ret = -1;
931 			goto out;
932 		}
933 
934 		alt->insn = new_insn;
935 		alt->skip_orig = special_alt->skip_orig;
936 		orig_insn->ignore_alts |= special_alt->skip_alt;
937 		list_add_tail(&alt->list, &orig_insn->alts);
938 
939 		list_del(&special_alt->list);
940 		free(special_alt);
941 	}
942 
943 out:
944 	return ret;
945 }
946 
947 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
948 			    struct rela *table)
949 {
950 	struct rela *rela = table;
951 	struct instruction *dest_insn;
952 	struct alternative *alt;
953 	struct symbol *pfunc = insn->func->pfunc;
954 	unsigned int prev_offset = 0;
955 
956 	/*
957 	 * Each @rela is a switch table relocation which points to the target
958 	 * instruction.
959 	 */
960 	list_for_each_entry_from(rela, &table->sec->rela_list, list) {
961 
962 		/* Check for the end of the table: */
963 		if (rela != table && rela->jump_table_start)
964 			break;
965 
966 		/* Make sure the table entries are consecutive: */
967 		if (prev_offset && rela->offset != prev_offset + 8)
968 			break;
969 
970 		/* Detect function pointers from contiguous objects: */
971 		if (rela->sym->sec == pfunc->sec &&
972 		    rela->addend == pfunc->offset)
973 			break;
974 
975 		dest_insn = find_insn(file, rela->sym->sec, rela->addend);
976 		if (!dest_insn)
977 			break;
978 
979 		/* Make sure the destination is in the same function: */
980 		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
981 			break;
982 
983 		alt = malloc(sizeof(*alt));
984 		if (!alt) {
985 			WARN("malloc failed");
986 			return -1;
987 		}
988 
989 		alt->insn = dest_insn;
990 		list_add_tail(&alt->list, &insn->alts);
991 		prev_offset = rela->offset;
992 	}
993 
994 	if (!prev_offset) {
995 		WARN_FUNC("can't find switch jump table",
996 			  insn->sec, insn->offset);
997 		return -1;
998 	}
999 
1000 	return 0;
1001 }
1002 
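/*
 * Example (sketch): a dense C switch such as
 *
 *	switch (x) {
 *	case 0: ... case 7: ...
 *	}
 *
 * may be compiled into an anonymous .rodata table of code addresses plus an
 * indirect jump.  Each table slot has its own rela, and the loop above turns
 * each slot into an alternative branch target of the indirect jump so that
 * validate_branch() can follow every case.
 */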
1003 /*
1004  * find_jump_table() - Given a dynamic jump, find the switch jump table in
1005  * .rodata associated with it.
1006  *
1007  * There are 3 basic patterns:
1008  *
1009  * 1. jmpq *[rodata addr](,%reg,8)
1010  *
1011  *    This is the most common case by far.  It jumps to an address in a simple
1012  *    jump table which is stored in .rodata.
1013  *
1014  * 2. jmpq *[rodata addr](%rip)
1015  *
1016  *    This is caused by a rare GCC quirk, currently only seen in three driver
1017  *    functions in the kernel, only with certain obscure non-distro configs.
1018  *
1019  *    As part of an optimization, GCC makes a copy of an existing switch jump
1020  *    table, modifies it, and then hard-codes the jump (albeit with an indirect
1021  *    jump) to use a single entry in the table.  The rest of the jump table and
1022  *    some of its jump targets remain as dead code.
1023  *
1024  *    In such a case we can just crudely ignore all unreachable instruction
1025  *    warnings for the entire object file.  Ideally we would just ignore them
1026  *    for the function, but that would require redesigning the code quite a
1027  *    bit.  And honestly that's just not worth doing: unreachable instruction
1028  *    warnings are of questionable value anyway, and this is such a rare issue.
1029  *
1030  * 3. mov [rodata addr],%reg1
1031  *    ... some instructions ...
1032  *    jmpq *(%reg1,%reg2,8)
1033  *
1034  *    This is a fairly uncommon pattern which is new for GCC 6.  As of this
1035  *    writing, there are 11 occurrences of it in the allmodconfig kernel.
1036  *
1037  *    As of GCC 7 there are quite a few more of these and the 'in between' code
1038  *    is significant. Especially with KASAN enabled, some of the code between the mov
1039  *    and jmpq uses .rodata itself, which can confuse things.
1040  *
1041  *    TODO: Once we have DWARF CFI and smarter instruction decoding logic,
1042  *    ensure the same register is used in the mov and jump instructions.
1043  *
1044  *    NOTE: RETPOLINE made it harder still to decode dynamic jumps.
1045  */
1046 static struct rela *find_jump_table(struct objtool_file *file,
1047 				      struct symbol *func,
1048 				      struct instruction *insn)
1049 {
1050 	struct rela *text_rela, *table_rela;
1051 	struct instruction *dest_insn, *orig_insn = insn;
1052 	struct section *table_sec;
1053 	unsigned long table_offset;
1054 
1055 	/*
1056 	 * Backward search using the @first_jump_src links; these help avoid
1057 	 * much of the 'in between' code and keep us from getting confused by
1058 	 * it.
1059 	 */
1060 	for (;
1061 	     &insn->list != &file->insn_list && insn->func && insn->func->pfunc == func;
1062 	     insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1063 
1064 		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1065 			break;
1066 
1067 		/* allow small jumps within the range */
1068 		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1069 		    insn->jump_dest &&
1070 		    (insn->jump_dest->offset <= insn->offset ||
1071 		     insn->jump_dest->offset > orig_insn->offset))
1072 		    break;
1073 
1074 		/* look for a relocation which references .rodata */
1075 		text_rela = find_rela_by_dest_range(file->elf, insn->sec,
1076 						    insn->offset, insn->len);
1077 		if (!text_rela || text_rela->sym->type != STT_SECTION ||
1078 		    !text_rela->sym->sec->rodata)
1079 			continue;
1080 
1081 		table_offset = text_rela->addend;
1082 		table_sec = text_rela->sym->sec;
1083 
1084 		if (text_rela->type == R_X86_64_PC32)
1085 			table_offset += 4;
1086 
1087 		/*
1088 		 * Make sure the .rodata address isn't associated with a
1089 		 * symbol.  GCC jump tables are anonymous data.
1090 		 *
1091 		 * Also support C jump tables which are in the same format as
1092 		 * switch jump tables.  For objtool to recognize them, they
1093 		 * need to be placed in the C_JUMP_TABLE_SECTION section.  They
1094 		 * have symbols associated with them.
1095 		 */
1096 		if (find_symbol_containing(table_sec, table_offset) &&
1097 		    strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1098 			continue;
1099 
1100 		/*
1101 		 * Each table entry has a rela associated with it.  The rela
1102 		 * should reference text in the same function as the original
1103 		 * instruction.
1104 		 */
1105 		table_rela = find_rela_by_dest(file->elf, table_sec, table_offset);
1106 		if (!table_rela)
1107 			continue;
1108 		dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend);
1109 		if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1110 			continue;
1111 
1112 		/*
1113 		 * Use of RIP-relative switch jumps is quite rare, and
1114 		 * indicates a rare GCC quirk/bug which can leave dead code
1115 		 * behind.
1116 		 */
1117 		if (text_rela->type == R_X86_64_PC32)
1118 			file->ignore_unreachables = true;
1119 
1120 		return table_rela;
1121 	}
1122 
1123 	return NULL;
1124 }
1125 
1126 /*
1127  * First pass: Mark the head of each jump table so that in the next pass,
1128  * we know when a given jump table ends and the next one starts.
1129  */
1130 static void mark_func_jump_tables(struct objtool_file *file,
1131 				    struct symbol *func)
1132 {
1133 	struct instruction *insn, *last = NULL;
1134 	struct rela *rela;
1135 
1136 	func_for_each_insn(file, func, insn) {
1137 		if (!last)
1138 			last = insn;
1139 
1140 		/*
1141 		 * Store back-pointers for unconditional forward jumps such
1142 		 * that find_jump_table() can back-track using those and
1143 		 * avoid some potentially confusing code.
1144 		 */
1145 		if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1146 		    insn->offset > last->offset &&
1147 		    insn->jump_dest->offset > insn->offset &&
1148 		    !insn->jump_dest->first_jump_src) {
1149 
1150 			insn->jump_dest->first_jump_src = insn;
1151 			last = insn->jump_dest;
1152 		}
1153 
1154 		if (insn->type != INSN_JUMP_DYNAMIC)
1155 			continue;
1156 
1157 		rela = find_jump_table(file, func, insn);
1158 		if (rela) {
1159 			rela->jump_table_start = true;
1160 			insn->jump_table = rela;
1161 		}
1162 	}
1163 }
1164 
1165 static int add_func_jump_tables(struct objtool_file *file,
1166 				  struct symbol *func)
1167 {
1168 	struct instruction *insn;
1169 	int ret;
1170 
1171 	func_for_each_insn(file, func, insn) {
1172 		if (!insn->jump_table)
1173 			continue;
1174 
1175 		ret = add_jump_table(file, insn, insn->jump_table);
1176 		if (ret)
1177 			return ret;
1178 	}
1179 
1180 	return 0;
1181 }
1182 
1183 /*
1184  * For some switch statements, gcc generates a jump table in the .rodata
1185  * section which contains a list of addresses within the function to jump to.
1186  * This finds these jump tables and adds them to the insn->alts lists.
1187  */
1188 static int add_jump_table_alts(struct objtool_file *file)
1189 {
1190 	struct section *sec;
1191 	struct symbol *func;
1192 	int ret;
1193 
1194 	if (!file->rodata)
1195 		return 0;
1196 
1197 	for_each_sec(file, sec) {
1198 		list_for_each_entry(func, &sec->symbol_list, list) {
1199 			if (func->type != STT_FUNC)
1200 				continue;
1201 
1202 			mark_func_jump_tables(file, func);
1203 			ret = add_func_jump_tables(file, func);
1204 			if (ret)
1205 				return ret;
1206 		}
1207 	}
1208 
1209 	return 0;
1210 }
1211 
1212 static int read_unwind_hints(struct objtool_file *file)
1213 {
1214 	struct section *sec, *relasec;
1215 	struct rela *rela;
1216 	struct unwind_hint *hint;
1217 	struct instruction *insn;
1218 	struct cfi_reg *cfa;
1219 	int i;
1220 
1221 	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1222 	if (!sec)
1223 		return 0;
1224 
1225 	relasec = sec->rela;
1226 	if (!relasec) {
1227 		WARN("missing .rela.discard.unwind_hints section");
1228 		return -1;
1229 	}
1230 
1231 	if (sec->len % sizeof(struct unwind_hint)) {
1232 		WARN("struct unwind_hint size mismatch");
1233 		return -1;
1234 	}
1235 
1236 	file->hints = true;
1237 
1238 	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1239 		hint = (struct unwind_hint *)sec->data->d_buf + i;
1240 
1241 		rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint));
1242 		if (!rela) {
1243 			WARN("can't find rela for unwind_hints[%d]", i);
1244 			return -1;
1245 		}
1246 
1247 		insn = find_insn(file, rela->sym->sec, rela->addend);
1248 		if (!insn) {
1249 			WARN("can't find insn for unwind_hints[%d]", i);
1250 			return -1;
1251 		}
1252 
1253 		cfa = &insn->state.cfa;
1254 
1255 		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1256 			insn->save = true;
1257 			continue;
1258 
1259 		} else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1260 			insn->restore = true;
1261 			insn->hint = true;
1262 			continue;
1263 		}
1264 
1265 		insn->hint = true;
1266 
1267 		switch (hint->sp_reg) {
1268 		case ORC_REG_UNDEFINED:
1269 			cfa->base = CFI_UNDEFINED;
1270 			break;
1271 		case ORC_REG_SP:
1272 			cfa->base = CFI_SP;
1273 			break;
1274 		case ORC_REG_BP:
1275 			cfa->base = CFI_BP;
1276 			break;
1277 		case ORC_REG_SP_INDIRECT:
1278 			cfa->base = CFI_SP_INDIRECT;
1279 			break;
1280 		case ORC_REG_R10:
1281 			cfa->base = CFI_R10;
1282 			break;
1283 		case ORC_REG_R13:
1284 			cfa->base = CFI_R13;
1285 			break;
1286 		case ORC_REG_DI:
1287 			cfa->base = CFI_DI;
1288 			break;
1289 		case ORC_REG_DX:
1290 			cfa->base = CFI_DX;
1291 			break;
1292 		default:
1293 			WARN_FUNC("unsupported unwind_hint sp base reg %d",
1294 				  insn->sec, insn->offset, hint->sp_reg);
1295 			return -1;
1296 		}
1297 
1298 		cfa->offset = hint->sp_offset;
1299 		insn->state.type = hint->type;
1300 		insn->state.end = hint->end;
1301 	}
1302 
1303 	return 0;
1304 }
1305 
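/*
 * Hints come from the UNWIND_HINT* asm macros, which emit one struct
 * unwind_hint per annotated location, roughly (sketch):
 *
 *	.macro UNWIND_HINT sp_reg=ORC_REG_SP sp_offset=0 type=ORC_TYPE_CALL end=0
 *	.Lhint_\@:
 *		.pushsection .discard.unwind_hints
 *		.long .Lhint_\@ - .
 *		.short \sp_offset
 *		.byte \sp_reg
 *		.byte \type
 *		.byte \end
 *		.balign 4
 *		.popsection
 *	.endm
 *
 * e.g. UNWIND_HINT_EMPTY marks code where no useful unwind state exists.
 */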
1306 static int read_retpoline_hints(struct objtool_file *file)
1307 {
1308 	struct section *sec;
1309 	struct instruction *insn;
1310 	struct rela *rela;
1311 
1312 	sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1313 	if (!sec)
1314 		return 0;
1315 
1316 	list_for_each_entry(rela, &sec->rela_list, list) {
1317 		if (rela->sym->type != STT_SECTION) {
1318 			WARN("unexpected relocation symbol type in %s", sec->name);
1319 			return -1;
1320 		}
1321 
1322 		insn = find_insn(file, rela->sym->sec, rela->addend);
1323 		if (!insn) {
1324 			WARN("bad .discard.retpoline_safe entry");
1325 			return -1;
1326 		}
1327 
1328 		if (insn->type != INSN_JUMP_DYNAMIC &&
1329 		    insn->type != INSN_CALL_DYNAMIC) {
1330 			WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1331 				  insn->sec, insn->offset);
1332 			return -1;
1333 		}
1334 
1335 		insn->retpoline_safe = true;
1336 	}
1337 
1338 	return 0;
1339 }
1340 
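/*
 * These entries come from the ANNOTATE_RETPOLINE_SAFE asm macro, roughly
 * (sketch):
 *
 *	.macro ANNOTATE_RETPOLINE_SAFE
 *	999:
 *		.pushsection .discard.retpoline_safe
 *		_ASM_PTR 999b
 *		.popsection
 *	.endm
 *
 * used to mark the few indirect calls/jumps (e.g. under paravirt) which are
 * known to be safe without a retpoline.
 */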
1341 static void mark_rodata(struct objtool_file *file)
1342 {
1343 	struct section *sec;
1344 	bool found = false;
1345 
1346 	/*
1347 	 * Search for the following rodata sections, each of which can
1348 	 * potentially contain jump tables:
1349 	 *
1350 	 * - .rodata: can contain GCC switch tables
1351 	 * - .rodata.<func>: same, if -fdata-sections is being used
1352 	 * - .rodata..c_jump_table: contains C annotated jump tables
1353 	 *
1354 	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1355 	 */
1356 	for_each_sec(file, sec) {
1357 		if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1358 		    !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1359 			sec->rodata = true;
1360 			found = true;
1361 		}
1362 	}
1363 
1364 	file->rodata = found;
1365 }
1366 
1367 static int decode_sections(struct objtool_file *file)
1368 {
1369 	int ret;
1370 
1371 	mark_rodata(file);
1372 
1373 	ret = decode_instructions(file);
1374 	if (ret)
1375 		return ret;
1376 
1377 	ret = add_dead_ends(file);
1378 	if (ret)
1379 		return ret;
1380 
1381 	add_ignores(file);
1382 	add_uaccess_safe(file);
1383 
1384 	ret = add_ignore_alternatives(file);
1385 	if (ret)
1386 		return ret;
1387 
1388 	ret = add_jump_destinations(file);
1389 	if (ret)
1390 		return ret;
1391 
1392 	ret = add_special_section_alts(file);
1393 	if (ret)
1394 		return ret;
1395 
1396 	ret = add_call_destinations(file);
1397 	if (ret)
1398 		return ret;
1399 
1400 	ret = add_jump_table_alts(file);
1401 	if (ret)
1402 		return ret;
1403 
1404 	ret = read_unwind_hints(file);
1405 	if (ret)
1406 		return ret;
1407 
1408 	ret = read_retpoline_hints(file);
1409 	if (ret)
1410 		return ret;
1411 
1412 	return 0;
1413 }
1414 
1415 static bool is_fentry_call(struct instruction *insn)
1416 {
1417 	if (insn->type == INSN_CALL &&
1418 	    insn->call_dest->type == STT_NOTYPE &&
1419 	    !strcmp(insn->call_dest->name, "__fentry__"))
1420 		return true;
1421 
1422 	return false;
1423 }
1424 
1425 static bool has_modified_stack_frame(struct insn_state *state)
1426 {
1427 	int i;
1428 
1429 	if (state->cfa.base != initial_func_cfi.cfa.base ||
1430 	    state->cfa.offset != initial_func_cfi.cfa.offset ||
1431 	    state->stack_size != initial_func_cfi.cfa.offset ||
1432 	    state->drap)
1433 		return true;
1434 
1435 	for (i = 0; i < CFI_NUM_REGS; i++)
1436 		if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1437 		    state->regs[i].offset != initial_func_cfi.regs[i].offset)
1438 			return true;
1439 
1440 	return false;
1441 }
1442 
1443 static bool has_valid_stack_frame(struct insn_state *state)
1444 {
1445 	if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1446 	    state->regs[CFI_BP].offset == -16)
1447 		return true;
1448 
1449 	if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1450 		return true;
1451 
1452 	return false;
1453 }
1454 
1455 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1456 {
1457 	struct cfi_reg *cfa = &state->cfa;
1458 	struct stack_op *op = &insn->stack_op;
1459 
1460 	if (cfa->base != CFI_SP)
1461 		return 0;
1462 
1463 	/* push */
1464 	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1465 		cfa->offset += 8;
1466 
1467 	/* pop */
1468 	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1469 		cfa->offset -= 8;
1470 
1471 	/* add immediate to sp */
1472 	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1473 	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1474 		cfa->offset -= op->src.offset;
1475 
1476 	return 0;
1477 }
1478 
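/*
 * Example (sketch): in a region typed ORC_TYPE_REGS (where only the CFA is
 * tracked), with cfa.base == CFI_SP and cfa.offset == 8, a "push %rbx"
 * raises cfa.offset to 16 and the matching "pop %rbx" brings it back to 8,
 * so the CFA keeps pointing at the same stack slot throughout.
 */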
1479 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1480 		     int offset)
1481 {
1482 	if (arch_callee_saved_reg(reg) &&
1483 	    state->regs[reg].base == CFI_UNDEFINED) {
1484 		state->regs[reg].base = base;
1485 		state->regs[reg].offset = offset;
1486 	}
1487 }
1488 
1489 static void restore_reg(struct insn_state *state, unsigned char reg)
1490 {
1491 	state->regs[reg].base = initial_func_cfi.regs[reg].base;
1492 	state->regs[reg].offset = initial_func_cfi.regs[reg].offset;
1493 }
1494 
1495 /*
1496  * A note about DRAP stack alignment:
1497  *
1498  * GCC has the concept of a DRAP register, which is used to help keep track of
1499  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1500  * register.  The typical DRAP pattern is:
1501  *
1502  *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
1503  *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
1504  *   41 ff 72 f8		pushq  -0x8(%r10)
1505  *   55				push   %rbp
1506  *   48 89 e5			mov    %rsp,%rbp
1507  *				(more pushes)
1508  *   41 52			push   %r10
1509  *				...
1510  *   41 5a			pop    %r10
1511  *				(more pops)
1512  *   5d				pop    %rbp
1513  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1514  *   c3				retq
1515  *
1516  * There are some variations in the epilogues, like:
1517  *
1518  *   5b				pop    %rbx
1519  *   41 5a			pop    %r10
1520  *   41 5c			pop    %r12
1521  *   41 5d			pop    %r13
1522  *   41 5e			pop    %r14
1523  *   c9				leaveq
1524  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1525  *   c3				retq
1526  *
1527  * and:
1528  *
1529  *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
1530  *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
1531  *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
1532  *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
1533  *   c9				leaveq
1534  *   49 8d 62 f8		lea    -0x8(%r10),%rsp
1535  *   c3				retq
1536  *
1537  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1538  * restored beforehand:
1539  *
1540  *   41 55			push   %r13
1541  *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
1542  *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
1543  *				...
1544  *   49 8d 65 f0		lea    -0x10(%r13),%rsp
1545  *   41 5d			pop    %r13
1546  *   c3				retq
1547  */
1548 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1549 {
1550 	struct stack_op *op = &insn->stack_op;
1551 	struct cfi_reg *cfa = &state->cfa;
1552 	struct cfi_reg *regs = state->regs;
1553 
1554 	/* stack operations don't make sense with an undefined CFA */
1555 	if (cfa->base == CFI_UNDEFINED) {
1556 		if (insn->func) {
1557 			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1558 			return -1;
1559 		}
1560 		return 0;
1561 	}
1562 
1563 	if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1564 		return update_insn_state_regs(insn, state);
1565 
1566 	switch (op->dest.type) {
1567 
1568 	case OP_DEST_REG:
1569 		switch (op->src.type) {
1570 
1571 		case OP_SRC_REG:
1572 			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1573 			    cfa->base == CFI_SP &&
1574 			    regs[CFI_BP].base == CFI_CFA &&
1575 			    regs[CFI_BP].offset == -cfa->offset) {
1576 
1577 				/* mov %rsp, %rbp */
1578 				cfa->base = op->dest.reg;
1579 				state->bp_scratch = false;
1580 			}
1581 
1582 			else if (op->src.reg == CFI_SP &&
1583 				 op->dest.reg == CFI_BP && state->drap) {
1584 
1585 				/* drap: mov %rsp, %rbp */
1586 				regs[CFI_BP].base = CFI_BP;
1587 				regs[CFI_BP].offset = -state->stack_size;
1588 				state->bp_scratch = false;
1589 			}
1590 
1591 			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1592 
1593 				/*
1594 				 * mov %rsp, %reg
1595 				 *
1596 				 * This is needed for the rare case where GCC
1597 				 * does:
1598 				 *
1599 				 *   mov    %rsp, %rax
1600 				 *   ...
1601 				 *   mov    %rax, %rsp
1602 				 */
1603 				state->vals[op->dest.reg].base = CFI_CFA;
1604 				state->vals[op->dest.reg].offset = -state->stack_size;
1605 			}
1606 
1607 			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1608 				 cfa->base == CFI_BP) {
1609 
1610 				/*
1611 				 * mov %rbp, %rsp
1612 				 *
1613 				 * Restore the original stack pointer (Clang).
1614 				 */
1615 				state->stack_size = -state->regs[CFI_BP].offset;
1616 			}
1617 
1618 			else if (op->dest.reg == cfa->base) {
1619 
1620 				/* mov %reg, %rsp */
1621 				if (cfa->base == CFI_SP &&
1622 				    state->vals[op->src.reg].base == CFI_CFA) {
1623 
1624 					/*
1625 					 * This is needed for the rare case
1626 					 * where GCC does something dumb like:
1627 					 *
1628 					 *   lea    0x8(%rsp), %rcx
1629 					 *   ...
1630 					 *   mov    %rcx, %rsp
1631 					 */
1632 					cfa->offset = -state->vals[op->src.reg].offset;
1633 					state->stack_size = cfa->offset;
1634 
1635 				} else {
1636 					cfa->base = CFI_UNDEFINED;
1637 					cfa->offset = 0;
1638 				}
1639 			}
1640 
1641 			break;
1642 
1643 		case OP_SRC_ADD:
1644 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1645 
1646 				/* add imm, %rsp */
1647 				state->stack_size -= op->src.offset;
1648 				if (cfa->base == CFI_SP)
1649 					cfa->offset -= op->src.offset;
1650 				break;
1651 			}
1652 
1653 			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1654 
1655 				/* lea disp(%rbp), %rsp */
1656 				state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1657 				break;
1658 			}
1659 
1660 			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1661 
1662 				/* drap: lea disp(%rsp), %drap */
1663 				state->drap_reg = op->dest.reg;
1664 
1665 				/*
1666 				 * lea disp(%rsp), %reg
1667 				 *
1668 				 * This is needed for the rare case where GCC
1669 				 * does something dumb like:
1670 				 *
1671 				 *   lea    0x8(%rsp), %rcx
1672 				 *   ...
1673 				 *   mov    %rcx, %rsp
1674 				 */
1675 				state->vals[op->dest.reg].base = CFI_CFA;
1676 				state->vals[op->dest.reg].offset = \
1677 					-state->stack_size + op->src.offset;
1678 
1679 				break;
1680 			}
1681 
1682 			if (state->drap && op->dest.reg == CFI_SP &&
1683 			    op->src.reg == state->drap_reg) {
1684 
1685 				 /* drap: lea disp(%drap), %rsp */
1686 				cfa->base = CFI_SP;
1687 				cfa->offset = state->stack_size = -op->src.offset;
1688 				state->drap_reg = CFI_UNDEFINED;
1689 				state->drap = false;
1690 				break;
1691 			}
1692 
1693 			if (op->dest.reg == state->cfa.base) {
1694 				WARN_FUNC("unsupported stack register modification",
1695 					  insn->sec, insn->offset);
1696 				return -1;
1697 			}
1698 
1699 			break;
1700 
1701 		case OP_SRC_AND:
1702 			if (op->dest.reg != CFI_SP ||
1703 			    (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1704 			    (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1705 				WARN_FUNC("unsupported stack pointer realignment",
1706 					  insn->sec, insn->offset);
1707 				return -1;
1708 			}
1709 
1710 			if (state->drap_reg != CFI_UNDEFINED) {
1711 				/* drap: and imm, %rsp */
1712 				cfa->base = state->drap_reg;
1713 				cfa->offset = state->stack_size = 0;
1714 				state->drap = true;
1715 			}
1716 
1717 			/*
1718 			 * Older versions of GCC (4.8ish) realign the stack
1719 			 * without DRAP, with a frame pointer.
1720 			 */
1721 
1722 			break;
1723 
1724 		case OP_SRC_POP:
1725 		case OP_SRC_POPF:
1726 			if (!state->drap && op->dest.reg == cfa->base) {
1727 
1728 				/* pop %rbp */
1729 				cfa->base = CFI_SP;
1730 			}
1731 
1732 			if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1733 			    op->dest.reg == state->drap_reg &&
1734 			    state->drap_offset == -state->stack_size) {
1735 
1736 				/* drap: pop %drap */
1737 				cfa->base = state->drap_reg;
1738 				cfa->offset = 0;
1739 				state->drap_offset = -1;
1740 
1741 			} else if (regs[op->dest.reg].offset == -state->stack_size) {
1742 
1743 				/* pop %reg */
1744 				restore_reg(state, op->dest.reg);
1745 			}
1746 
1747 			state->stack_size -= 8;
1748 			if (cfa->base == CFI_SP)
1749 				cfa->offset -= 8;
1750 
1751 			break;
1752 
1753 		case OP_SRC_REG_INDIRECT:
1754 			if (state->drap && op->src.reg == CFI_BP &&
1755 			    op->src.offset == state->drap_offset) {
1756 
1757 				/* drap: mov disp(%rbp), %drap */
1758 				cfa->base = state->drap_reg;
1759 				cfa->offset = 0;
1760 				state->drap_offset = -1;
1761 			}
1762 
1763 			if (state->drap && op->src.reg == CFI_BP &&
1764 			    op->src.offset == regs[op->dest.reg].offset) {
1765 
1766 				/* drap: mov disp(%rbp), %reg */
1767 				restore_reg(state, op->dest.reg);
1768 
1769 			} else if (op->src.reg == cfa->base &&
1770 			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1771 
1772 				/* mov disp(%rbp), %reg */
1773 				/* mov disp(%rsp), %reg */
1774 				restore_reg(state, op->dest.reg);
1775 			}
1776 
1777 			break;
1778 
1779 		default:
1780 			WARN_FUNC("unknown stack-related instruction",
1781 				  insn->sec, insn->offset);
1782 			return -1;
1783 		}
1784 
1785 		break;
1786 
1787 	case OP_DEST_PUSH:
1788 	case OP_DEST_PUSHF:
1789 		state->stack_size += 8;
1790 		if (cfa->base == CFI_SP)
1791 			cfa->offset += 8;
1792 
1793 		if (op->src.type != OP_SRC_REG)
1794 			break;
1795 
1796 		if (state->drap) {
1797 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1798 
1799 				/* drap: push %drap */
1800 				cfa->base = CFI_BP_INDIRECT;
1801 				cfa->offset = -state->stack_size;
1802 
1803 				/* save drap so we know when to restore it */
1804 				state->drap_offset = -state->stack_size;
1805 
1806 			} else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1807 
1808 				/* drap: push %rbp */
1809 				state->stack_size = 0;
1810 
1811 			} else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1812 
1813 				/* drap: push %reg */
1814 				save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1815 			}
1816 
1817 		} else {
1818 
1819 			/* push %reg */
1820 			save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1821 		}
1822 
1823 		/* detect when asm code uses rbp as a scratch register */
1824 		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1825 		    cfa->base != CFI_BP)
1826 			state->bp_scratch = true;
1827 		break;
1828 
1829 	case OP_DEST_REG_INDIRECT:
1830 
1831 		if (state->drap) {
1832 			if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1833 
1834 				/* drap: mov %drap, disp(%rbp) */
1835 				cfa->base = CFI_BP_INDIRECT;
1836 				cfa->offset = op->dest.offset;
1837 
1838 				/* save drap offset so we know when to restore it */
1839 				state->drap_offset = op->dest.offset;
1840 			}
1841 
1842 			else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1843 
1844 				/* drap: mov reg, disp(%rbp) */
1845 				save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1846 			}
1847 
1848 		} else if (op->dest.reg == cfa->base) {
1849 
1850 			/* mov reg, disp(%rbp) */
1851 			/* mov reg, disp(%rsp) */
1852 			save_reg(state, op->src.reg, CFI_CFA,
1853 				 op->dest.offset - state->cfa.offset);
1854 		}
1855 
1856 		break;
1857 
1858 	case OP_DEST_LEAVE:
1859 		if ((!state->drap && cfa->base != CFI_BP) ||
1860 		    (state->drap && cfa->base != state->drap_reg)) {
1861 			WARN_FUNC("leave instruction with modified stack frame",
1862 				  insn->sec, insn->offset);
1863 			return -1;
1864 		}
1865 
1866 		/* leave (mov %rbp, %rsp; pop %rbp) */
1867 
1868 		state->stack_size = -state->regs[CFI_BP].offset - 8;
1869 		restore_reg(state, CFI_BP);
1870 
1871 		if (!state->drap) {
1872 			cfa->base = CFI_SP;
1873 			cfa->offset -= 8;
1874 		}
1875 
1876 		break;
1877 
1878 	case OP_DEST_MEM:
1879 		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1880 			WARN_FUNC("unknown stack-related memory operation",
1881 				  insn->sec, insn->offset);
1882 			return -1;
1883 		}
1884 
1885 		/* pop mem */
1886 		state->stack_size -= 8;
1887 		if (cfa->base == CFI_SP)
1888 			cfa->offset -= 8;
1889 
1890 		break;
1891 
1892 	default:
1893 		WARN_FUNC("unknown stack-related instruction",
1894 			  insn->sec, insn->offset);
1895 		return -1;
1896 	}
1897 
1898 	return 0;
1899 }
1900 
1901 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1902 {
1903 	struct insn_state *state1 = &insn->state, *state2 = state;
1904 	int i;
1905 
1906 	if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1907 		WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1908 			  insn->sec, insn->offset,
1909 			  state1->cfa.base, state1->cfa.offset,
1910 			  state2->cfa.base, state2->cfa.offset);
1911 
1912 	} else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1913 		for (i = 0; i < CFI_NUM_REGS; i++) {
1914 			if (!memcmp(&state1->regs[i], &state2->regs[i],
1915 				    sizeof(struct cfi_reg)))
1916 				continue;
1917 
1918 			WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1919 				  insn->sec, insn->offset,
1920 				  i, state1->regs[i].base, state1->regs[i].offset,
1921 				  i, state2->regs[i].base, state2->regs[i].offset);
1922 			break;
1923 		}
1924 
1925 	} else if (state1->type != state2->type) {
1926 		WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1927 			  insn->sec, insn->offset, state1->type, state2->type);
1928 
1929 	} else if (state1->drap != state2->drap ||
1930 		 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1931 		 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1932 		WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1933 			  insn->sec, insn->offset,
1934 			  state1->drap, state1->drap_reg, state1->drap_offset,
1935 			  state2->drap, state2->drap_reg, state2->drap_offset);
1936 
1937 	} else
1938 		return true;
1939 
1940 	return false;
1941 }
1942 
1943 static inline bool func_uaccess_safe(struct symbol *func)
1944 {
1945 	if (func)
1946 		return func->uaccess_safe;
1947 
1948 	return false;
1949 }
1950 
1951 static inline const char *call_dest_name(struct instruction *insn)
1952 {
1953 	if (insn->call_dest)
1954 		return insn->call_dest->name;
1955 
1956 	return "{dynamic}";
1957 }
1958 
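/*
 * A call with UACCESS enabled (i.e. after STAC) is only allowed if the callee
 * has been whitelisted as uaccess_safe, and a call with the direction flag
 * set is never allowed, since the ABI assumes DF is clear on function entry.
 */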
1959 static int validate_call(struct instruction *insn, struct insn_state *state)
1960 {
1961 	if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1962 		WARN_FUNC("call to %s() with UACCESS enabled",
1963 				insn->sec, insn->offset, call_dest_name(insn));
1964 		return 1;
1965 	}
1966 
1967 	if (state->df) {
1968 		WARN_FUNC("call to %s() with DF set",
1969 				insn->sec, insn->offset, call_dest_name(insn));
1970 		return 1;
1971 	}
1972 
1973 	return 0;
1974 }
1975 
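/*
 * A sibling (tail) call leaves the current function, so the stack frame must
 * already be fully unwound; beyond that it is validated like a normal call.
 */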
1976 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1977 {
1978 	if (has_modified_stack_frame(state)) {
1979 		WARN_FUNC("sibling call from callable instruction with modified stack frame",
1980 				insn->sec, insn->offset);
1981 		return 1;
1982 	}
1983 
1984 	return validate_call(insn, state);
1985 }
1986 
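/*
 * A function may only return with UACCESS enabled if it is marked
 * uaccess_safe (and a uaccess_safe function must leave it enabled), DF must
 * be clear, the stack frame must be fully unwound, and RBP must not have
 * been used as a scratch register.
 */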
1987 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
1988 {
1989 	if (state->uaccess && !func_uaccess_safe(func)) {
1990 		WARN_FUNC("return with UACCESS enabled",
1991 			  insn->sec, insn->offset);
1992 		return 1;
1993 	}
1994 
1995 	if (!state->uaccess && func_uaccess_safe(func)) {
1996 		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
1997 			  insn->sec, insn->offset);
1998 		return 1;
1999 	}
2000 
2001 	if (state->df) {
2002 		WARN_FUNC("return with DF set",
2003 			  insn->sec, insn->offset);
2004 		return 1;
2005 	}
2006 
2007 	if (func && has_modified_stack_frame(state)) {
2008 		WARN_FUNC("return with modified stack frame",
2009 			  insn->sec, insn->offset);
2010 		return 1;
2011 	}
2012 
2013 	if (state->bp_scratch) {
2014 		WARN_FUNC("BP used as a scratch register",
2015 			  insn->sec, insn->offset);
2016 		return 1;
2017 	}
2018 
2019 	return 0;
2020 }
2021 
2022 /*
2023  * Follow the branch starting at the given instruction, and recursively follow
2024  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2025  * each instruction and validate all the rules described in
2026  * tools/objtool/Documentation/stack-validation.txt.
2027  */
2028 static int validate_branch(struct objtool_file *file, struct symbol *func,
2029 			   struct instruction *first, struct insn_state state)
2030 {
2031 	struct alternative *alt;
2032 	struct instruction *insn, *next_insn;
2033 	struct section *sec;
2034 	u8 visited;
2035 	int ret;
2036 
2037 	insn = first;
2038 	sec = insn->sec;
2039 
2040 	if (insn->alt_group && list_empty(&insn->alts)) {
2041 		WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
2042 			  sec, insn->offset);
2043 		return 1;
2044 	}
2045 
2046 	while (1) {
2047 		next_insn = next_insn_same_sec(file, insn);
2048 
2049 		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2050 			WARN("%s() falls through to next function %s()",
2051 			     func->name, insn->func->name);
2052 			return 1;
2053 		}
2054 
2055 		if (func && insn->ignore) {
2056 			WARN_FUNC("BUG: why am I validating an ignored function?",
2057 				  sec, insn->offset);
2058 			return 1;
2059 		}
2060 
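		/*
		 * An instruction can legitimately be reached both with
		 * UACCESS enabled and disabled, so track the two cases as
		 * separate bits in ->visited and re-validate the path when a
		 * not-yet-seen combination shows up.
		 */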
2061 		visited = 1 << state.uaccess;
2062 		if (insn->visited) {
2063 			if (!insn->hint && !insn_state_match(insn, &state))
2064 				return 1;
2065 
2066 			if (insn->visited & visited)
2067 				return 0;
2068 		}
2069 
2070 		if (insn->hint) {
2071 			if (insn->restore) {
2072 				struct instruction *save_insn, *i;
2073 
2074 				i = insn;
2075 				save_insn = NULL;
2076 				sym_for_each_insn_continue_reverse(file, func, i) {
2077 					if (i->save) {
2078 						save_insn = i;
2079 						break;
2080 					}
2081 				}
2082 
2083 				if (!save_insn) {
2084 					WARN_FUNC("no corresponding CFI save for CFI restore",
2085 						  sec, insn->offset);
2086 					return 1;
2087 				}
2088 
2089 				if (!save_insn->visited) {
2090 					/*
2091 					 * Oops, no state to copy yet.
2092 					 * Hopefully we can reach this
2093 					 * instruction from another branch
2094 					 * after the save insn has been
2095 					 * visited.
2096 					 */
2097 					if (insn == first)
2098 						return 0;
2099 
2100 					WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2101 						  sec, insn->offset);
2102 					return 1;
2103 				}
2104 
2105 				insn->state = save_insn->state;
2106 			}
2107 
2108 			state = insn->state;
2109 
2110 		} else
2111 			insn->state = state;
2112 
2113 		insn->visited |= visited;
2114 
2115 		if (!insn->ignore_alts) {
2116 			bool skip_orig = false;
2117 
2118 			list_for_each_entry(alt, &insn->alts, list) {
2119 				if (alt->skip_orig)
2120 					skip_orig = true;
2121 
2122 				ret = validate_branch(file, func, alt->insn, state);
2123 				if (ret) {
2124 					if (backtrace)
2125 						BT_FUNC("(alt)", insn);
2126 					return ret;
2127 				}
2128 			}
2129 
2130 			if (skip_orig)
2131 				return 0;
2132 		}
2133 
2134 		switch (insn->type) {
2135 
2136 		case INSN_RETURN:
2137 			return validate_return(func, insn, &state);
2138 
2139 		case INSN_CALL:
2140 		case INSN_CALL_DYNAMIC:
2141 			ret = validate_call(insn, &state);
2142 			if (ret)
2143 				return ret;
2144 
2145 			if (!no_fp && func && !is_fentry_call(insn) &&
2146 			    !has_valid_stack_frame(&state)) {
2147 				WARN_FUNC("call without frame pointer save/setup",
2148 					  sec, insn->offset);
2149 				return 1;
2150 			}
2151 
2152 			if (dead_end_function(file, insn->call_dest))
2153 				return 0;
2154 
2155 			break;
2156 
2157 		case INSN_JUMP_CONDITIONAL:
2158 		case INSN_JUMP_UNCONDITIONAL:
2159 			if (func && is_sibling_call(insn)) {
2160 				ret = validate_sibling_call(insn, &state);
2161 				if (ret)
2162 					return ret;
2163 
2164 			} else if (insn->jump_dest) {
2165 				ret = validate_branch(file, func,
2166 						      insn->jump_dest, state);
2167 				if (ret) {
2168 					if (backtrace)
2169 						BT_FUNC("(branch)", insn);
2170 					return ret;
2171 				}
2172 			}
2173 
2174 			if (insn->type == INSN_JUMP_UNCONDITIONAL)
2175 				return 0;
2176 
2177 			break;
2178 
2179 		case INSN_JUMP_DYNAMIC:
2180 		case INSN_JUMP_DYNAMIC_CONDITIONAL:
2181 			if (func && is_sibling_call(insn)) {
2182 				ret = validate_sibling_call(insn, &state);
2183 				if (ret)
2184 					return ret;
2185 			}
2186 
2187 			if (insn->type == INSN_JUMP_DYNAMIC)
2188 				return 0;
2189 
2190 			break;
2191 
2192 		case INSN_CONTEXT_SWITCH:
2193 			if (func && (!next_insn || !next_insn->hint)) {
2194 				WARN_FUNC("unsupported instruction in callable function",
2195 					  sec, insn->offset);
2196 				return 1;
2197 			}
2198 			return 0;
2199 
2200 		case INSN_STACK:
2201 			if (update_insn_state(insn, &state))
2202 				return 1;
2203 
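			/*
			 * PUSHF/POPF save and restore EFLAGS.AC, so mirror
			 * that with a small shift-register style stack of
			 * uaccess bits: PUSHF pushes the current uaccess
			 * state (seeding the stack with a sentinel 1 bit the
			 * first time), POPF pops it back, and once only the
			 * sentinel remains the stack is considered empty
			 * again.
			 */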
2204 			if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2205 				if (!state.uaccess_stack) {
2206 					state.uaccess_stack = 1;
2207 				} else if (state.uaccess_stack >> 31) {
2208 					WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2209 					return 1;
2210 				}
2211 				state.uaccess_stack <<= 1;
2212 				state.uaccess_stack  |= state.uaccess;
2213 			}
2214 
2215 			if (insn->stack_op.src.type == OP_SRC_POPF) {
2216 				if (state.uaccess_stack) {
2217 					state.uaccess = state.uaccess_stack & 1;
2218 					state.uaccess_stack >>= 1;
2219 					if (state.uaccess_stack == 1)
2220 						state.uaccess_stack = 0;
2221 				}
2222 			}
2223 
2224 			break;
2225 
2226 		case INSN_STAC:
2227 			if (state.uaccess) {
2228 				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2229 				return 1;
2230 			}
2231 
2232 			state.uaccess = true;
2233 			break;
2234 
2235 		case INSN_CLAC:
2236 			if (!state.uaccess && func) {
2237 				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2238 				return 1;
2239 			}
2240 
2241 			if (func_uaccess_safe(func) && !state.uaccess_stack) {
2242 				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2243 				return 1;
2244 			}
2245 
2246 			state.uaccess = false;
2247 			break;
2248 
2249 		case INSN_STD:
2250 			if (state.df)
2251 				WARN_FUNC("recursive STD", sec, insn->offset);
2252 
2253 			state.df = true;
2254 			break;
2255 
2256 		case INSN_CLD:
2257 			if (!state.df && func)
2258 				WARN_FUNC("redundant CLD", sec, insn->offset);
2259 
2260 			state.df = false;
2261 			break;
2262 
2263 		default:
2264 			break;
2265 		}
2266 
2267 		if (insn->dead_end)
2268 			return 0;
2269 
2270 		if (!next_insn) {
2271 			if (state.cfa.base == CFI_UNDEFINED)
2272 				return 0;
2273 			WARN("%s: unexpected end of section", sec->name);
2274 			return 1;
2275 		}
2276 
2277 		insn = next_insn;
2278 	}
2279 
2280 	return 0;
2281 }
2282 
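/*
 * Validate any code annotated with an unwind hint that wasn't already reached
 * from a function entry point, starting from the state recorded by the hint.
 */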
2283 static int validate_unwind_hints(struct objtool_file *file)
2284 {
2285 	struct instruction *insn;
2286 	int ret, warnings = 0;
2287 	struct insn_state state;
2288 
2289 	if (!file->hints)
2290 		return 0;
2291 
2292 	clear_insn_state(&state);
2293 
2294 	for_each_insn(file, insn) {
2295 		if (insn->hint && !insn->visited) {
2296 			ret = validate_branch(file, insn->func, insn, state);
2297 			if (ret && backtrace)
2298 				BT_FUNC("<=== (hint)", insn);
2299 			warnings += ret;
2300 		}
2301 	}
2302 
2303 	return warnings;
2304 }
2305 
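/*
 * In a retpoline build, indirect jumps and calls should have been converted
 * to retpoline thunks; flag any remaining naked indirect branch that hasn't
 * been explicitly annotated as retpoline-safe.
 */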
2306 static int validate_retpoline(struct objtool_file *file)
2307 {
2308 	struct instruction *insn;
2309 	int warnings = 0;
2310 
2311 	for_each_insn(file, insn) {
2312 		if (insn->type != INSN_JUMP_DYNAMIC &&
2313 		    insn->type != INSN_CALL_DYNAMIC)
2314 			continue;
2315 
2316 		if (insn->retpoline_safe)
2317 			continue;
2318 
2319 		/*
2320 		 * .init.text code is run before userspace and thus doesn't
2321 		 * strictly need retpolines, except for modules: they are
2322 		 * loaded late, so they very much do need retpolines in
2323 		 * their .init.text.
2324 		 */
2325 		if (!strcmp(insn->sec->name, ".init.text") && !module)
2326 			continue;
2327 
2328 		WARN_FUNC("indirect %s found in RETPOLINE build",
2329 			  insn->sec, insn->offset,
2330 			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2331 
2332 		warnings++;
2333 	}
2334 
2335 	return warnings;
2336 }
2337 
2338 static bool is_kasan_insn(struct instruction *insn)
2339 {
2340 	return (insn->type == INSN_CALL &&
2341 		!strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2342 }
2343 
2344 static bool is_ubsan_insn(struct instruction *insn)
2345 {
2346 	return (insn->type == INSN_CALL &&
2347 		!strcmp(insn->call_dest->name,
2348 			"__ubsan_handle_builtin_unreachable"));
2349 }
2350 
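/*
 * Some unreachable instructions are expected and shouldn't be warned about:
 * ignored or NOP padding instructions, code in exception fixup and
 * alternative-replacement sections, compiler-emitted traps after
 * __builtin_unreachable(), and KASAN/UBSAN instrumentation calls.
 */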
2351 static bool ignore_unreachable_insn(struct instruction *insn)
2352 {
2353 	int i;
2354 
2355 	if (insn->ignore || insn->type == INSN_NOP)
2356 		return true;
2357 
2358 	/*
2359 	 * Ignore any unused exceptions.  This can happen when a whitelisted
2360 	 * function has an exception table entry.
2361 	 *
2362 	 * Also ignore alternative replacement instructions.  This can happen
2363 	 * when a whitelisted function uses one of the ALTERNATIVE macros.
2364 	 */
2365 	if (!strcmp(insn->sec->name, ".fixup") ||
2366 	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
2367 	    !strcmp(insn->sec->name, ".altinstr_aux"))
2368 		return true;
2369 
2370 	if (!insn->func)
2371 		return false;
2372 
2373 	/*
2374 	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
2375 	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
2376 	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
2377 	 * (or occasionally a JMP to UD2).
2378 	 */
2379 	if (list_prev_entry(insn, list)->dead_end &&
2380 	    (insn->type == INSN_BUG ||
2381 	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
2382 	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
2383 		return true;
2384 
2385 	/*
2386 	 * Check if this (or a subsequent) instruction is related to
2387 	 * CONFIG_UBSAN or CONFIG_KASAN.
2388 	 *
2389 	 * End the search at 5 instructions to avoid going into the weeds.
2390 	 */
2391 	for (i = 0; i < 5; i++) {
2392 
2393 		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2394 			return true;
2395 
2396 		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2397 			if (insn->jump_dest &&
2398 			    insn->jump_dest->func == insn->func) {
2399 				insn = insn->jump_dest;
2400 				continue;
2401 			}
2402 
2403 			break;
2404 		}
2405 
2406 		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2407 			break;
2408 
2409 		insn = list_next_entry(insn, list);
2410 	}
2411 
2412 	return false;
2413 }
2414 
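/*
 * Validate each STT_FUNC symbol in the section, starting from the
 * architecture's initial CFI state and from the function's declared
 * uaccess-safety.  Aliases and cold sub-functions are skipped here; they are
 * covered via their canonical/parent function.
 */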
2415 static int validate_section(struct objtool_file *file, struct section *sec)
2416 {
2417 	struct symbol *func;
2418 	struct instruction *insn;
2419 	struct insn_state state;
2420 	int ret, warnings = 0;
2421 
2422 	list_for_each_entry(func, &sec->symbol_list, list) {
2423 		if (func->type != STT_FUNC)
2424 			continue;
2425 
2426 		if (!func->len) {
2427 			WARN("%s() is missing an ELF size annotation",
2428 			     func->name);
2429 			warnings++;
2430 		}
2431 
2432 		if (func->pfunc != func || func->alias != func)
2433 			continue;
2434 
2435 		insn = find_insn(file, sec, func->offset);
2436 		if (!insn || insn->ignore || insn->visited)
2437 			continue;
2438 
2439 		clear_insn_state(&state);
2440 		state.cfa = initial_func_cfi.cfa;
2441 		memcpy(&state.regs, &initial_func_cfi.regs,
2442 		       CFI_NUM_REGS * sizeof(struct cfi_reg));
2443 		state.stack_size = initial_func_cfi.cfa.offset;
2444 
2445 		state.uaccess = func->uaccess_safe;
2446 
2447 		ret = validate_branch(file, func, insn, state);
2448 		if (ret && backtrace)
2449 			BT_FUNC("<=== (func)", insn);
2450 		warnings += ret;
2451 	}
2452 
2453 	return warnings;
2454 }
2455 
2456 static int validate_functions(struct objtool_file *file)
2457 {
2458 	struct section *sec;
2459 	int warnings = 0;
2460 
2461 	for_each_sec(file, sec)
2462 		warnings += validate_section(file, sec);
2463 
2464 	return warnings;
2465 }
2466 
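/*
 * After everything reachable from function entries and unwind hints has been
 * visited, anything still unvisited (and not excused by
 * ignore_unreachable_insn()) is code that objtool couldn't account for.
 */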
2467 static int validate_reachable_instructions(struct objtool_file *file)
2468 {
2469 	struct instruction *insn;
2470 
2471 	if (file->ignore_unreachables)
2472 		return 0;
2473 
2474 	for_each_insn(file, insn) {
2475 		if (insn->visited || ignore_unreachable_insn(insn))
2476 			continue;
2477 
2478 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2479 		return 1;
2480 	}
2481 
2482 	return 0;
2483 }
2484 
2485 static struct objtool_file file;
2486 
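/*
 * Main entry point: decode the object file, run the validation passes, and
 * (when requested) generate and write the ORC unwind sections.  Individual
 * warnings are reported but don't fail the build; a negative return from any
 * pass is a fatal error and is propagated to the caller.
 */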
2487 int check(const char *_objname, bool orc)
2488 {
2489 	int ret, warnings = 0;
2490 
2491 	objname = _objname;
2492 
2493 	file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2494 	if (!file.elf)
2495 		return 1;
2496 
2497 	INIT_LIST_HEAD(&file.insn_list);
2498 	hash_init(file.insn_hash);
2499 	file.c_file = find_section_by_name(file.elf, ".comment");
2500 	file.ignore_unreachables = no_unreachable;
2501 	file.hints = false;
2502 
2503 	arch_initial_func_cfi_state(&initial_func_cfi);
2504 
2505 	ret = decode_sections(&file);
2506 	if (ret < 0)
2507 		goto out;
2508 	warnings += ret;
2509 
2510 	if (list_empty(&file.insn_list))
2511 		goto out;
2512 
2513 	if (retpoline) {
2514 		ret = validate_retpoline(&file);
2515 		if (ret < 0)
2516 			return ret;
2517 		warnings += ret;
2518 	}
2519 
2520 	ret = validate_functions(&file);
2521 	if (ret < 0)
2522 		goto out;
2523 	warnings += ret;
2524 
2525 	ret = validate_unwind_hints(&file);
2526 	if (ret < 0)
2527 		goto out;
2528 	warnings += ret;
2529 
2530 	if (!warnings) {
2531 		ret = validate_reachable_instructions(&file);
2532 		if (ret < 0)
2533 			goto out;
2534 		warnings += ret;
2535 	}
2536 
2537 	if (orc) {
2538 		ret = create_orc(&file);
2539 		if (ret < 0)
2540 			goto out;
2541 
2542 		ret = create_orc_sections(&file);
2543 		if (ret < 0)
2544 			goto out;
2545 
2546 		ret = elf_write(file.elf);
2547 		if (ret < 0)
2548 			goto out;
2549 	}
2550 
2551 out:
2552 	if (ret < 0) {
2553 		/*
2554 		 *  Fatal error.  The binary is corrupt or otherwise broken in
2555 		 *  some way, or objtool itself is broken.  Fail the kernel
2556 		 *  build.
2557 		 */
2558 		return ret;
2559 	}
2560 
2561 	return 0;
2562 }
2563