]> git.ipfire.org Git - thirdparty/linux.git/blob - tools/objtool/check.c
Merge tag 'io_uring-5.7-2020-05-22' of git://git.kernel.dk/linux-block
[thirdparty/linux.git] / tools / objtool / check.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <string.h>
7 #include <stdlib.h>
8
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18
19 #define FAKE_JUMP_OFFSET -1
20
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22
23 struct alternative {
24 struct list_head list;
25 struct instruction *insn;
26 bool skip_orig;
27 };
28
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31
32 struct instruction *find_insn(struct objtool_file *file,
33 struct section *sec, unsigned long offset)
34 {
35 struct instruction *insn;
36
37 hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 if (insn->sec == sec && insn->offset == offset)
39 return insn;
40
41 return NULL;
42 }
43
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 struct instruction *insn)
46 {
47 struct instruction *next = list_next_entry(insn, list);
48
49 if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 return NULL;
51
52 return next;
53 }
54
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 struct instruction *insn)
57 {
58 struct instruction *next = list_next_entry(insn, list);
59 struct symbol *func = insn->func;
60
61 if (!func)
62 return NULL;
63
64 if (&next->list != &file->insn_list && next->func == func)
65 return next;
66
67 /* Check if we're already in the subfunction: */
68 if (func == func->cfunc)
69 return NULL;
70
71 /* Move to the subfunction: */
72 return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74
75 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
76 struct instruction *insn)
77 {
78 struct instruction *prev = list_prev_entry(insn, list);
79
80 if (&prev->list != &file->insn_list && prev->func == insn->func)
81 return prev;
82
83 return NULL;
84 }
85
86 #define func_for_each_insn(file, func, insn) \
87 for (insn = find_insn(file, func->sec, func->offset); \
88 insn; \
89 insn = next_insn_same_func(file, insn))
90
91 #define sym_for_each_insn(file, sym, insn) \
92 for (insn = find_insn(file, sym->sec, sym->offset); \
93 insn && &insn->list != &file->insn_list && \
94 insn->sec == sym->sec && \
95 insn->offset < sym->offset + sym->len; \
96 insn = list_next_entry(insn, list))
97
98 #define sym_for_each_insn_continue_reverse(file, sym, insn) \
99 for (insn = list_prev_entry(insn, list); \
100 &insn->list != &file->insn_list && \
101 insn->sec == sym->sec && insn->offset >= sym->offset; \
102 insn = list_prev_entry(insn, list))
103
104 #define sec_for_each_insn_from(file, insn) \
105 for (; insn; insn = next_insn_same_sec(file, insn))
106
107 #define sec_for_each_insn_continue(file, insn) \
108 for (insn = next_insn_same_sec(file, insn); insn; \
109 insn = next_insn_same_sec(file, insn))
110
111 static bool is_static_jump(struct instruction *insn)
112 {
113 return insn->type == INSN_JUMP_CONDITIONAL ||
114 insn->type == INSN_JUMP_UNCONDITIONAL;
115 }
116
117 static bool is_sibling_call(struct instruction *insn)
118 {
119 /* An indirect jump is either a sibling call or a jump to a table. */
120 if (insn->type == INSN_JUMP_DYNAMIC)
121 return list_empty(&insn->alts);
122
123 if (!is_static_jump(insn))
124 return false;
125
126 /* add_jump_destinations() sets insn->call_dest for sibling calls. */
127 return !!insn->call_dest;
128 }
129
130 /*
131 * This checks to see if the given function is a "noreturn" function.
132 *
133 * For global functions which are outside the scope of this object file, we
134 * have to keep a manual list of them.
135 *
136 * For local functions, we have to detect them manually by simply looking for
137 * the lack of a return instruction.
138 */
139 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
140 int recursion)
141 {
142 int i;
143 struct instruction *insn;
144 bool empty = true;
145
146 /*
147 * Unfortunately these have to be hard coded because the noreturn
148 * attribute isn't provided in ELF data.
149 */
150 static const char * const global_noreturns[] = {
151 "__stack_chk_fail",
152 "panic",
153 "do_exit",
154 "do_task_dead",
155 "__module_put_and_exit",
156 "complete_and_exit",
157 "__reiserfs_panic",
158 "lbug_with_loc",
159 "fortify_panic",
160 "usercopy_abort",
161 "machine_real_restart",
162 "rewind_stack_do_exit",
163 "kunit_try_catch_throw",
164 };
165
166 if (!func)
167 return false;
168
169 if (func->bind == STB_WEAK)
170 return false;
171
172 if (func->bind == STB_GLOBAL)
173 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
174 if (!strcmp(func->name, global_noreturns[i]))
175 return true;
176
177 if (!func->len)
178 return false;
179
180 insn = find_insn(file, func->sec, func->offset);
181 if (!insn->func)
182 return false;
183
184 func_for_each_insn(file, func, insn) {
185 empty = false;
186
187 if (insn->type == INSN_RETURN)
188 return false;
189 }
190
191 if (empty)
192 return false;
193
194 /*
195 * A function can have a sibling call instead of a return. In that
196 * case, the function's dead-end status depends on whether the target
197 * of the sibling call returns.
198 */
199 func_for_each_insn(file, func, insn) {
200 if (is_sibling_call(insn)) {
201 struct instruction *dest = insn->jump_dest;
202
203 if (!dest)
204 /* sibling call to another file */
205 return false;
206
207 /* local sibling call */
208 if (recursion == 5) {
209 /*
210 * Infinite recursion: two functions have
211 * sibling calls to each other. This is a very
212 * rare case. It means they aren't dead ends.
213 */
214 return false;
215 }
216
217 return __dead_end_function(file, dest->func, recursion+1);
218 }
219 }
220
221 return true;
222 }
223
224 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
225 {
226 return __dead_end_function(file, func, 0);
227 }
228
229 static void clear_insn_state(struct insn_state *state)
230 {
231 int i;
232
233 memset(state, 0, sizeof(*state));
234 state->cfa.base = CFI_UNDEFINED;
235 for (i = 0; i < CFI_NUM_REGS; i++) {
236 state->regs[i].base = CFI_UNDEFINED;
237 state->vals[i].base = CFI_UNDEFINED;
238 }
239 state->drap_reg = CFI_UNDEFINED;
240 state->drap_offset = -1;
241 }
242
243 /*
244 * Call the arch-specific instruction decoder for all the instructions and add
245 * them to the global instruction list.
246 */
247 static int decode_instructions(struct objtool_file *file)
248 {
249 struct section *sec;
250 struct symbol *func;
251 unsigned long offset;
252 struct instruction *insn;
253 unsigned long nr_insns = 0;
254 int ret;
255
256 for_each_sec(file, sec) {
257
258 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
259 continue;
260
261 if (strcmp(sec->name, ".altinstr_replacement") &&
262 strcmp(sec->name, ".altinstr_aux") &&
263 strncmp(sec->name, ".discard.", 9))
264 sec->text = true;
265
266 for (offset = 0; offset < sec->len; offset += insn->len) {
267 insn = malloc(sizeof(*insn));
268 if (!insn) {
269 WARN("malloc failed");
270 return -1;
271 }
272 memset(insn, 0, sizeof(*insn));
273 INIT_LIST_HEAD(&insn->alts);
274 clear_insn_state(&insn->state);
275
276 insn->sec = sec;
277 insn->offset = offset;
278
279 ret = arch_decode_instruction(file->elf, sec, offset,
280 sec->len - offset,
281 &insn->len, &insn->type,
282 &insn->immediate,
283 &insn->stack_op);
284 if (ret)
285 goto err;
286
287 hash_add(file->insn_hash, &insn->hash, insn->offset);
288 list_add_tail(&insn->list, &file->insn_list);
289 nr_insns++;
290 }
291
292 list_for_each_entry(func, &sec->symbol_list, list) {
293 if (func->type != STT_FUNC || func->alias != func)
294 continue;
295
296 if (!find_insn(file, sec, func->offset)) {
297 WARN("%s(): can't find starting instruction",
298 func->name);
299 return -1;
300 }
301
302 sym_for_each_insn(file, func, insn)
303 insn->func = func;
304 }
305 }
306
307 if (stats)
308 printf("nr_insns: %lu\n", nr_insns);
309
310 return 0;
311
312 err:
313 free(insn);
314 return ret;
315 }
316
317 /*
318 * Mark "ud2" instructions and manually annotated dead ends.
319 */
320 static int add_dead_ends(struct objtool_file *file)
321 {
322 struct section *sec;
323 struct rela *rela;
324 struct instruction *insn;
325 bool found;
326
327 /*
328 * By default, "ud2" is a dead end unless otherwise annotated, because
329 * GCC 7 inserts it for certain divide-by-zero cases.
330 */
331 for_each_insn(file, insn)
332 if (insn->type == INSN_BUG)
333 insn->dead_end = true;
334
335 /*
336 * Check for manually annotated dead ends.
337 */
338 sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
339 if (!sec)
340 goto reachable;
341
342 list_for_each_entry(rela, &sec->rela_list, list) {
343 if (rela->sym->type != STT_SECTION) {
344 WARN("unexpected relocation symbol type in %s", sec->name);
345 return -1;
346 }
347 insn = find_insn(file, rela->sym->sec, rela->addend);
348 if (insn)
349 insn = list_prev_entry(insn, list);
350 else if (rela->addend == rela->sym->sec->len) {
351 found = false;
352 list_for_each_entry_reverse(insn, &file->insn_list, list) {
353 if (insn->sec == rela->sym->sec) {
354 found = true;
355 break;
356 }
357 }
358
359 if (!found) {
360 WARN("can't find unreachable insn at %s+0x%x",
361 rela->sym->sec->name, rela->addend);
362 return -1;
363 }
364 } else {
365 WARN("can't find unreachable insn at %s+0x%x",
366 rela->sym->sec->name, rela->addend);
367 return -1;
368 }
369
370 insn->dead_end = true;
371 }
372
373 reachable:
374 /*
375 * These manually annotated reachable checks are needed for GCC 4.4,
376 * where the Linux unreachable() macro isn't supported. In that case
377 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
378 * not a dead end.
379 */
380 sec = find_section_by_name(file->elf, ".rela.discard.reachable");
381 if (!sec)
382 return 0;
383
384 list_for_each_entry(rela, &sec->rela_list, list) {
385 if (rela->sym->type != STT_SECTION) {
386 WARN("unexpected relocation symbol type in %s", sec->name);
387 return -1;
388 }
389 insn = find_insn(file, rela->sym->sec, rela->addend);
390 if (insn)
391 insn = list_prev_entry(insn, list);
392 else if (rela->addend == rela->sym->sec->len) {
393 found = false;
394 list_for_each_entry_reverse(insn, &file->insn_list, list) {
395 if (insn->sec == rela->sym->sec) {
396 found = true;
397 break;
398 }
399 }
400
401 if (!found) {
402 WARN("can't find reachable insn at %s+0x%x",
403 rela->sym->sec->name, rela->addend);
404 return -1;
405 }
406 } else {
407 WARN("can't find reachable insn at %s+0x%x",
408 rela->sym->sec->name, rela->addend);
409 return -1;
410 }
411
412 insn->dead_end = false;
413 }
414
415 return 0;
416 }
417
418 /*
419 * Warnings shouldn't be reported for ignored functions.
420 */
421 static void add_ignores(struct objtool_file *file)
422 {
423 struct instruction *insn;
424 struct section *sec;
425 struct symbol *func;
426 struct rela *rela;
427
428 sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
429 if (!sec)
430 return;
431
432 list_for_each_entry(rela, &sec->rela_list, list) {
433 switch (rela->sym->type) {
434 case STT_FUNC:
435 func = rela->sym;
436 break;
437
438 case STT_SECTION:
439 func = find_func_by_offset(rela->sym->sec, rela->addend);
440 if (!func)
441 continue;
442 break;
443
444 default:
445 WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
446 continue;
447 }
448
449 func_for_each_insn(file, func, insn)
450 insn->ignore = true;
451 }
452 }
453
454 /*
455 * This is a whitelist of functions that is allowed to be called with AC set.
456 * The list is meant to be minimal and only contains compiler instrumentation
457 * ABI and a few functions used to implement *_{to,from}_user() functions.
458 *
459 * These functions must not directly change AC, but may PUSHF/POPF.
460 */
461 static const char *uaccess_safe_builtin[] = {
462 /* KASAN */
463 "kasan_report",
464 "check_memory_region",
465 /* KASAN out-of-line */
466 "__asan_loadN_noabort",
467 "__asan_load1_noabort",
468 "__asan_load2_noabort",
469 "__asan_load4_noabort",
470 "__asan_load8_noabort",
471 "__asan_load16_noabort",
472 "__asan_storeN_noabort",
473 "__asan_store1_noabort",
474 "__asan_store2_noabort",
475 "__asan_store4_noabort",
476 "__asan_store8_noabort",
477 "__asan_store16_noabort",
478 /* KASAN in-line */
479 "__asan_report_load_n_noabort",
480 "__asan_report_load1_noabort",
481 "__asan_report_load2_noabort",
482 "__asan_report_load4_noabort",
483 "__asan_report_load8_noabort",
484 "__asan_report_load16_noabort",
485 "__asan_report_store_n_noabort",
486 "__asan_report_store1_noabort",
487 "__asan_report_store2_noabort",
488 "__asan_report_store4_noabort",
489 "__asan_report_store8_noabort",
490 "__asan_report_store16_noabort",
491 /* KCOV */
492 "write_comp_data",
493 "__sanitizer_cov_trace_pc",
494 "__sanitizer_cov_trace_const_cmp1",
495 "__sanitizer_cov_trace_const_cmp2",
496 "__sanitizer_cov_trace_const_cmp4",
497 "__sanitizer_cov_trace_const_cmp8",
498 "__sanitizer_cov_trace_cmp1",
499 "__sanitizer_cov_trace_cmp2",
500 "__sanitizer_cov_trace_cmp4",
501 "__sanitizer_cov_trace_cmp8",
502 "__sanitizer_cov_trace_switch",
503 /* UBSAN */
504 "ubsan_type_mismatch_common",
505 "__ubsan_handle_type_mismatch",
506 "__ubsan_handle_type_mismatch_v1",
507 "__ubsan_handle_shift_out_of_bounds",
508 /* misc */
509 "csum_partial_copy_generic",
510 "__memcpy_mcsafe",
511 "mcsafe_handle_tail",
512 "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
513 NULL
514 };
515
516 static void add_uaccess_safe(struct objtool_file *file)
517 {
518 struct symbol *func;
519 const char **name;
520
521 if (!uaccess)
522 return;
523
524 for (name = uaccess_safe_builtin; *name; name++) {
525 func = find_symbol_by_name(file->elf, *name);
526 if (!func)
527 continue;
528
529 func->uaccess_safe = true;
530 }
531 }
532
533 /*
534 * FIXME: For now, just ignore any alternatives which add retpolines. This is
535 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
536 * But it at least allows objtool to understand the control flow *around* the
537 * retpoline.
538 */
539 static int add_ignore_alternatives(struct objtool_file *file)
540 {
541 struct section *sec;
542 struct rela *rela;
543 struct instruction *insn;
544
545 sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
546 if (!sec)
547 return 0;
548
549 list_for_each_entry(rela, &sec->rela_list, list) {
550 if (rela->sym->type != STT_SECTION) {
551 WARN("unexpected relocation symbol type in %s", sec->name);
552 return -1;
553 }
554
555 insn = find_insn(file, rela->sym->sec, rela->addend);
556 if (!insn) {
557 WARN("bad .discard.ignore_alts entry");
558 return -1;
559 }
560
561 insn->ignore_alts = true;
562 }
563
564 return 0;
565 }
566
567 /*
568 * Find the destination instructions for all jumps.
569 */
570 static int add_jump_destinations(struct objtool_file *file)
571 {
572 struct instruction *insn;
573 struct rela *rela;
574 struct section *dest_sec;
575 unsigned long dest_off;
576
577 for_each_insn(file, insn) {
578 if (!is_static_jump(insn))
579 continue;
580
581 if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
582 continue;
583
584 rela = find_rela_by_dest_range(file->elf, insn->sec,
585 insn->offset, insn->len);
586 if (!rela) {
587 dest_sec = insn->sec;
588 dest_off = insn->offset + insn->len + insn->immediate;
589 } else if (rela->sym->type == STT_SECTION) {
590 dest_sec = rela->sym->sec;
591 dest_off = rela->addend + 4;
592 } else if (rela->sym->sec->idx) {
593 dest_sec = rela->sym->sec;
594 dest_off = rela->sym->sym.st_value + rela->addend + 4;
595 } else if (strstr(rela->sym->name, "_indirect_thunk_")) {
596 /*
597 * Retpoline jumps are really dynamic jumps in
598 * disguise, so convert them accordingly.
599 */
600 if (insn->type == INSN_JUMP_UNCONDITIONAL)
601 insn->type = INSN_JUMP_DYNAMIC;
602 else
603 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
604
605 insn->retpoline_safe = true;
606 continue;
607 } else {
608 /* external sibling call */
609 insn->call_dest = rela->sym;
610 continue;
611 }
612
613 insn->jump_dest = find_insn(file, dest_sec, dest_off);
614 if (!insn->jump_dest) {
615
616 /*
617 * This is a special case where an alt instruction
618 * jumps past the end of the section. These are
619 * handled later in handle_group_alt().
620 */
621 if (!strcmp(insn->sec->name, ".altinstr_replacement"))
622 continue;
623
624 WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
625 insn->sec, insn->offset, dest_sec->name,
626 dest_off);
627 return -1;
628 }
629
630 /*
631 * Cross-function jump.
632 */
633 if (insn->func && insn->jump_dest->func &&
634 insn->func != insn->jump_dest->func) {
635
636 /*
637 * For GCC 8+, create parent/child links for any cold
638 * subfunctions. This is _mostly_ redundant with a
639 * similar initialization in read_symbols().
640 *
641 * If a function has aliases, we want the *first* such
642 * function in the symbol table to be the subfunction's
643 * parent. In that case we overwrite the
644 * initialization done in read_symbols().
645 *
646 * However this code can't completely replace the
647 * read_symbols() code because this doesn't detect the
648 * case where the parent function's only reference to a
649 * subfunction is through a jump table.
650 */
651 if (!strstr(insn->func->name, ".cold.") &&
652 strstr(insn->jump_dest->func->name, ".cold.")) {
653 insn->func->cfunc = insn->jump_dest->func;
654 insn->jump_dest->func->pfunc = insn->func;
655
656 } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
657 insn->jump_dest->offset == insn->jump_dest->func->offset) {
658
659 /* internal sibling call */
660 insn->call_dest = insn->jump_dest->func;
661 }
662 }
663 }
664
665 return 0;
666 }
667
668 /*
669 * Find the destination instructions for all calls.
670 */
671 static int add_call_destinations(struct objtool_file *file)
672 {
673 struct instruction *insn;
674 unsigned long dest_off;
675 struct rela *rela;
676
677 for_each_insn(file, insn) {
678 if (insn->type != INSN_CALL)
679 continue;
680
681 rela = find_rela_by_dest_range(file->elf, insn->sec,
682 insn->offset, insn->len);
683 if (!rela) {
684 dest_off = insn->offset + insn->len + insn->immediate;
685 insn->call_dest = find_func_by_offset(insn->sec, dest_off);
686 if (!insn->call_dest)
687 insn->call_dest = find_symbol_by_offset(insn->sec, dest_off);
688
689 if (insn->ignore)
690 continue;
691
692 if (!insn->call_dest) {
693 WARN_FUNC("unsupported intra-function call",
694 insn->sec, insn->offset);
695 if (retpoline)
696 WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
697 return -1;
698 }
699
700 if (insn->func && insn->call_dest->type != STT_FUNC) {
701 WARN_FUNC("unsupported call to non-function",
702 insn->sec, insn->offset);
703 return -1;
704 }
705
706 } else if (rela->sym->type == STT_SECTION) {
707 insn->call_dest = find_func_by_offset(rela->sym->sec,
708 rela->addend+4);
709 if (!insn->call_dest) {
710 WARN_FUNC("can't find call dest symbol at %s+0x%x",
711 insn->sec, insn->offset,
712 rela->sym->sec->name,
713 rela->addend + 4);
714 return -1;
715 }
716 } else
717 insn->call_dest = rela->sym;
718 }
719
720 return 0;
721 }
722
723 /*
724 * The .alternatives section requires some extra special care, over and above
725 * what other special sections require:
726 *
727 * 1. Because alternatives are patched in-place, we need to insert a fake jump
728 * instruction at the end so that validate_branch() skips all the original
729 * replaced instructions when validating the new instruction path.
730 *
731 * 2. An added wrinkle is that the new instruction length might be zero. In
732 * that case the old instructions are replaced with noops. We simulate that
733 * by creating a fake jump as the only new instruction.
734 *
735 * 3. In some cases, the alternative section includes an instruction which
736 * conditionally jumps to the _end_ of the entry. We have to modify these
737 * jumps' destinations to point back to .text rather than the end of the
738 * entry in .altinstr_replacement.
739 */
740 static int handle_group_alt(struct objtool_file *file,
741 struct special_alt *special_alt,
742 struct instruction *orig_insn,
743 struct instruction **new_insn)
744 {
745 struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
746 unsigned long dest_off;
747
748 last_orig_insn = NULL;
749 insn = orig_insn;
750 sec_for_each_insn_from(file, insn) {
751 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
752 break;
753
754 insn->alt_group = true;
755 last_orig_insn = insn;
756 }
757
758 if (next_insn_same_sec(file, last_orig_insn)) {
759 fake_jump = malloc(sizeof(*fake_jump));
760 if (!fake_jump) {
761 WARN("malloc failed");
762 return -1;
763 }
764 memset(fake_jump, 0, sizeof(*fake_jump));
765 INIT_LIST_HEAD(&fake_jump->alts);
766 clear_insn_state(&fake_jump->state);
767
768 fake_jump->sec = special_alt->new_sec;
769 fake_jump->offset = FAKE_JUMP_OFFSET;
770 fake_jump->type = INSN_JUMP_UNCONDITIONAL;
771 fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
772 fake_jump->func = orig_insn->func;
773 }
774
775 if (!special_alt->new_len) {
776 if (!fake_jump) {
777 WARN("%s: empty alternative at end of section",
778 special_alt->orig_sec->name);
779 return -1;
780 }
781
782 *new_insn = fake_jump;
783 return 0;
784 }
785
786 last_new_insn = NULL;
787 insn = *new_insn;
788 sec_for_each_insn_from(file, insn) {
789 if (insn->offset >= special_alt->new_off + special_alt->new_len)
790 break;
791
792 last_new_insn = insn;
793
794 insn->ignore = orig_insn->ignore_alts;
795 insn->func = orig_insn->func;
796
797 /*
798 * Since alternative replacement code is copy/pasted by the
799 * kernel after applying relocations, generally such code can't
800 * have relative-address relocation references to outside the
801 * .altinstr_replacement section, unless the arch's
802 * alternatives code can adjust the relative offsets
803 * accordingly.
804 *
805 * The x86 alternatives code adjusts the offsets only when it
806 * encounters a branch instruction at the very beginning of the
807 * replacement group.
808 */
809 if ((insn->offset != special_alt->new_off ||
810 (insn->type != INSN_CALL && !is_static_jump(insn))) &&
811 find_rela_by_dest_range(file->elf, insn->sec, insn->offset, insn->len)) {
812
813 WARN_FUNC("unsupported relocation in alternatives section",
814 insn->sec, insn->offset);
815 return -1;
816 }
817
818 if (!is_static_jump(insn))
819 continue;
820
821 if (!insn->immediate)
822 continue;
823
824 dest_off = insn->offset + insn->len + insn->immediate;
825 if (dest_off == special_alt->new_off + special_alt->new_len) {
826 if (!fake_jump) {
827 WARN("%s: alternative jump to end of section",
828 special_alt->orig_sec->name);
829 return -1;
830 }
831 insn->jump_dest = fake_jump;
832 }
833
834 if (!insn->jump_dest) {
835 WARN_FUNC("can't find alternative jump destination",
836 insn->sec, insn->offset);
837 return -1;
838 }
839 }
840
841 if (!last_new_insn) {
842 WARN_FUNC("can't find last new alternative instruction",
843 special_alt->new_sec, special_alt->new_off);
844 return -1;
845 }
846
847 if (fake_jump)
848 list_add(&fake_jump->list, &last_new_insn->list);
849
850 return 0;
851 }
852
853 /*
854 * A jump table entry can either convert a nop to a jump or a jump to a nop.
855 * If the original instruction is a jump, make the alt entry an effective nop
856 * by just skipping the original instruction.
857 */
858 static int handle_jump_alt(struct objtool_file *file,
859 struct special_alt *special_alt,
860 struct instruction *orig_insn,
861 struct instruction **new_insn)
862 {
863 if (orig_insn->type == INSN_NOP)
864 return 0;
865
866 if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
867 WARN_FUNC("unsupported instruction at jump label",
868 orig_insn->sec, orig_insn->offset);
869 return -1;
870 }
871
872 *new_insn = list_next_entry(orig_insn, list);
873 return 0;
874 }
875
876 /*
877 * Read all the special sections which have alternate instructions which can be
878 * patched in or redirected to at runtime. Each instruction having alternate
879 * instruction(s) has them added to its insn->alts list, which will be
880 * traversed in validate_branch().
881 */
882 static int add_special_section_alts(struct objtool_file *file)
883 {
884 struct list_head special_alts;
885 struct instruction *orig_insn, *new_insn;
886 struct special_alt *special_alt, *tmp;
887 struct alternative *alt;
888 int ret;
889
890 ret = special_get_alts(file->elf, &special_alts);
891 if (ret)
892 return ret;
893
894 list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
895
896 orig_insn = find_insn(file, special_alt->orig_sec,
897 special_alt->orig_off);
898 if (!orig_insn) {
899 WARN_FUNC("special: can't find orig instruction",
900 special_alt->orig_sec, special_alt->orig_off);
901 ret = -1;
902 goto out;
903 }
904
905 new_insn = NULL;
906 if (!special_alt->group || special_alt->new_len) {
907 new_insn = find_insn(file, special_alt->new_sec,
908 special_alt->new_off);
909 if (!new_insn) {
910 WARN_FUNC("special: can't find new instruction",
911 special_alt->new_sec,
912 special_alt->new_off);
913 ret = -1;
914 goto out;
915 }
916 }
917
918 if (special_alt->group) {
919 ret = handle_group_alt(file, special_alt, orig_insn,
920 &new_insn);
921 if (ret)
922 goto out;
923 } else if (special_alt->jump_or_nop) {
924 ret = handle_jump_alt(file, special_alt, orig_insn,
925 &new_insn);
926 if (ret)
927 goto out;
928 }
929
930 alt = malloc(sizeof(*alt));
931 if (!alt) {
932 WARN("malloc failed");
933 ret = -1;
934 goto out;
935 }
936
937 alt->insn = new_insn;
938 alt->skip_orig = special_alt->skip_orig;
939 orig_insn->ignore_alts |= special_alt->skip_alt;
940 list_add_tail(&alt->list, &orig_insn->alts);
941
942 list_del(&special_alt->list);
943 free(special_alt);
944 }
945
946 out:
947 return ret;
948 }
949
950 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
951 struct rela *table)
952 {
953 struct rela *rela = table;
954 struct instruction *dest_insn;
955 struct alternative *alt;
956 struct symbol *pfunc = insn->func->pfunc;
957 unsigned int prev_offset = 0;
958
959 /*
960 * Each @rela is a switch table relocation which points to the target
961 * instruction.
962 */
963 list_for_each_entry_from(rela, &table->sec->rela_list, list) {
964
965 /* Check for the end of the table: */
966 if (rela != table && rela->jump_table_start)
967 break;
968
969 /* Make sure the table entries are consecutive: */
970 if (prev_offset && rela->offset != prev_offset + 8)
971 break;
972
973 /* Detect function pointers from contiguous objects: */
974 if (rela->sym->sec == pfunc->sec &&
975 rela->addend == pfunc->offset)
976 break;
977
978 dest_insn = find_insn(file, rela->sym->sec, rela->addend);
979 if (!dest_insn)
980 break;
981
982 /* Make sure the destination is in the same function: */
983 if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
984 break;
985
986 alt = malloc(sizeof(*alt));
987 if (!alt) {
988 WARN("malloc failed");
989 return -1;
990 }
991
992 alt->insn = dest_insn;
993 list_add_tail(&alt->list, &insn->alts);
994 prev_offset = rela->offset;
995 }
996
997 if (!prev_offset) {
998 WARN_FUNC("can't find switch jump table",
999 insn->sec, insn->offset);
1000 return -1;
1001 }
1002
1003 return 0;
1004 }
1005
1006 /*
1007 * find_jump_table() - Given a dynamic jump, find the switch jump table in
1008 * .rodata associated with it.
1009 *
1010 * There are 3 basic patterns:
1011 *
1012 * 1. jmpq *[rodata addr](,%reg,8)
1013 *
1014 * This is the most common case by far. It jumps to an address in a simple
1015 * jump table which is stored in .rodata.
1016 *
1017 * 2. jmpq *[rodata addr](%rip)
1018 *
1019 * This is caused by a rare GCC quirk, currently only seen in three driver
1020 * functions in the kernel, only with certain obscure non-distro configs.
1021 *
1022 * As part of an optimization, GCC makes a copy of an existing switch jump
1023 * table, modifies it, and then hard-codes the jump (albeit with an indirect
1024 * jump) to use a single entry in the table. The rest of the jump table and
1025 * some of its jump targets remain as dead code.
1026 *
1027 * In such a case we can just crudely ignore all unreachable instruction
1028 * warnings for the entire object file. Ideally we would just ignore them
1029 * for the function, but that would require redesigning the code quite a
1030 * bit. And honestly that's just not worth doing: unreachable instruction
1031 * warnings are of questionable value anyway, and this is such a rare issue.
1032 *
1033 * 3. mov [rodata addr],%reg1
1034 * ... some instructions ...
1035 * jmpq *(%reg1,%reg2,8)
1036 *
1037 * This is a fairly uncommon pattern which is new for GCC 6. As of this
1038 * writing, there are 11 occurrences of it in the allmodconfig kernel.
1039 *
1040 * As of GCC 7 there are quite a few more of these and the 'in between' code
1041 * is significant. Esp. with KASAN enabled some of the code between the mov
1042 * and jmpq uses .rodata itself, which can confuse things.
1043 *
1044 * TODO: Once we have DWARF CFI and smarter instruction decoding logic,
1045 * ensure the same register is used in the mov and jump instructions.
1046 *
1047 * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
1048 */
1049 static struct rela *find_jump_table(struct objtool_file *file,
1050 struct symbol *func,
1051 struct instruction *insn)
1052 {
1053 struct rela *text_rela, *table_rela;
1054 struct instruction *dest_insn, *orig_insn = insn;
1055 struct section *table_sec;
1056 unsigned long table_offset;
1057
1058 /*
1059 * Backward search using the @first_jump_src links, these help avoid
1060 * much of the 'in between' code. Which avoids us getting confused by
1061 * it.
1062 */
1063 for (;
1064 insn && insn->func && insn->func->pfunc == func;
1065 insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
1066
1067 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1068 break;
1069
1070 /* allow small jumps within the range */
1071 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1072 insn->jump_dest &&
1073 (insn->jump_dest->offset <= insn->offset ||
1074 insn->jump_dest->offset > orig_insn->offset))
1075 break;
1076
1077 /* look for a relocation which references .rodata */
1078 text_rela = find_rela_by_dest_range(file->elf, insn->sec,
1079 insn->offset, insn->len);
1080 if (!text_rela || text_rela->sym->type != STT_SECTION ||
1081 !text_rela->sym->sec->rodata)
1082 continue;
1083
1084 table_offset = text_rela->addend;
1085 table_sec = text_rela->sym->sec;
1086
1087 if (text_rela->type == R_X86_64_PC32)
1088 table_offset += 4;
1089
1090 /*
1091 * Make sure the .rodata address isn't associated with a
1092 * symbol. GCC jump tables are anonymous data.
1093 *
1094 * Also support C jump tables which are in the same format as
1095 * switch jump tables. For objtool to recognize them, they
1096 * need to be placed in the C_JUMP_TABLE_SECTION section. They
1097 * have symbols associated with them.
1098 */
1099 if (find_symbol_containing(table_sec, table_offset) &&
1100 strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1101 continue;
1102
1103 /*
1104 * Each table entry has a rela associated with it. The rela
1105 * should reference text in the same function as the original
1106 * instruction.
1107 */
1108 table_rela = find_rela_by_dest(file->elf, table_sec, table_offset);
1109 if (!table_rela)
1110 continue;
1111 dest_insn = find_insn(file, table_rela->sym->sec, table_rela->addend);
1112 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1113 continue;
1114
1115 /*
1116 * Use of RIP-relative switch jumps is quite rare, and
1117 * indicates a rare GCC quirk/bug which can leave dead code
1118 * behind.
1119 */
1120 if (text_rela->type == R_X86_64_PC32)
1121 file->ignore_unreachables = true;
1122
1123 return table_rela;
1124 }
1125
1126 return NULL;
1127 }
1128
1129 /*
1130 * First pass: Mark the head of each jump table so that in the next pass,
1131 * we know when a given jump table ends and the next one starts.
1132 */
1133 static void mark_func_jump_tables(struct objtool_file *file,
1134 struct symbol *func)
1135 {
1136 struct instruction *insn, *last = NULL;
1137 struct rela *rela;
1138
1139 func_for_each_insn(file, func, insn) {
1140 if (!last)
1141 last = insn;
1142
1143 /*
1144 * Store back-pointers for unconditional forward jumps such
1145 * that find_jump_table() can back-track using those and
1146 * avoid some potentially confusing code.
1147 */
1148 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1149 insn->offset > last->offset &&
1150 insn->jump_dest->offset > insn->offset &&
1151 !insn->jump_dest->first_jump_src) {
1152
1153 insn->jump_dest->first_jump_src = insn;
1154 last = insn->jump_dest;
1155 }
1156
1157 if (insn->type != INSN_JUMP_DYNAMIC)
1158 continue;
1159
1160 rela = find_jump_table(file, func, insn);
1161 if (rela) {
1162 rela->jump_table_start = true;
1163 insn->jump_table = rela;
1164 }
1165 }
1166 }
1167
1168 static int add_func_jump_tables(struct objtool_file *file,
1169 struct symbol *func)
1170 {
1171 struct instruction *insn;
1172 int ret;
1173
1174 func_for_each_insn(file, func, insn) {
1175 if (!insn->jump_table)
1176 continue;
1177
1178 ret = add_jump_table(file, insn, insn->jump_table);
1179 if (ret)
1180 return ret;
1181 }
1182
1183 return 0;
1184 }
1185
1186 /*
1187 * For some switch statements, gcc generates a jump table in the .rodata
1188 * section which contains a list of addresses within the function to jump to.
1189 * This finds these jump tables and adds them to the insn->alts lists.
1190 */
1191 static int add_jump_table_alts(struct objtool_file *file)
1192 {
1193 struct section *sec;
1194 struct symbol *func;
1195 int ret;
1196
1197 if (!file->rodata)
1198 return 0;
1199
1200 for_each_sec(file, sec) {
1201 list_for_each_entry(func, &sec->symbol_list, list) {
1202 if (func->type != STT_FUNC)
1203 continue;
1204
1205 mark_func_jump_tables(file, func);
1206 ret = add_func_jump_tables(file, func);
1207 if (ret)
1208 return ret;
1209 }
1210 }
1211
1212 return 0;
1213 }
1214
1215 static int read_unwind_hints(struct objtool_file *file)
1216 {
1217 struct section *sec, *relasec;
1218 struct rela *rela;
1219 struct unwind_hint *hint;
1220 struct instruction *insn;
1221 struct cfi_reg *cfa;
1222 int i;
1223
1224 sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1225 if (!sec)
1226 return 0;
1227
1228 relasec = sec->rela;
1229 if (!relasec) {
1230 WARN("missing .rela.discard.unwind_hints section");
1231 return -1;
1232 }
1233
1234 if (sec->len % sizeof(struct unwind_hint)) {
1235 WARN("struct unwind_hint size mismatch");
1236 return -1;
1237 }
1238
1239 file->hints = true;
1240
1241 for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1242 hint = (struct unwind_hint *)sec->data->d_buf + i;
1243
1244 rela = find_rela_by_dest(file->elf, sec, i * sizeof(*hint));
1245 if (!rela) {
1246 WARN("can't find rela for unwind_hints[%d]", i);
1247 return -1;
1248 }
1249
1250 insn = find_insn(file, rela->sym->sec, rela->addend);
1251 if (!insn) {
1252 WARN("can't find insn for unwind_hints[%d]", i);
1253 return -1;
1254 }
1255
1256 cfa = &insn->state.cfa;
1257
1258 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1259 insn->save = true;
1260 continue;
1261
1262 } else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1263 insn->restore = true;
1264 insn->hint = true;
1265 continue;
1266 }
1267
1268 insn->hint = true;
1269
1270 switch (hint->sp_reg) {
1271 case ORC_REG_UNDEFINED:
1272 cfa->base = CFI_UNDEFINED;
1273 break;
1274 case ORC_REG_SP:
1275 cfa->base = CFI_SP;
1276 break;
1277 case ORC_REG_BP:
1278 cfa->base = CFI_BP;
1279 break;
1280 case ORC_REG_SP_INDIRECT:
1281 cfa->base = CFI_SP_INDIRECT;
1282 break;
1283 case ORC_REG_R10:
1284 cfa->base = CFI_R10;
1285 break;
1286 case ORC_REG_R13:
1287 cfa->base = CFI_R13;
1288 break;
1289 case ORC_REG_DI:
1290 cfa->base = CFI_DI;
1291 break;
1292 case ORC_REG_DX:
1293 cfa->base = CFI_DX;
1294 break;
1295 default:
1296 WARN_FUNC("unsupported unwind_hint sp base reg %d",
1297 insn->sec, insn->offset, hint->sp_reg);
1298 return -1;
1299 }
1300
1301 cfa->offset = hint->sp_offset;
1302 insn->state.type = hint->type;
1303 insn->state.end = hint->end;
1304 }
1305
1306 return 0;
1307 }
1308
1309 static int read_retpoline_hints(struct objtool_file *file)
1310 {
1311 struct section *sec;
1312 struct instruction *insn;
1313 struct rela *rela;
1314
1315 sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1316 if (!sec)
1317 return 0;
1318
1319 list_for_each_entry(rela, &sec->rela_list, list) {
1320 if (rela->sym->type != STT_SECTION) {
1321 WARN("unexpected relocation symbol type in %s", sec->name);
1322 return -1;
1323 }
1324
1325 insn = find_insn(file, rela->sym->sec, rela->addend);
1326 if (!insn) {
1327 WARN("bad .discard.retpoline_safe entry");
1328 return -1;
1329 }
1330
1331 if (insn->type != INSN_JUMP_DYNAMIC &&
1332 insn->type != INSN_CALL_DYNAMIC) {
1333 WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1334 insn->sec, insn->offset);
1335 return -1;
1336 }
1337
1338 insn->retpoline_safe = true;
1339 }
1340
1341 return 0;
1342 }
1343
1344 static void mark_rodata(struct objtool_file *file)
1345 {
1346 struct section *sec;
1347 bool found = false;
1348
1349 /*
1350 * Search for the following rodata sections, each of which can
1351 * potentially contain jump tables:
1352 *
1353 * - .rodata: can contain GCC switch tables
1354 * - .rodata.<func>: same, if -fdata-sections is being used
1355 * - .rodata..c_jump_table: contains C annotated jump tables
1356 *
1357 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1358 */
1359 for_each_sec(file, sec) {
1360 if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1361 !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1362 sec->rodata = true;
1363 found = true;
1364 }
1365 }
1366
1367 file->rodata = found;
1368 }
1369
1370 static int decode_sections(struct objtool_file *file)
1371 {
1372 int ret;
1373
1374 mark_rodata(file);
1375
1376 ret = decode_instructions(file);
1377 if (ret)
1378 return ret;
1379
1380 ret = add_dead_ends(file);
1381 if (ret)
1382 return ret;
1383
1384 add_ignores(file);
1385 add_uaccess_safe(file);
1386
1387 ret = add_ignore_alternatives(file);
1388 if (ret)
1389 return ret;
1390
1391 ret = add_jump_destinations(file);
1392 if (ret)
1393 return ret;
1394
1395 ret = add_special_section_alts(file);
1396 if (ret)
1397 return ret;
1398
1399 ret = add_call_destinations(file);
1400 if (ret)
1401 return ret;
1402
1403 ret = add_jump_table_alts(file);
1404 if (ret)
1405 return ret;
1406
1407 ret = read_unwind_hints(file);
1408 if (ret)
1409 return ret;
1410
1411 ret = read_retpoline_hints(file);
1412 if (ret)
1413 return ret;
1414
1415 return 0;
1416 }
1417
1418 static bool is_fentry_call(struct instruction *insn)
1419 {
1420 if (insn->type == INSN_CALL &&
1421 insn->call_dest->type == STT_NOTYPE &&
1422 !strcmp(insn->call_dest->name, "__fentry__"))
1423 return true;
1424
1425 return false;
1426 }
1427
1428 static bool has_modified_stack_frame(struct insn_state *state)
1429 {
1430 int i;
1431
1432 if (state->cfa.base != initial_func_cfi.cfa.base ||
1433 state->cfa.offset != initial_func_cfi.cfa.offset ||
1434 state->stack_size != initial_func_cfi.cfa.offset ||
1435 state->drap)
1436 return true;
1437
1438 for (i = 0; i < CFI_NUM_REGS; i++)
1439 if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1440 state->regs[i].offset != initial_func_cfi.regs[i].offset)
1441 return true;
1442
1443 return false;
1444 }
1445
1446 static bool has_valid_stack_frame(struct insn_state *state)
1447 {
1448 if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1449 state->regs[CFI_BP].offset == -16)
1450 return true;
1451
1452 if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1453 return true;
1454
1455 return false;
1456 }
1457
1458 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1459 {
1460 struct cfi_reg *cfa = &state->cfa;
1461 struct stack_op *op = &insn->stack_op;
1462
1463 if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
1464 return 0;
1465
1466 /* push */
1467 if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1468 cfa->offset += 8;
1469
1470 /* pop */
1471 if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1472 cfa->offset -= 8;
1473
1474 /* add immediate to sp */
1475 if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1476 op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1477 cfa->offset -= op->src.offset;
1478
1479 return 0;
1480 }
1481
1482 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1483 int offset)
1484 {
1485 if (arch_callee_saved_reg(reg) &&
1486 state->regs[reg].base == CFI_UNDEFINED) {
1487 state->regs[reg].base = base;
1488 state->regs[reg].offset = offset;
1489 }
1490 }
1491
1492 static void restore_reg(struct insn_state *state, unsigned char reg)
1493 {
1494 state->regs[reg].base = CFI_UNDEFINED;
1495 state->regs[reg].offset = 0;
1496 }
1497
1498 /*
1499 * A note about DRAP stack alignment:
1500 *
1501 * GCC has the concept of a DRAP register, which is used to help keep track of
1502 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP
1503 * register. The typical DRAP pattern is:
1504 *
1505 * 4c 8d 54 24 08 lea 0x8(%rsp),%r10
1506 * 48 83 e4 c0 and $0xffffffffffffffc0,%rsp
1507 * 41 ff 72 f8 pushq -0x8(%r10)
1508 * 55 push %rbp
1509 * 48 89 e5 mov %rsp,%rbp
1510 * (more pushes)
1511 * 41 52 push %r10
1512 * ...
1513 * 41 5a pop %r10
1514 * (more pops)
1515 * 5d pop %rbp
1516 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1517 * c3 retq
1518 *
1519 * There are some variations in the epilogues, like:
1520 *
1521 * 5b pop %rbx
1522 * 41 5a pop %r10
1523 * 41 5c pop %r12
1524 * 41 5d pop %r13
1525 * 41 5e pop %r14
1526 * c9 leaveq
1527 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1528 * c3 retq
1529 *
1530 * and:
1531 *
1532 * 4c 8b 55 e8 mov -0x18(%rbp),%r10
1533 * 48 8b 5d e0 mov -0x20(%rbp),%rbx
1534 * 4c 8b 65 f0 mov -0x10(%rbp),%r12
1535 * 4c 8b 6d f8 mov -0x8(%rbp),%r13
1536 * c9 leaveq
1537 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1538 * c3 retq
1539 *
1540 * Sometimes r13 is used as the DRAP register, in which case it's saved and
1541 * restored beforehand:
1542 *
1543 * 41 55 push %r13
1544 * 4c 8d 6c 24 10 lea 0x10(%rsp),%r13
1545 * 48 83 e4 f0 and $0xfffffffffffffff0,%rsp
1546 * ...
1547 * 49 8d 65 f0 lea -0x10(%r13),%rsp
1548 * 41 5d pop %r13
1549 * c3 retq
1550 */
1551 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1552 {
1553 struct stack_op *op = &insn->stack_op;
1554 struct cfi_reg *cfa = &state->cfa;
1555 struct cfi_reg *regs = state->regs;
1556
1557 /* stack operations don't make sense with an undefined CFA */
1558 if (cfa->base == CFI_UNDEFINED) {
1559 if (insn->func) {
1560 WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1561 return -1;
1562 }
1563 return 0;
1564 }
1565
1566 if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1567 return update_insn_state_regs(insn, state);
1568
1569 switch (op->dest.type) {
1570
1571 case OP_DEST_REG:
1572 switch (op->src.type) {
1573
1574 case OP_SRC_REG:
1575 if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1576 cfa->base == CFI_SP &&
1577 regs[CFI_BP].base == CFI_CFA &&
1578 regs[CFI_BP].offset == -cfa->offset) {
1579
1580 /* mov %rsp, %rbp */
1581 cfa->base = op->dest.reg;
1582 state->bp_scratch = false;
1583 }
1584
1585 else if (op->src.reg == CFI_SP &&
1586 op->dest.reg == CFI_BP && state->drap) {
1587
1588 /* drap: mov %rsp, %rbp */
1589 regs[CFI_BP].base = CFI_BP;
1590 regs[CFI_BP].offset = -state->stack_size;
1591 state->bp_scratch = false;
1592 }
1593
1594 else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1595
1596 /*
1597 * mov %rsp, %reg
1598 *
1599 * This is needed for the rare case where GCC
1600 * does:
1601 *
1602 * mov %rsp, %rax
1603 * ...
1604 * mov %rax, %rsp
1605 */
1606 state->vals[op->dest.reg].base = CFI_CFA;
1607 state->vals[op->dest.reg].offset = -state->stack_size;
1608 }
1609
1610 else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1611 cfa->base == CFI_BP) {
1612
1613 /*
1614 * mov %rbp, %rsp
1615 *
1616 * Restore the original stack pointer (Clang).
1617 */
1618 state->stack_size = -state->regs[CFI_BP].offset;
1619 }
1620
1621 else if (op->dest.reg == cfa->base) {
1622
1623 /* mov %reg, %rsp */
1624 if (cfa->base == CFI_SP &&
1625 state->vals[op->src.reg].base == CFI_CFA) {
1626
1627 /*
1628 * This is needed for the rare case
1629 * where GCC does something dumb like:
1630 *
1631 * lea 0x8(%rsp), %rcx
1632 * ...
1633 * mov %rcx, %rsp
1634 */
1635 cfa->offset = -state->vals[op->src.reg].offset;
1636 state->stack_size = cfa->offset;
1637
1638 } else {
1639 cfa->base = CFI_UNDEFINED;
1640 cfa->offset = 0;
1641 }
1642 }
1643
1644 break;
1645
1646 case OP_SRC_ADD:
1647 if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1648
1649 /* add imm, %rsp */
1650 state->stack_size -= op->src.offset;
1651 if (cfa->base == CFI_SP)
1652 cfa->offset -= op->src.offset;
1653 break;
1654 }
1655
1656 if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1657
1658 /* lea disp(%rbp), %rsp */
1659 state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1660 break;
1661 }
1662
1663 if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1664
1665 /* drap: lea disp(%rsp), %drap */
1666 state->drap_reg = op->dest.reg;
1667
1668 /*
1669 * lea disp(%rsp), %reg
1670 *
1671 * This is needed for the rare case where GCC
1672 * does something dumb like:
1673 *
1674 * lea 0x8(%rsp), %rcx
1675 * ...
1676 * mov %rcx, %rsp
1677 */
1678 state->vals[op->dest.reg].base = CFI_CFA;
1679 state->vals[op->dest.reg].offset = \
1680 -state->stack_size + op->src.offset;
1681
1682 break;
1683 }
1684
1685 if (state->drap && op->dest.reg == CFI_SP &&
1686 op->src.reg == state->drap_reg) {
1687
1688 /* drap: lea disp(%drap), %rsp */
1689 cfa->base = CFI_SP;
1690 cfa->offset = state->stack_size = -op->src.offset;
1691 state->drap_reg = CFI_UNDEFINED;
1692 state->drap = false;
1693 break;
1694 }
1695
1696 if (op->dest.reg == state->cfa.base) {
1697 WARN_FUNC("unsupported stack register modification",
1698 insn->sec, insn->offset);
1699 return -1;
1700 }
1701
1702 break;
1703
1704 case OP_SRC_AND:
1705 if (op->dest.reg != CFI_SP ||
1706 (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1707 (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1708 WARN_FUNC("unsupported stack pointer realignment",
1709 insn->sec, insn->offset);
1710 return -1;
1711 }
1712
1713 if (state->drap_reg != CFI_UNDEFINED) {
1714 /* drap: and imm, %rsp */
1715 cfa->base = state->drap_reg;
1716 cfa->offset = state->stack_size = 0;
1717 state->drap = true;
1718 }
1719
1720 /*
1721 * Older versions of GCC (4.8ish) realign the stack
1722 * without DRAP, with a frame pointer.
1723 */
1724
1725 break;
1726
1727 case OP_SRC_POP:
1728 case OP_SRC_POPF:
1729 if (!state->drap && op->dest.type == OP_DEST_REG &&
1730 op->dest.reg == cfa->base) {
1731
1732 /* pop %rbp */
1733 cfa->base = CFI_SP;
1734 }
1735
1736 if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1737 op->dest.type == OP_DEST_REG &&
1738 op->dest.reg == state->drap_reg &&
1739 state->drap_offset == -state->stack_size) {
1740
1741 /* drap: pop %drap */
1742 cfa->base = state->drap_reg;
1743 cfa->offset = 0;
1744 state->drap_offset = -1;
1745
1746 } else if (regs[op->dest.reg].offset == -state->stack_size) {
1747
1748 /* pop %reg */
1749 restore_reg(state, op->dest.reg);
1750 }
1751
1752 state->stack_size -= 8;
1753 if (cfa->base == CFI_SP)
1754 cfa->offset -= 8;
1755
1756 break;
1757
1758 case OP_SRC_REG_INDIRECT:
1759 if (state->drap && op->src.reg == CFI_BP &&
1760 op->src.offset == state->drap_offset) {
1761
1762 /* drap: mov disp(%rbp), %drap */
1763 cfa->base = state->drap_reg;
1764 cfa->offset = 0;
1765 state->drap_offset = -1;
1766 }
1767
1768 if (state->drap && op->src.reg == CFI_BP &&
1769 op->src.offset == regs[op->dest.reg].offset) {
1770
1771 /* drap: mov disp(%rbp), %reg */
1772 restore_reg(state, op->dest.reg);
1773
1774 } else if (op->src.reg == cfa->base &&
1775 op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1776
1777 /* mov disp(%rbp), %reg */
1778 /* mov disp(%rsp), %reg */
1779 restore_reg(state, op->dest.reg);
1780 }
1781
1782 break;
1783
1784 default:
1785 WARN_FUNC("unknown stack-related instruction",
1786 insn->sec, insn->offset);
1787 return -1;
1788 }
1789
1790 break;
1791
1792 case OP_DEST_PUSH:
1793 case OP_DEST_PUSHF:
1794 state->stack_size += 8;
1795 if (cfa->base == CFI_SP)
1796 cfa->offset += 8;
1797
1798 if (op->src.type != OP_SRC_REG)
1799 break;
1800
1801 if (state->drap) {
1802 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1803
1804 /* drap: push %drap */
1805 cfa->base = CFI_BP_INDIRECT;
1806 cfa->offset = -state->stack_size;
1807
1808 /* save drap so we know when to restore it */
1809 state->drap_offset = -state->stack_size;
1810
1811 } else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1812
1813 /* drap: push %rbp */
1814 state->stack_size = 0;
1815
1816 } else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1817
1818 /* drap: push %reg */
1819 save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1820 }
1821
1822 } else {
1823
1824 /* push %reg */
1825 save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1826 }
1827
1828 /* detect when asm code uses rbp as a scratch register */
1829 if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1830 cfa->base != CFI_BP)
1831 state->bp_scratch = true;
1832 break;
1833
1834 case OP_DEST_REG_INDIRECT:
1835
1836 if (state->drap) {
1837 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1838
1839 /* drap: mov %drap, disp(%rbp) */
1840 cfa->base = CFI_BP_INDIRECT;
1841 cfa->offset = op->dest.offset;
1842
1843 /* save drap offset so we know when to restore it */
1844 state->drap_offset = op->dest.offset;
1845 }
1846
1847 else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1848
1849 /* drap: mov reg, disp(%rbp) */
1850 save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1851 }
1852
1853 } else if (op->dest.reg == cfa->base) {
1854
1855 /* mov reg, disp(%rbp) */
1856 /* mov reg, disp(%rsp) */
1857 save_reg(state, op->src.reg, CFI_CFA,
1858 op->dest.offset - state->cfa.offset);
1859 }
1860
1861 break;
1862
1863 case OP_DEST_LEAVE:
1864 if ((!state->drap && cfa->base != CFI_BP) ||
1865 (state->drap && cfa->base != state->drap_reg)) {
1866 WARN_FUNC("leave instruction with modified stack frame",
1867 insn->sec, insn->offset);
1868 return -1;
1869 }
1870
1871 /* leave (mov %rbp, %rsp; pop %rbp) */
1872
1873 state->stack_size = -state->regs[CFI_BP].offset - 8;
1874 restore_reg(state, CFI_BP);
1875
1876 if (!state->drap) {
1877 cfa->base = CFI_SP;
1878 cfa->offset -= 8;
1879 }
1880
1881 break;
1882
1883 case OP_DEST_MEM:
1884 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1885 WARN_FUNC("unknown stack-related memory operation",
1886 insn->sec, insn->offset);
1887 return -1;
1888 }
1889
1890 /* pop mem */
1891 state->stack_size -= 8;
1892 if (cfa->base == CFI_SP)
1893 cfa->offset -= 8;
1894
1895 break;
1896
1897 default:
1898 WARN_FUNC("unknown stack-related instruction",
1899 insn->sec, insn->offset);
1900 return -1;
1901 }
1902
1903 return 0;
1904 }
1905
1906 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1907 {
1908 struct insn_state *state1 = &insn->state, *state2 = state;
1909 int i;
1910
1911 if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1912 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1913 insn->sec, insn->offset,
1914 state1->cfa.base, state1->cfa.offset,
1915 state2->cfa.base, state2->cfa.offset);
1916
1917 } else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1918 for (i = 0; i < CFI_NUM_REGS; i++) {
1919 if (!memcmp(&state1->regs[i], &state2->regs[i],
1920 sizeof(struct cfi_reg)))
1921 continue;
1922
1923 WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1924 insn->sec, insn->offset,
1925 i, state1->regs[i].base, state1->regs[i].offset,
1926 i, state2->regs[i].base, state2->regs[i].offset);
1927 break;
1928 }
1929
1930 } else if (state1->type != state2->type) {
1931 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1932 insn->sec, insn->offset, state1->type, state2->type);
1933
1934 } else if (state1->drap != state2->drap ||
1935 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1936 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1937 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1938 insn->sec, insn->offset,
1939 state1->drap, state1->drap_reg, state1->drap_offset,
1940 state2->drap, state2->drap_reg, state2->drap_offset);
1941
1942 } else
1943 return true;
1944
1945 return false;
1946 }
1947
1948 static inline bool func_uaccess_safe(struct symbol *func)
1949 {
1950 if (func)
1951 return func->uaccess_safe;
1952
1953 return false;
1954 }
1955
1956 static inline const char *call_dest_name(struct instruction *insn)
1957 {
1958 if (insn->call_dest)
1959 return insn->call_dest->name;
1960
1961 return "{dynamic}";
1962 }
1963
1964 static int validate_call(struct instruction *insn, struct insn_state *state)
1965 {
1966 if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1967 WARN_FUNC("call to %s() with UACCESS enabled",
1968 insn->sec, insn->offset, call_dest_name(insn));
1969 return 1;
1970 }
1971
1972 if (state->df) {
1973 WARN_FUNC("call to %s() with DF set",
1974 insn->sec, insn->offset, call_dest_name(insn));
1975 return 1;
1976 }
1977
1978 return 0;
1979 }
1980
1981 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1982 {
1983 if (has_modified_stack_frame(state)) {
1984 WARN_FUNC("sibling call from callable instruction with modified stack frame",
1985 insn->sec, insn->offset);
1986 return 1;
1987 }
1988
1989 return validate_call(insn, state);
1990 }
1991
1992 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
1993 {
1994 if (state->uaccess && !func_uaccess_safe(func)) {
1995 WARN_FUNC("return with UACCESS enabled",
1996 insn->sec, insn->offset);
1997 return 1;
1998 }
1999
2000 if (!state->uaccess && func_uaccess_safe(func)) {
2001 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
2002 insn->sec, insn->offset);
2003 return 1;
2004 }
2005
2006 if (state->df) {
2007 WARN_FUNC("return with DF set",
2008 insn->sec, insn->offset);
2009 return 1;
2010 }
2011
2012 if (func && has_modified_stack_frame(state)) {
2013 WARN_FUNC("return with modified stack frame",
2014 insn->sec, insn->offset);
2015 return 1;
2016 }
2017
2018 if (state->bp_scratch) {
2019 WARN_FUNC("BP used as a scratch register",
2020 insn->sec, insn->offset);
2021 return 1;
2022 }
2023
2024 return 0;
2025 }
2026
2027 /*
2028 * Follow the branch starting at the given instruction, and recursively follow
2029 * any other branches (jumps). Meanwhile, track the frame pointer state at
2030 * each instruction and validate all the rules described in
2031 * tools/objtool/Documentation/stack-validation.txt.
2032 */
2033 static int validate_branch(struct objtool_file *file, struct symbol *func,
2034 struct instruction *first, struct insn_state state)
2035 {
2036 struct alternative *alt;
2037 struct instruction *insn, *next_insn;
2038 struct section *sec;
2039 u8 visited;
2040 int ret;
2041
2042 insn = first;
2043 sec = insn->sec;
2044
2045 if (insn->alt_group && list_empty(&insn->alts)) {
2046 WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
2047 sec, insn->offset);
2048 return 1;
2049 }
2050
2051 while (1) {
2052 next_insn = next_insn_same_sec(file, insn);
2053
2054 if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2055 WARN("%s() falls through to next function %s()",
2056 func->name, insn->func->name);
2057 return 1;
2058 }
2059
2060 if (func && insn->ignore) {
2061 WARN_FUNC("BUG: why am I validating an ignored function?",
2062 sec, insn->offset);
2063 return 1;
2064 }
2065
2066 visited = 1 << state.uaccess;
2067 if (insn->visited) {
2068 if (!insn->hint && !insn_state_match(insn, &state))
2069 return 1;
2070
2071 if (insn->visited & visited)
2072 return 0;
2073 }
2074
2075 if (insn->hint) {
2076 if (insn->restore) {
2077 struct instruction *save_insn, *i;
2078
2079 i = insn;
2080 save_insn = NULL;
2081 sym_for_each_insn_continue_reverse(file, func, i) {
2082 if (i->save) {
2083 save_insn = i;
2084 break;
2085 }
2086 }
2087
2088 if (!save_insn) {
2089 WARN_FUNC("no corresponding CFI save for CFI restore",
2090 sec, insn->offset);
2091 return 1;
2092 }
2093
2094 if (!save_insn->visited) {
2095 /*
2096 * Oops, no state to copy yet.
2097 * Hopefully we can reach this
2098 * instruction from another branch
2099 * after the save insn has been
2100 * visited.
2101 */
2102 if (insn == first)
2103 return 0;
2104
2105 WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2106 sec, insn->offset);
2107 return 1;
2108 }
2109
2110 insn->state = save_insn->state;
2111 }
2112
2113 state = insn->state;
2114
2115 } else
2116 insn->state = state;
2117
2118 insn->visited |= visited;
2119
2120 if (!insn->ignore_alts) {
2121 bool skip_orig = false;
2122
2123 list_for_each_entry(alt, &insn->alts, list) {
2124 if (alt->skip_orig)
2125 skip_orig = true;
2126
2127 ret = validate_branch(file, func, alt->insn, state);
2128 if (ret) {
2129 if (backtrace)
2130 BT_FUNC("(alt)", insn);
2131 return ret;
2132 }
2133 }
2134
2135 if (skip_orig)
2136 return 0;
2137 }
2138
2139 switch (insn->type) {
2140
2141 case INSN_RETURN:
2142 return validate_return(func, insn, &state);
2143
2144 case INSN_CALL:
2145 case INSN_CALL_DYNAMIC:
2146 ret = validate_call(insn, &state);
2147 if (ret)
2148 return ret;
2149
2150 if (!no_fp && func && !is_fentry_call(insn) &&
2151 !has_valid_stack_frame(&state)) {
2152 WARN_FUNC("call without frame pointer save/setup",
2153 sec, insn->offset);
2154 return 1;
2155 }
2156
2157 if (dead_end_function(file, insn->call_dest))
2158 return 0;
2159
2160 break;
2161
2162 case INSN_JUMP_CONDITIONAL:
2163 case INSN_JUMP_UNCONDITIONAL:
2164 if (func && is_sibling_call(insn)) {
2165 ret = validate_sibling_call(insn, &state);
2166 if (ret)
2167 return ret;
2168
2169 } else if (insn->jump_dest) {
2170 ret = validate_branch(file, func,
2171 insn->jump_dest, state);
2172 if (ret) {
2173 if (backtrace)
2174 BT_FUNC("(branch)", insn);
2175 return ret;
2176 }
2177 }
2178
2179 if (insn->type == INSN_JUMP_UNCONDITIONAL)
2180 return 0;
2181
2182 break;
2183
2184 case INSN_JUMP_DYNAMIC:
2185 case INSN_JUMP_DYNAMIC_CONDITIONAL:
2186 if (func && is_sibling_call(insn)) {
2187 ret = validate_sibling_call(insn, &state);
2188 if (ret)
2189 return ret;
2190 }
2191
2192 if (insn->type == INSN_JUMP_DYNAMIC)
2193 return 0;
2194
2195 break;
2196
2197 case INSN_CONTEXT_SWITCH:
2198 if (func && (!next_insn || !next_insn->hint)) {
2199 WARN_FUNC("unsupported instruction in callable function",
2200 sec, insn->offset);
2201 return 1;
2202 }
2203 return 0;
2204
2205 case INSN_STACK:
2206 if (update_insn_state(insn, &state))
2207 return 1;
2208
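/*
 * PUSHF/POPF track a small stack of UACCESS states inside an integer:
 * each PUSHF shifts uaccess_stack left and records the current uaccess
 * bit, each POPF pops one bit back into state.uaccess. A leading 1 bit
 * acts as a sentinel: popping down to just the sentinel resets the stack
 * to empty (0), and pushing when bit 31 is already set is reported as
 * exhaustion. For example (uaccess shown in parentheses):
 *
 *   PUSHF (1) -> 0b11,  PUSHF (0) -> 0b110,
 *   POPF -> uaccess=0, 0b11,  POPF -> uaccess=1, empty (0).
 */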
2209 if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2210 if (!state.uaccess_stack) {
2211 state.uaccess_stack = 1;
2212 } else if (state.uaccess_stack >> 31) {
2213 WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2214 return 1;
2215 }
2216 state.uaccess_stack <<= 1;
2217 state.uaccess_stack |= state.uaccess;
2218 }
2219
2220 if (insn->stack_op.src.type == OP_SRC_POPF) {
2221 if (state.uaccess_stack) {
2222 state.uaccess = state.uaccess_stack & 1;
2223 state.uaccess_stack >>= 1;
2224 if (state.uaccess_stack == 1)
2225 state.uaccess_stack = 0;
2226 }
2227 }
2228
2229 break;
2230
2231 case INSN_STAC:
2232 if (state.uaccess) {
2233 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2234 return 1;
2235 }
2236
2237 state.uaccess = true;
2238 break;
2239
2240 case INSN_CLAC:
2241 if (!state.uaccess && func) {
2242 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2243 return 1;
2244 }
2245
2246 if (func_uaccess_safe(func) && !state.uaccess_stack) {
2247 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2248 return 1;
2249 }
2250
2251 state.uaccess = false;
2252 break;
2253
2254 case INSN_STD:
2255 if (state.df)
2256 WARN_FUNC("recursive STD", sec, insn->offset);
2257
2258 state.df = true;
2259 break;
2260
2261 case INSN_CLD:
2262 if (!state.df && func)
2263 WARN_FUNC("redundant CLD", sec, insn->offset);
2264
2265 state.df = false;
2266 break;
2267
2268 default:
2269 break;
2270 }
2271
2272 if (insn->dead_end)
2273 return 0;
2274
2275 if (!next_insn) {
2276 if (state.cfa.base == CFI_UNDEFINED)
2277 return 0;
2278 WARN("%s: unexpected end of section", sec->name);
2279 return 1;
2280 }
2281
2282 insn = next_insn;
2283 }
2284
2285 return 0;
2286 }
2287
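/*
 * Run validate_branch() from every instruction that carries an unwind
 * hint but wasn't already reached by the normal per-function validation.
 */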
2288 static int validate_unwind_hints(struct objtool_file *file)
2289 {
2290 struct instruction *insn;
2291 int ret, warnings = 0;
2292 struct insn_state state;
2293
2294 if (!file->hints)
2295 return 0;
2296
2297 clear_insn_state(&state);
2298
2299 for_each_insn(file, insn) {
2300 if (insn->hint && !insn->visited) {
2301 ret = validate_branch(file, insn->func, insn, state);
2302 if (ret && backtrace)
2303 BT_FUNC("<=== (hint)", insn);
2304 warnings += ret;
2305 }
2306 }
2307
2308 return warnings;
2309 }
2310
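/*
 * In a retpoline build, flag any indirect jump or call that hasn't been
 * annotated as retpoline-safe.
 */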
2311 static int validate_retpoline(struct objtool_file *file)
2312 {
2313 struct instruction *insn;
2314 int warnings = 0;
2315
2316 for_each_insn(file, insn) {
2317 if (insn->type != INSN_JUMP_DYNAMIC &&
2318 insn->type != INSN_CALL_DYNAMIC)
2319 continue;
2320
2321 if (insn->retpoline_safe)
2322 continue;
2323
2324 /*
2325 * .init.text code is run before userspace and thus doesn't
2326 * strictly need retpolines, except for modules, which are
2327 * loaded late and therefore very much do need retpolines in
2328 * their .init.text.
2329 */
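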
2330 if (!strcmp(insn->sec->name, ".init.text") && !module)
2331 continue;
2332
2333 WARN_FUNC("indirect %s found in RETPOLINE build",
2334 insn->sec, insn->offset,
2335 insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2336
2337 warnings++;
2338 }
2339
2340 return warnings;
2341 }
2342
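/*
 * The next two helpers recognize KASAN/UBSAN instrumentation calls that
 * legitimately end a code path, so the instructions following them can
 * be excused from the unreachable-instruction check.
 */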
2343 static bool is_kasan_insn(struct instruction *insn)
2344 {
2345 return (insn->type == INSN_CALL &&
2346 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2347 }
2348
2349 static bool is_ubsan_insn(struct instruction *insn)
2350 {
2351 return (insn->type == INSN_CALL &&
2352 !strcmp(insn->call_dest->name,
2353 "__ubsan_handle_builtin_unreachable"));
2354 }
2355
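/*
 * Decide whether an instruction that was never visited should be excused
 * rather than reported as unreachable.
 */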
2356 static bool ignore_unreachable_insn(struct instruction *insn)
2357 {
2358 int i;
2359
2360 if (insn->ignore || insn->type == INSN_NOP)
2361 return true;
2362
2363 /*
2364 * Ignore any unused exceptions. This can happen when a whitelisted
2365 * function has an exception table entry.
2366 *
2367 * Also ignore alternative replacement instructions. This can happen
2368 * when a whitelisted function uses one of the ALTERNATIVE macros.
2369 */
2370 if (!strcmp(insn->sec->name, ".fixup") ||
2371 !strcmp(insn->sec->name, ".altinstr_replacement") ||
2372 !strcmp(insn->sec->name, ".altinstr_aux"))
2373 return true;
2374
2375 if (!insn->func)
2376 return false;
2377
2378 /*
2379 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
2380 * __builtin_unreachable(). The BUG() macro has an unreachable() after
2381 * the UD2, which causes GCC's undefined trap logic to emit another UD2
2382 * (or occasionally a JMP to UD2).
2383 */
2384 if (list_prev_entry(insn, list)->dead_end &&
2385 (insn->type == INSN_BUG ||
2386 (insn->type == INSN_JUMP_UNCONDITIONAL &&
2387 insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
2388 return true;
2389
2390 /*
2391 * Check if this (or a subsequent) instruction is related to
2392 * CONFIG_UBSAN or CONFIG_KASAN.
2393 *
2394 * End the search at 5 instructions to avoid going into the weeds.
2395 */
2396 for (i = 0; i < 5; i++) {
2397
2398 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2399 return true;
2400
2401 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2402 if (insn->jump_dest &&
2403 insn->jump_dest->func == insn->func) {
2404 insn = insn->jump_dest;
2405 continue;
2406 }
2407
2408 break;
2409 }
2410
2411 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2412 break;
2413
2414 insn = list_next_entry(insn, list);
2415 }
2416
2417 return false;
2418 }
2419
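/*
 * Validate every STT_FUNC symbol in the section, starting each one from
 * the architecture's initial CFI state. Aliases and compiler-generated
 * sub-functions (pfunc != func) are skipped here; they are covered
 * through their parent/primary symbol instead.
 */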
2420 static int validate_section(struct objtool_file *file, struct section *sec)
2421 {
2422 struct symbol *func;
2423 struct instruction *insn;
2424 struct insn_state state;
2425 int ret, warnings = 0;
2426
2427 clear_insn_state(&state);
2428
2429 state.cfa = initial_func_cfi.cfa;
2430 memcpy(&state.regs, &initial_func_cfi.regs,
2431 CFI_NUM_REGS * sizeof(struct cfi_reg));
2432 state.stack_size = initial_func_cfi.cfa.offset;
2433
2434 list_for_each_entry(func, &sec->symbol_list, list) {
2435 if (func->type != STT_FUNC)
2436 continue;
2437
2438 if (!func->len) {
2439 WARN("%s() is missing an ELF size annotation",
2440 func->name);
2441 warnings++;
2442 }
2443
2444 if (func->pfunc != func || func->alias != func)
2445 continue;
2446
2447 insn = find_insn(file, sec, func->offset);
2448 if (!insn || insn->ignore || insn->visited)
2449 continue;
2450
2451 state.uaccess = func->uaccess_safe;
2452
2453 ret = validate_branch(file, func, insn, state);
2454 if (ret && backtrace)
2455 BT_FUNC("<=== (func)", insn);
2456 warnings += ret;
2457 }
2458
2459 return warnings;
2460 }
2461
2462 static int validate_functions(struct objtool_file *file)
2463 {
2464 struct section *sec;
2465 int warnings = 0;
2466
2467 for_each_sec(file, sec)
2468 warnings += validate_section(file, sec);
2469
2470 return warnings;
2471 }
2472
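/*
 * Once everything else validated cleanly, report the first instruction
 * that was never visited and isn't excused by ignore_unreachable_insn().
 */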
2473 static int validate_reachable_instructions(struct objtool_file *file)
2474 {
2475 struct instruction *insn;
2476
2477 if (file->ignore_unreachables)
2478 return 0;
2479
2480 for_each_insn(file, insn) {
2481 if (insn->visited || ignore_unreachable_insn(insn))
2482 continue;
2483
2484 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2485 return 1;
2486 }
2487
2488 return 0;
2489 }
2490
2491 static struct objtool_file file;
2492
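/*
 * Top-level entry point: read the ELF file, decode its sections, run the
 * validation passes and, when requested, generate the ORC unwind data.
 * The subcommands are expected to drive this roughly as follows (sketch;
 * the actual option parsing lives in the builtin-*.c files):
 *
 *   cmd_check: check(objname, false);   // validate only
 *   cmd_orc:   check(objname, true);    // validate + write ORC sections
 */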
2493 int check(const char *_objname, bool orc)
2494 {
2495 int ret, warnings = 0;
2496
2497 objname = _objname;
2498
2499 file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2500 if (!file.elf)
2501 return 1;
2502
2503 INIT_LIST_HEAD(&file.insn_list);
2504 hash_init(file.insn_hash);
2505 file.c_file = find_section_by_name(file.elf, ".comment");
2506 file.ignore_unreachables = no_unreachable;
2507 file.hints = false;
2508
2509 arch_initial_func_cfi_state(&initial_func_cfi);
2510
2511 ret = decode_sections(&file);
2512 if (ret < 0)
2513 goto out;
2514 warnings += ret;
2515
2516 if (list_empty(&file.insn_list))
2517 goto out;
2518
2519 if (retpoline) {
2520 ret = validate_retpoline(&file);
2521 if (ret < 0)
2522 return ret;
2523 warnings += ret;
2524 }
2525
2526 ret = validate_functions(&file);
2527 if (ret < 0)
2528 goto out;
2529 warnings += ret;
2530
2531 ret = validate_unwind_hints(&file);
2532 if (ret < 0)
2533 goto out;
2534 warnings += ret;
2535
2536 if (!warnings) {
2537 ret = validate_reachable_instructions(&file);
2538 if (ret < 0)
2539 goto out;
2540 warnings += ret;
2541 }
2542
2543 if (orc) {
2544 ret = create_orc(&file);
2545 if (ret < 0)
2546 goto out;
2547
2548 ret = create_orc_sections(&file);
2549 if (ret < 0)
2550 goto out;
2551
2552 ret = elf_write(file.elf);
2553 if (ret < 0)
2554 goto out;
2555 }
2556
2557 out:
2558 if (ret < 0) {
2559 /*
2560 * Fatal error. The binary is corrupt or otherwise broken in
2561 * some way, or objtool itself is broken. Fail the kernel
2562 * build.
2563 */
2564 return ret;
2565 }
2566
2567 return 0;
2568 }