2024-05-10 37f49e37ab4cb5d0bc4c60eb5c6d4dd57db767bb
kernel/tools/objtool/check.c
@@ -1,51 +1,47 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, see <http://www.gnu.org/licenses/>.
 */

 #include <string.h>
 #include <stdlib.h>
+#include <inttypes.h>
+#include <sys/mman.h>

 #include "builtin.h"
-#include "check.h"
-#include "elf.h"
-#include "special.h"
+#include "cfi.h"
 #include "arch.h"
+#include "check.h"
+#include "special.h"
 #include "warn.h"
+#include "arch_elf.h"

+#include <linux/objtool.h>
 #include <linux/hashtable.h>
 #include <linux/kernel.h>
-
-#define FAKE_JUMP_OFFSET -1
+#include <linux/static_call_types.h>

 struct alternative {
 struct list_head list;
 struct instruction *insn;
+ bool skip_orig;
 };

-const char *objname;
-struct cfi_state initial_func_cfi;
+static unsigned long nr_cfi, nr_cfi_reused, nr_cfi_cache;
+
+static struct cfi_init_state initial_func_cfi;
+static struct cfi_state init_cfi;
+static struct cfi_state func_cfi;

 struct instruction *find_insn(struct objtool_file *file,
 struct section *sec, unsigned long offset)
 {
 struct instruction *insn;

- hash_for_each_possible(file->insn_hash, insn, hash, offset)
+ hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
 if (insn->sec == sec && insn->offset == offset)
 return insn;
+ }

 return NULL;
 }
@@ -81,22 +77,33 @@
 return find_insn(file, func->cfunc->sec, func->cfunc->offset);
 }

-#define func_for_each_insn_all(file, func, insn) \
+static struct instruction *prev_insn_same_sym(struct objtool_file *file,
+ struct instruction *insn)
+{
+ struct instruction *prev = list_prev_entry(insn, list);
+
+ if (&prev->list != &file->insn_list && prev->func == insn->func)
+ return prev;
+
+ return NULL;
+}
+
+#define func_for_each_insn(file, func, insn) \
 for (insn = find_insn(file, func->sec, func->offset); \
 insn; \
 insn = next_insn_same_func(file, insn))

-#define func_for_each_insn(file, func, insn) \
- for (insn = find_insn(file, func->sec, func->offset); \
+#define sym_for_each_insn(file, sym, insn) \
+ for (insn = find_insn(file, sym->sec, sym->offset); \
 insn && &insn->list != &file->insn_list && \
- insn->sec == func->sec && \
- insn->offset < func->offset + func->len; \
+ insn->sec == sym->sec && \
+ insn->offset < sym->offset + sym->len; \
 insn = list_next_entry(insn, list))

-#define func_for_each_insn_continue_reverse(file, func, insn) \
+#define sym_for_each_insn_continue_reverse(file, sym, insn) \
 for (insn = list_prev_entry(insn, list); \
 &insn->list != &file->insn_list && \
- insn->sec == func->sec && insn->offset >= func->offset; \
+ insn->sec == sym->sec && insn->offset >= sym->offset; \
 insn = list_prev_entry(insn, list))

 #define sec_for_each_insn_from(file, insn) \
@@ -106,27 +113,34 @@
 for (insn = next_insn_same_sec(file, insn); insn; \
 insn = next_insn_same_sec(file, insn))

-/*
- * Check if the function has been manually whitelisted with the
- * STACK_FRAME_NON_STANDARD macro, or if it should be automatically whitelisted
- * due to its use of a context switching instruction.
- */
-static bool ignore_func(struct objtool_file *file, struct symbol *func)
+static bool is_jump_table_jump(struct instruction *insn)
 {
- struct rela *rela;
+ struct alt_group *alt_group = insn->alt_group;

- /* check for STACK_FRAME_NON_STANDARD */
- if (file->whitelist && file->whitelist->rela)
- list_for_each_entry(rela, &file->whitelist->rela->rela_list, list) {
- if (rela->sym->type == STT_SECTION &&
- rela->sym->sec == func->sec &&
- rela->addend == func->offset)
- return true;
- if (rela->sym->type == STT_FUNC && rela->sym == func)
- return true;
- }
+ if (insn->jump_table)
+ return true;

- return false;
+ /* Retpoline alternative for a jump table? */
+ return alt_group && alt_group->orig_group &&
+ alt_group->orig_group->first_insn->jump_table;
+}
+
+static bool is_sibling_call(struct instruction *insn)
+{
+ /*
+ * Assume only ELF functions can make sibling calls. This ensures
+ * sibling call detection consistency between vmlinux.o and individual
+ * objects.
+ */
+ if (!insn->func)
+ return false;
+
+ /* An indirect jump is either a sibling call or a jump to a table. */
+ if (insn->type == INSN_JUMP_DYNAMIC)
+ return !is_jump_table_jump(insn);
+
+ /* add_jump_destinations() sets insn->call_dest for sibling calls. */
+ return (is_static_jump(insn) && insn->call_dest);
 }

 /*
@@ -137,14 +151,9 @@
 *
 * For local functions, we have to detect them manually by simply looking for
 * the lack of a return instruction.
- *
- * Returns:
- * -1: error
- * 0: no dead end
- * 1: dead end
 */
-static int __dead_end_function(struct objtool_file *file, struct symbol *func,
- int recursion)
+static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
+ int recursion)
 {
 int i;
 struct instruction *insn;
@@ -159,99 +168,182 @@
159168 "panic",
160169 "do_exit",
161170 "do_task_dead",
171
+ "make_task_dead",
162172 "__module_put_and_exit",
163173 "complete_and_exit",
164
- "kvm_spurious_fault",
165174 "__reiserfs_panic",
166175 "lbug_with_loc",
167176 "fortify_panic",
168177 "usercopy_abort",
169178 "machine_real_restart",
170
- "rewind_stack_do_exit",
179
+ "rewind_stack_and_make_dead",
180
+ "kunit_try_catch_throw",
181
+ "xen_start_kernel",
182
+ "cpu_bringup_and_idle",
183
+ "stop_this_cpu",
171184 };
172185
186
+ if (!func)
187
+ return false;
188
+
173189 if (func->bind == STB_WEAK)
174
- return 0;
190
+ return false;
175191
176192 if (func->bind == STB_GLOBAL)
177193 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
178194 if (!strcmp(func->name, global_noreturns[i]))
179
- return 1;
195
+ return true;
180196
181197 if (!func->len)
182
- return 0;
198
+ return false;
183199
184200 insn = find_insn(file, func->sec, func->offset);
185
- if (!insn->func)
186
- return 0;
201
+ if (!insn || !insn->func)
202
+ return false;
187203
188
- func_for_each_insn_all(file, func, insn) {
204
+ func_for_each_insn(file, func, insn) {
189205 empty = false;
190206
191207 if (insn->type == INSN_RETURN)
192
- return 0;
208
+ return false;
193209 }
194210
195211 if (empty)
196
- return 0;
212
+ return false;
197213
198214 /*
199215 * A function can have a sibling call instead of a return. In that
200216 * case, the function's dead-end status depends on whether the target
201217 * of the sibling call returns.
202218 */
203
- func_for_each_insn_all(file, func, insn) {
204
- if (insn->type == INSN_JUMP_UNCONDITIONAL) {
219
+ func_for_each_insn(file, func, insn) {
220
+ if (is_sibling_call(insn)) {
205221 struct instruction *dest = insn->jump_dest;
206222
207223 if (!dest)
208224 /* sibling call to another file */
209
- return 0;
225
+ return false;
210226
211
- if (dest->func && dest->func->pfunc != insn->func->pfunc) {
212
-
213
- /* local sibling call */
214
- if (recursion == 5) {
215
- /*
216
- * Infinite recursion: two functions
217
- * have sibling calls to each other.
218
- * This is a very rare case. It means
219
- * they aren't dead ends.
220
- */
221
- return 0;
222
- }
223
-
224
- return __dead_end_function(file, dest->func,
225
- recursion + 1);
227
+ /* local sibling call */
228
+ if (recursion == 5) {
229
+ /*
230
+ * Infinite recursion: two functions have
231
+ * sibling calls to each other. This is a very
232
+ * rare case. It means they aren't dead ends.
233
+ */
234
+ return false;
226235 }
227
- }
228236
229
- if (insn->type == INSN_JUMP_DYNAMIC && list_empty(&insn->alts))
230
- /* sibling call */
231
- return 0;
237
+ return __dead_end_function(file, dest->func, recursion+1);
238
+ }
232239 }
233240
234
- return 1;
241
+ return true;
235242 }
236243
237
-static int dead_end_function(struct objtool_file *file, struct symbol *func)
244
+static bool dead_end_function(struct objtool_file *file, struct symbol *func)
238245 {
239246 return __dead_end_function(file, func, 0);
240247 }
241248
242
-static void clear_insn_state(struct insn_state *state)
249
+static void init_cfi_state(struct cfi_state *cfi)
243250 {
244251 int i;
245252
246
- memset(state, 0, sizeof(*state));
247
- state->cfa.base = CFI_UNDEFINED;
248253 for (i = 0; i < CFI_NUM_REGS; i++) {
249
- state->regs[i].base = CFI_UNDEFINED;
250
- state->vals[i].base = CFI_UNDEFINED;
254
+ cfi->regs[i].base = CFI_UNDEFINED;
255
+ cfi->vals[i].base = CFI_UNDEFINED;
251256 }
252
- state->drap_reg = CFI_UNDEFINED;
253
- state->drap_offset = -1;
257
+ cfi->cfa.base = CFI_UNDEFINED;
258
+ cfi->drap_reg = CFI_UNDEFINED;
259
+ cfi->drap_offset = -1;
254260 }
261
+
262
+static void init_insn_state(struct insn_state *state, struct section *sec)
263
+{
264
+ memset(state, 0, sizeof(*state));
265
+ init_cfi_state(&state->cfi);
266
+
267
+ /*
268
+ * We need the full vmlinux for noinstr validation, otherwise we can
269
+ * not correctly determine insn->call_dest->sec (external symbols do
270
+ * not have a section).
271
+ */
272
+ if (vmlinux && noinstr && sec)
273
+ state->noinstr = sec->noinstr;
274
+}
275
+
276
+static struct cfi_state *cfi_alloc(void)
277
+{
278
+ struct cfi_state *cfi = calloc(sizeof(struct cfi_state), 1);
279
+ if (!cfi) {
280
+ WARN("calloc failed");
281
+ exit(1);
282
+ }
283
+ nr_cfi++;
284
+ return cfi;
285
+}
286
+
287
+static int cfi_bits;
288
+static struct hlist_head *cfi_hash;
289
+
290
+static inline bool cficmp(struct cfi_state *cfi1, struct cfi_state *cfi2)
291
+{
292
+ return memcmp((void *)cfi1 + sizeof(cfi1->hash),
293
+ (void *)cfi2 + sizeof(cfi2->hash),
294
+ sizeof(struct cfi_state) - sizeof(struct hlist_node));
295
+}
296
+
297
+static inline u32 cfi_key(struct cfi_state *cfi)
298
+{
299
+ return jhash((void *)cfi + sizeof(cfi->hash),
300
+ sizeof(*cfi) - sizeof(cfi->hash), 0);
301
+}
302
+
303
+static struct cfi_state *cfi_hash_find_or_add(struct cfi_state *cfi)
304
+{
305
+ struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
306
+ struct cfi_state *obj;
307
+
308
+ hlist_for_each_entry(obj, head, hash) {
309
+ if (!cficmp(cfi, obj)) {
310
+ nr_cfi_cache++;
311
+ return obj;
312
+ }
313
+ }
314
+
315
+ obj = cfi_alloc();
316
+ *obj = *cfi;
317
+ hlist_add_head(&obj->hash, head);
318
+
319
+ return obj;
320
+}
321
+
322
+static void cfi_hash_add(struct cfi_state *cfi)
323
+{
324
+ struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
325
+
326
+ hlist_add_head(&cfi->hash, head);
327
+}
328
+
329
+static void *cfi_hash_alloc(void)
330
+{
331
+ cfi_bits = vmlinux ? ELF_HASH_BITS - 3 : 13;
332
+ cfi_hash = mmap(NULL, sizeof(struct hlist_head) << cfi_bits,
333
+ PROT_READ|PROT_WRITE,
334
+ MAP_PRIVATE|MAP_ANON, -1, 0);
335
+ if (cfi_hash == (void *)-1L) {
336
+ WARN("mmap fail cfi_hash");
337
+ cfi_hash = NULL;
338
+ } else if (stats) {
339
+ printf("cfi_bits: %d\n", cfi_bits);
340
+ }
341
+
342
+ return cfi_hash;
343
+}
344
+
345
+static unsigned long nr_insns;
346
+static unsigned long nr_insns_visited;
255347
256348 /*
257349 * Call the arch-specific instruction decoder for all the instructions and add
@@ -275,6 +367,11 @@
275367 strncmp(sec->name, ".discard.", 9))
276368 sec->text = true;
277369
370
+ if (!strcmp(sec->name, ".noinstr.text") ||
371
+ !strcmp(sec->name, ".entry.text") ||
372
+ !strncmp(sec->name, ".text..__x86.", 13))
373
+ sec->noinstr = true;
374
+
278375 for (offset = 0; offset < sec->len; offset += insn->len) {
279376 insn = malloc(sizeof(*insn));
280377 if (!insn) {
@@ -283,7 +380,7 @@
283380 }
284381 memset(insn, 0, sizeof(*insn));
285382 INIT_LIST_HEAD(&insn->alts);
286
- clear_insn_state(&insn->state);
383
+ INIT_LIST_HEAD(&insn->stack_ops);
287384
288385 insn->sec = sec;
289386 insn->offset = offset;
@@ -292,23 +389,17 @@
292389 sec->len - offset,
293390 &insn->len, &insn->type,
294391 &insn->immediate,
295
- &insn->stack_op);
392
+ &insn->stack_ops);
296393 if (ret)
297394 goto err;
298395
299
- if (!insn->type || insn->type > INSN_LAST) {
300
- WARN_FUNC("invalid instruction type %d",
301
- insn->sec, insn->offset, insn->type);
302
- ret = -1;
303
- goto err;
304
- }
305
-
306
- hash_add(file->insn_hash, &insn->hash, insn->offset);
396
+ hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
307397 list_add_tail(&insn->list, &file->insn_list);
398
+ nr_insns++;
308399 }
309400
310401 list_for_each_entry(func, &sec->symbol_list, list) {
311
- if (func->type != STT_FUNC)
402
+ if (func->type != STT_FUNC || func->alias != func)
312403 continue;
313404
314405 if (!find_insn(file, sec, func->offset)) {
@@ -317,11 +408,13 @@
317408 return -1;
318409 }
319410
320
- func_for_each_insn(file, func, insn)
321
- if (!insn->func)
322
- insn->func = func;
411
+ sym_for_each_insn(file, func, insn)
412
+ insn->func = func;
323413 }
324414 }
415
+
416
+ if (stats)
417
+ printf("nr_insns: %lu\n", nr_insns);
325418
326419 return 0;
327420
@@ -330,15 +423,27 @@
330423 return ret;
331424 }
332425
426
+static struct instruction *find_last_insn(struct objtool_file *file,
427
+ struct section *sec)
428
+{
429
+ struct instruction *insn = NULL;
430
+ unsigned int offset;
431
+ unsigned int end = (sec->len > 10) ? sec->len - 10 : 0;
432
+
433
+ for (offset = sec->len - 1; offset >= end && !insn; offset--)
434
+ insn = find_insn(file, sec, offset);
435
+
436
+ return insn;
437
+}
438
+
333439 /*
334440 * Mark "ud2" instructions and manually annotated dead ends.
335441 */
336442 static int add_dead_ends(struct objtool_file *file)
337443 {
338444 struct section *sec;
339
- struct rela *rela;
445
+ struct reloc *reloc;
340446 struct instruction *insn;
341
- bool found;
342447
343448 /*
344449 * By default, "ud2" is a dead end unless otherwise annotated, because
@@ -355,31 +460,24 @@
355460 if (!sec)
356461 goto reachable;
357462
358
- list_for_each_entry(rela, &sec->rela_list, list) {
359
- if (rela->sym->type != STT_SECTION) {
463
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
464
+ if (reloc->sym->type != STT_SECTION) {
360465 WARN("unexpected relocation symbol type in %s", sec->name);
361466 return -1;
362467 }
363
- insn = find_insn(file, rela->sym->sec, rela->addend);
468
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
364469 if (insn)
365470 insn = list_prev_entry(insn, list);
366
- else if (rela->addend == rela->sym->sec->len) {
367
- found = false;
368
- list_for_each_entry_reverse(insn, &file->insn_list, list) {
369
- if (insn->sec == rela->sym->sec) {
370
- found = true;
371
- break;
372
- }
373
- }
374
-
375
- if (!found) {
376
- WARN("can't find unreachable insn at %s+0x%x",
377
- rela->sym->sec->name, rela->addend);
471
+ else if (reloc->addend == reloc->sym->sec->len) {
472
+ insn = find_last_insn(file, reloc->sym->sec);
473
+ if (!insn) {
474
+ WARN("can't find unreachable insn at %s+0x%" PRIx64,
475
+ reloc->sym->sec->name, reloc->addend);
378476 return -1;
379477 }
380478 } else {
381
- WARN("can't find unreachable insn at %s+0x%x",
382
- rela->sym->sec->name, rela->addend);
479
+ WARN("can't find unreachable insn at %s+0x%" PRIx64,
480
+ reloc->sym->sec->name, reloc->addend);
383481 return -1;
384482 }
385483
@@ -397,35 +495,253 @@
397495 if (!sec)
398496 return 0;
399497
400
- list_for_each_entry(rela, &sec->rela_list, list) {
401
- if (rela->sym->type != STT_SECTION) {
498
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
499
+ if (reloc->sym->type != STT_SECTION) {
402500 WARN("unexpected relocation symbol type in %s", sec->name);
403501 return -1;
404502 }
405
- insn = find_insn(file, rela->sym->sec, rela->addend);
503
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
406504 if (insn)
407505 insn = list_prev_entry(insn, list);
408
- else if (rela->addend == rela->sym->sec->len) {
409
- found = false;
410
- list_for_each_entry_reverse(insn, &file->insn_list, list) {
411
- if (insn->sec == rela->sym->sec) {
412
- found = true;
413
- break;
414
- }
415
- }
416
-
417
- if (!found) {
418
- WARN("can't find reachable insn at %s+0x%x",
419
- rela->sym->sec->name, rela->addend);
506
+ else if (reloc->addend == reloc->sym->sec->len) {
507
+ insn = find_last_insn(file, reloc->sym->sec);
508
+ if (!insn) {
509
+ WARN("can't find reachable insn at %s+0x%" PRIx64,
510
+ reloc->sym->sec->name, reloc->addend);
420511 return -1;
421512 }
422513 } else {
423
- WARN("can't find reachable insn at %s+0x%x",
424
- rela->sym->sec->name, rela->addend);
514
+ WARN("can't find reachable insn at %s+0x%" PRIx64,
515
+ reloc->sym->sec->name, reloc->addend);
425516 return -1;
426517 }
427518
428519 insn->dead_end = false;
520
+ }
521
+
522
+ return 0;
523
+}
524
+
525
+static int create_static_call_sections(struct objtool_file *file)
526
+{
527
+ struct section *sec;
528
+ struct static_call_site *site;
529
+ struct instruction *insn;
530
+ struct symbol *key_sym;
531
+ char *key_name, *tmp;
532
+ int idx;
533
+
534
+ sec = find_section_by_name(file->elf, ".static_call_sites");
535
+ if (sec) {
536
+ INIT_LIST_HEAD(&file->static_call_list);
537
+ WARN("file already has .static_call_sites section, skipping");
538
+ return 0;
539
+ }
540
+
541
+ if (list_empty(&file->static_call_list))
542
+ return 0;
543
+
544
+ idx = 0;
545
+ list_for_each_entry(insn, &file->static_call_list, call_node)
546
+ idx++;
547
+
548
+ sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
549
+ sizeof(struct static_call_site), idx);
550
+ if (!sec)
551
+ return -1;
552
+
553
+ idx = 0;
554
+ list_for_each_entry(insn, &file->static_call_list, call_node) {
555
+
556
+ site = (struct static_call_site *)sec->data->d_buf + idx;
557
+ memset(site, 0, sizeof(struct static_call_site));
558
+
559
+ /* populate reloc for 'addr' */
560
+ if (elf_add_reloc_to_insn(file->elf, sec,
561
+ idx * sizeof(struct static_call_site),
562
+ R_X86_64_PC32,
563
+ insn->sec, insn->offset))
564
+ return -1;
565
+
566
+ /* find key symbol */
567
+ key_name = strdup(insn->call_dest->name);
568
+ if (!key_name) {
569
+ perror("strdup");
570
+ return -1;
571
+ }
572
+ if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
573
+ STATIC_CALL_TRAMP_PREFIX_LEN)) {
574
+ WARN("static_call: trampoline name malformed: %s", key_name);
575
+ free(key_name);
576
+ return -1;
577
+ }
578
+ tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
579
+ memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
580
+
581
+ key_sym = find_symbol_by_name(file->elf, tmp);
582
+ if (!key_sym) {
583
+ if (!module) {
584
+ WARN("static_call: can't find static_call_key symbol: %s", tmp);
585
+ free(key_name);
586
+ return -1;
587
+ }
588
+
589
+ /*
590
+ * For modules(), the key might not be exported, which
591
+ * means the module can make static calls but isn't
592
+ * allowed to change them.
593
+ *
594
+ * In that case we temporarily set the key to be the
595
+ * trampoline address. This is fixed up in
596
+ * static_call_add_module().
597
+ */
598
+ key_sym = insn->call_dest;
599
+ }
600
+ free(key_name);
601
+
602
+ /* populate reloc for 'key' */
603
+ if (elf_add_reloc(file->elf, sec,
604
+ idx * sizeof(struct static_call_site) + 4,
605
+ R_X86_64_PC32, key_sym,
606
+ is_sibling_call(insn) * STATIC_CALL_SITE_TAIL))
607
+ return -1;
608
+
609
+ idx++;
610
+ }
611
+
612
+ return 0;
613
+}
614
+
615
+static int create_retpoline_sites_sections(struct objtool_file *file)
616
+{
617
+ struct instruction *insn;
618
+ struct section *sec;
619
+ int idx;
620
+
621
+ sec = find_section_by_name(file->elf, ".retpoline_sites");
622
+ if (sec) {
623
+ WARN("file already has .retpoline_sites, skipping");
624
+ return 0;
625
+ }
626
+
627
+ idx = 0;
628
+ list_for_each_entry(insn, &file->retpoline_call_list, call_node)
629
+ idx++;
630
+
631
+ if (!idx)
632
+ return 0;
633
+
634
+ sec = elf_create_section(file->elf, ".retpoline_sites", 0,
635
+ sizeof(int), idx);
636
+ if (!sec) {
637
+ WARN("elf_create_section: .retpoline_sites");
638
+ return -1;
639
+ }
640
+
641
+ idx = 0;
642
+ list_for_each_entry(insn, &file->retpoline_call_list, call_node) {
643
+
644
+ int *site = (int *)sec->data->d_buf + idx;
645
+ *site = 0;
646
+
647
+ if (elf_add_reloc_to_insn(file->elf, sec,
648
+ idx * sizeof(int),
649
+ R_X86_64_PC32,
650
+ insn->sec, insn->offset)) {
651
+ WARN("elf_add_reloc_to_insn: .retpoline_sites");
652
+ return -1;
653
+ }
654
+
655
+ idx++;
656
+ }
657
+
658
+ return 0;
659
+}
660
+
661
+static int create_return_sites_sections(struct objtool_file *file)
662
+{
663
+ struct instruction *insn;
664
+ struct section *sec;
665
+ int idx;
666
+
667
+ sec = find_section_by_name(file->elf, ".return_sites");
668
+ if (sec) {
669
+ WARN("file already has .return_sites, skipping");
670
+ return 0;
671
+ }
672
+
673
+ idx = 0;
674
+ list_for_each_entry(insn, &file->return_thunk_list, call_node)
675
+ idx++;
676
+
677
+ if (!idx)
678
+ return 0;
679
+
680
+ sec = elf_create_section(file->elf, ".return_sites", 0,
681
+ sizeof(int), idx);
682
+ if (!sec) {
683
+ WARN("elf_create_section: .return_sites");
684
+ return -1;
685
+ }
686
+
687
+ idx = 0;
688
+ list_for_each_entry(insn, &file->return_thunk_list, call_node) {
689
+
690
+ int *site = (int *)sec->data->d_buf + idx;
691
+ *site = 0;
692
+
693
+ if (elf_add_reloc_to_insn(file->elf, sec,
694
+ idx * sizeof(int),
695
+ R_X86_64_PC32,
696
+ insn->sec, insn->offset)) {
697
+ WARN("elf_add_reloc_to_insn: .return_sites");
698
+ return -1;
699
+ }
700
+
701
+ idx++;
702
+ }
703
+
704
+ return 0;
705
+}
706
+
707
+static int create_mcount_loc_sections(struct objtool_file *file)
708
+{
709
+ struct section *sec;
710
+ unsigned long *loc;
711
+ struct instruction *insn;
712
+ int idx;
713
+
714
+ sec = find_section_by_name(file->elf, "__mcount_loc");
715
+ if (sec) {
716
+ INIT_LIST_HEAD(&file->mcount_loc_list);
717
+ WARN("file already has __mcount_loc section, skipping");
718
+ return 0;
719
+ }
720
+
721
+ if (list_empty(&file->mcount_loc_list))
722
+ return 0;
723
+
724
+ idx = 0;
725
+ list_for_each_entry(insn, &file->mcount_loc_list, mcount_loc_node)
726
+ idx++;
727
+
728
+ sec = elf_create_section(file->elf, "__mcount_loc", 0, sizeof(unsigned long), idx);
729
+ if (!sec)
730
+ return -1;
731
+
732
+ idx = 0;
733
+ list_for_each_entry(insn, &file->mcount_loc_list, mcount_loc_node) {
734
+
735
+ loc = (unsigned long *)sec->data->d_buf + idx;
736
+ memset(loc, 0, sizeof(unsigned long));
737
+
738
+ if (elf_add_reloc_to_insn(file->elf, sec,
739
+ idx * sizeof(unsigned long),
740
+ R_X86_64_64,
741
+ insn->sec, insn->offset))
742
+ return -1;
743
+
744
+ idx++;
429745 }
430746
431747 return 0;
@@ -439,18 +755,203 @@
439755 struct instruction *insn;
440756 struct section *sec;
441757 struct symbol *func;
758
+ struct reloc *reloc;
442759
443
- for_each_sec(file, sec) {
444
- list_for_each_entry(func, &sec->symbol_list, list) {
445
- if (func->type != STT_FUNC)
760
+ sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
761
+ if (!sec)
762
+ return;
763
+
764
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
765
+ switch (reloc->sym->type) {
766
+ case STT_FUNC:
767
+ func = reloc->sym;
768
+ break;
769
+
770
+ case STT_SECTION:
771
+ func = find_func_by_offset(reloc->sym->sec, reloc->addend);
772
+ if (!func)
446773 continue;
774
+ break;
447775
448
- if (!ignore_func(file, func))
449
- continue;
450
-
451
- func_for_each_insn_all(file, func, insn)
452
- insn->ignore = true;
776
+ default:
777
+ WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
778
+ continue;
453779 }
780
+
781
+ func_for_each_insn(file, func, insn)
782
+ insn->ignore = true;
783
+ }
784
+}
785
+
786
+/*
787
+ * This is a whitelist of functions that is allowed to be called with AC set.
788
+ * The list is meant to be minimal and only contains compiler instrumentation
789
+ * ABI and a few functions used to implement *_{to,from}_user() functions.
790
+ *
791
+ * These functions must not directly change AC, but may PUSHF/POPF.
792
+ */
793
+static const char *uaccess_safe_builtin[] = {
794
+ /* KASAN */
795
+ "kasan_report",
796
+ "kasan_check_range",
797
+ /* KASAN out-of-line */
798
+ "__asan_loadN_noabort",
799
+ "__asan_load1_noabort",
800
+ "__asan_load2_noabort",
801
+ "__asan_load4_noabort",
802
+ "__asan_load8_noabort",
803
+ "__asan_load16_noabort",
804
+ "__asan_storeN_noabort",
805
+ "__asan_store1_noabort",
806
+ "__asan_store2_noabort",
807
+ "__asan_store4_noabort",
808
+ "__asan_store8_noabort",
809
+ "__asan_store16_noabort",
810
+ "__kasan_check_read",
811
+ "__kasan_check_write",
812
+ /* KASAN in-line */
813
+ "__asan_report_load_n_noabort",
814
+ "__asan_report_load1_noabort",
815
+ "__asan_report_load2_noabort",
816
+ "__asan_report_load4_noabort",
817
+ "__asan_report_load8_noabort",
818
+ "__asan_report_load16_noabort",
819
+ "__asan_report_store_n_noabort",
820
+ "__asan_report_store1_noabort",
821
+ "__asan_report_store2_noabort",
822
+ "__asan_report_store4_noabort",
823
+ "__asan_report_store8_noabort",
824
+ "__asan_report_store16_noabort",
825
+ /* KCSAN */
826
+ "__kcsan_check_access",
827
+ "kcsan_found_watchpoint",
828
+ "kcsan_setup_watchpoint",
829
+ "kcsan_check_scoped_accesses",
830
+ "kcsan_disable_current",
831
+ "kcsan_enable_current_nowarn",
832
+ /* KCSAN/TSAN */
833
+ "__tsan_func_entry",
834
+ "__tsan_func_exit",
835
+ "__tsan_read_range",
836
+ "__tsan_write_range",
837
+ "__tsan_read1",
838
+ "__tsan_read2",
839
+ "__tsan_read4",
840
+ "__tsan_read8",
841
+ "__tsan_read16",
842
+ "__tsan_write1",
843
+ "__tsan_write2",
844
+ "__tsan_write4",
845
+ "__tsan_write8",
846
+ "__tsan_write16",
847
+ "__tsan_read_write1",
848
+ "__tsan_read_write2",
849
+ "__tsan_read_write4",
850
+ "__tsan_read_write8",
851
+ "__tsan_read_write16",
852
+ "__tsan_volatile_read1",
853
+ "__tsan_volatile_read2",
854
+ "__tsan_volatile_read4",
855
+ "__tsan_volatile_read8",
856
+ "__tsan_volatile_read16",
857
+ "__tsan_volatile_write1",
858
+ "__tsan_volatile_write2",
859
+ "__tsan_volatile_write4",
860
+ "__tsan_volatile_write8",
861
+ "__tsan_volatile_write16",
862
+ "__tsan_atomic8_load",
863
+ "__tsan_atomic16_load",
864
+ "__tsan_atomic32_load",
865
+ "__tsan_atomic64_load",
866
+ "__tsan_atomic8_store",
867
+ "__tsan_atomic16_store",
868
+ "__tsan_atomic32_store",
869
+ "__tsan_atomic64_store",
870
+ "__tsan_atomic8_exchange",
871
+ "__tsan_atomic16_exchange",
872
+ "__tsan_atomic32_exchange",
873
+ "__tsan_atomic64_exchange",
874
+ "__tsan_atomic8_fetch_add",
875
+ "__tsan_atomic16_fetch_add",
876
+ "__tsan_atomic32_fetch_add",
877
+ "__tsan_atomic64_fetch_add",
878
+ "__tsan_atomic8_fetch_sub",
879
+ "__tsan_atomic16_fetch_sub",
880
+ "__tsan_atomic32_fetch_sub",
881
+ "__tsan_atomic64_fetch_sub",
882
+ "__tsan_atomic8_fetch_and",
883
+ "__tsan_atomic16_fetch_and",
884
+ "__tsan_atomic32_fetch_and",
885
+ "__tsan_atomic64_fetch_and",
886
+ "__tsan_atomic8_fetch_or",
887
+ "__tsan_atomic16_fetch_or",
888
+ "__tsan_atomic32_fetch_or",
889
+ "__tsan_atomic64_fetch_or",
890
+ "__tsan_atomic8_fetch_xor",
891
+ "__tsan_atomic16_fetch_xor",
892
+ "__tsan_atomic32_fetch_xor",
893
+ "__tsan_atomic64_fetch_xor",
894
+ "__tsan_atomic8_fetch_nand",
895
+ "__tsan_atomic16_fetch_nand",
896
+ "__tsan_atomic32_fetch_nand",
897
+ "__tsan_atomic64_fetch_nand",
898
+ "__tsan_atomic8_compare_exchange_strong",
899
+ "__tsan_atomic16_compare_exchange_strong",
900
+ "__tsan_atomic32_compare_exchange_strong",
901
+ "__tsan_atomic64_compare_exchange_strong",
902
+ "__tsan_atomic8_compare_exchange_weak",
903
+ "__tsan_atomic16_compare_exchange_weak",
904
+ "__tsan_atomic32_compare_exchange_weak",
905
+ "__tsan_atomic64_compare_exchange_weak",
906
+ "__tsan_atomic8_compare_exchange_val",
907
+ "__tsan_atomic16_compare_exchange_val",
908
+ "__tsan_atomic32_compare_exchange_val",
909
+ "__tsan_atomic64_compare_exchange_val",
910
+ "__tsan_atomic_thread_fence",
911
+ "__tsan_atomic_signal_fence",
912
+ "__tsan_unaligned_read16",
913
+ "__tsan_unaligned_write16",
914
+ /* KCOV */
915
+ "write_comp_data",
916
+ "check_kcov_mode",
917
+ "__sanitizer_cov_trace_pc",
918
+ "__sanitizer_cov_trace_const_cmp1",
919
+ "__sanitizer_cov_trace_const_cmp2",
920
+ "__sanitizer_cov_trace_const_cmp4",
921
+ "__sanitizer_cov_trace_const_cmp8",
922
+ "__sanitizer_cov_trace_cmp1",
923
+ "__sanitizer_cov_trace_cmp2",
924
+ "__sanitizer_cov_trace_cmp4",
925
+ "__sanitizer_cov_trace_cmp8",
926
+ "__sanitizer_cov_trace_switch",
927
+ /* UBSAN */
928
+ "ubsan_type_mismatch_common",
929
+ "__ubsan_handle_type_mismatch",
930
+ "__ubsan_handle_type_mismatch_v1",
931
+ "__ubsan_handle_shift_out_of_bounds",
932
+ /* misc */
933
+ "csum_partial_copy_generic",
934
+ "copy_mc_fragile",
935
+ "copy_mc_fragile_handle_tail",
936
+ "copy_mc_enhanced_fast_string",
937
+ "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
938
+ NULL
939
+};
940
+
941
+static void add_uaccess_safe(struct objtool_file *file)
942
+{
943
+ struct symbol *func;
944
+ const char **name;
945
+
946
+ if (!uaccess)
947
+ return;
948
+
949
+ for (name = uaccess_safe_builtin; *name; name++) {
950
+ func = find_symbol_by_name(file->elf, *name);
951
+ if (!func)
952
+ continue;
953
+
954
+ func->uaccess_safe = true;
454955 }
455956 }
456957
@@ -460,25 +961,25 @@
460961 * But it at least allows objtool to understand the control flow *around* the
461962 * retpoline.
462963 */
463
-static int add_nospec_ignores(struct objtool_file *file)
964
+static int add_ignore_alternatives(struct objtool_file *file)
464965 {
465966 struct section *sec;
466
- struct rela *rela;
967
+ struct reloc *reloc;
467968 struct instruction *insn;
468969
469
- sec = find_section_by_name(file->elf, ".rela.discard.nospec");
970
+ sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
470971 if (!sec)
471972 return 0;
472973
473
- list_for_each_entry(rela, &sec->rela_list, list) {
474
- if (rela->sym->type != STT_SECTION) {
974
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
975
+ if (reloc->sym->type != STT_SECTION) {
475976 WARN("unexpected relocation symbol type in %s", sec->name);
476977 return -1;
477978 }
478979
479
- insn = find_insn(file, rela->sym->sec, rela->addend);
980
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
480981 if (!insn) {
481
- WARN("bad .discard.nospec entry");
982
+ WARN("bad .discard.ignore_alts entry");
482983 return -1;
483984 }
484985
@@ -489,50 +990,266 @@
489990 }
490991
491992 /*
993
+ * Symbols that replace INSN_CALL_DYNAMIC, every (tail) call to such a symbol
994
+ * will be added to the .retpoline_sites section.
995
+ */
996
+__weak bool arch_is_retpoline(struct symbol *sym)
997
+{
998
+ return false;
999
+}
1000
+
1001
+/*
1002
+ * Symbols that replace INSN_RETURN, every (tail) call to such a symbol
1003
+ * will be added to the .return_sites section.
1004
+ */
1005
+__weak bool arch_is_rethunk(struct symbol *sym)
1006
+{
1007
+ return false;
1008
+}
1009
+
1010
+/*
1011
+ * Symbols that are embedded inside other instructions, because sometimes crazy
1012
+ * code exists. These are mostly ignored for validation purposes.
1013
+ */
1014
+__weak bool arch_is_embedded_insn(struct symbol *sym)
1015
+{
1016
+ return false;
1017
+}
1018
+
1019
+#define NEGATIVE_RELOC ((void *)-1L)
1020
+
1021
+static struct reloc *insn_reloc(struct objtool_file *file, struct instruction *insn)
1022
+{
1023
+ if (insn->reloc == NEGATIVE_RELOC)
1024
+ return NULL;
1025
+
1026
+ if (!insn->reloc) {
1027
+ insn->reloc = find_reloc_by_dest_range(file->elf, insn->sec,
1028
+ insn->offset, insn->len);
1029
+ if (!insn->reloc) {
1030
+ insn->reloc = NEGATIVE_RELOC;
1031
+ return NULL;
1032
+ }
1033
+ }
1034
+
1035
+ return insn->reloc;
1036
+}
1037
+
1038
+static void remove_insn_ops(struct instruction *insn)
1039
+{
1040
+ struct stack_op *op, *tmp;
1041
+
1042
+ list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
1043
+ list_del(&op->list);
1044
+ free(op);
1045
+ }
1046
+}
1047
+
1048
+static void annotate_call_site(struct objtool_file *file,
1049
+ struct instruction *insn, bool sibling)
1050
+{
1051
+ struct reloc *reloc = insn_reloc(file, insn);
1052
+ struct symbol *sym = insn->call_dest;
1053
+
1054
+ if (!sym)
1055
+ sym = reloc->sym;
1056
+
1057
+ /*
1058
+ * Alternative replacement code is just template code which is
1059
+ * sometimes copied to the original instruction. For now, don't
1060
+ * annotate it. (In the future we might consider annotating the
1061
+ * original instruction if/when it ever makes sense to do so.)
1062
+ */
1063
+ if (!strcmp(insn->sec->name, ".altinstr_replacement"))
1064
+ return;
1065
+
1066
+ if (sym->static_call_tramp) {
1067
+ list_add_tail(&insn->call_node, &file->static_call_list);
1068
+ return;
1069
+ }
1070
+
1071
+ if (sym->retpoline_thunk) {
1072
+ list_add_tail(&insn->call_node, &file->retpoline_call_list);
1073
+ return;
1074
+ }
1075
+
1076
+ /*
1077
+ * Many compilers cannot disable KCOV with a function attribute
1078
+ * so they need a little help, NOP out any KCOV calls from noinstr
1079
+ * text.
1080
+ */
1081
+ if (insn->sec->noinstr && sym->kcov) {
1082
+ if (reloc) {
1083
+ reloc->type = R_NONE;
1084
+ elf_write_reloc(file->elf, reloc);
1085
+ }
1086
+
1087
+ elf_write_insn(file->elf, insn->sec,
1088
+ insn->offset, insn->len,
1089
+ sibling ? arch_ret_insn(insn->len)
1090
+ : arch_nop_insn(insn->len));
1091
+
1092
+ insn->type = sibling ? INSN_RETURN : INSN_NOP;
1093
+
1094
+ if (sibling) {
1095
+ /*
1096
+ * We've replaced the tail-call JMP insn by two new
1097
+ * insn: RET; INT3, except we only have a single struct
1098
+ * insn here. Mark it retpoline_safe to avoid the SLS
1099
+ * warning, instead of adding another insn.
1100
+ */
1101
+ insn->retpoline_safe = true;
1102
+ }
1103
+
1104
+ return;
1105
+ }
1106
+}
1107
+
1108
+static void add_call_dest(struct objtool_file *file, struct instruction *insn,
1109
+ struct symbol *dest, bool sibling)
1110
+{
1111
+ insn->call_dest = dest;
1112
+ if (!dest)
1113
+ return;
1114
+
1115
+ /*
1116
+ * Whatever stack impact regular CALLs have, should be undone
1117
+ * by the RETURN of the called function.
1118
+ *
1119
+ * Annotated intra-function calls retain the stack_ops but
1120
+ * are converted to JUMP, see read_intra_function_calls().
1121
+ */
1122
+ remove_insn_ops(insn);
1123
+
1124
+ annotate_call_site(file, insn, sibling);
1125
+}
1126
+
1127
+static void add_retpoline_call(struct objtool_file *file, struct instruction *insn)
1128
+{
1129
+ /*
1130
+ * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
1131
+ * so convert them accordingly.
1132
+ */
1133
+ switch (insn->type) {
1134
+ case INSN_CALL:
1135
+ insn->type = INSN_CALL_DYNAMIC;
1136
+ break;
1137
+ case INSN_JUMP_UNCONDITIONAL:
1138
+ insn->type = INSN_JUMP_DYNAMIC;
1139
+ break;
1140
+ case INSN_JUMP_CONDITIONAL:
1141
+ insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
1142
+ break;
1143
+ default:
1144
+ return;
1145
+ }
1146
+
1147
+ insn->retpoline_safe = true;
1148
+
1149
+ /*
1150
+ * Whatever stack impact regular CALLs have, should be undone
1151
+ * by the RETURN of the called function.
1152
+ *
1153
+ * Annotated intra-function calls retain the stack_ops but
1154
+ * are converted to JUMP, see read_intra_function_calls().
1155
+ */
1156
+ remove_insn_ops(insn);
1157
+
1158
+ annotate_call_site(file, insn, false);
1159
+}
1160
+
1161
+static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
1162
+{
1163
+ /*
1164
+ * Return thunk tail calls are really just returns in disguise,
1165
+ * so convert them accordingly.
1166
+ */
1167
+ insn->type = INSN_RETURN;
1168
+ insn->retpoline_safe = true;
1169
+
1170
+ /* Skip the non-text sections, specially .discard ones */
1171
+ if (add && insn->sec->text)
1172
+ list_add_tail(&insn->call_node, &file->return_thunk_list);
1173
+}
1174
+
1175
+/*
1176
+ * CONFIG_CFI_CLANG: Check if the section is a CFI jump table or a
1177
+ * compiler-generated CFI handler.
1178
+ */
1179
+static bool is_cfi_section(struct section *sec)
1180
+{
1181
+ return (sec->name &&
1182
+ (!strncmp(sec->name, ".text..L.cfi.jumptable", 22) ||
1183
+ !strcmp(sec->name, ".text.__cfi_check")));
1184
+}
1185
+
1186
+/*
1187
+ * CONFIG_CFI_CLANG: Ignore CFI jump tables.
1188
+ */
1189
+static void add_cfi_jumptables(struct objtool_file *file)
1190
+{
1191
+ struct section *sec;
1192
+ struct symbol *func;
1193
+ struct instruction *insn;
1194
+
1195
+ for_each_sec(file, sec) {
1196
+ if (!is_cfi_section(sec))
1197
+ continue;
1198
+
1199
+ list_for_each_entry(func, &sec->symbol_list, list) {
1200
+ sym_for_each_insn(file, func, insn)
1201
+ insn->ignore = true;
1202
+ }
1203
+ }
1204
+}
1205
+
1206
+/*
4921207 * Find the destination instructions for all jumps.
4931208 */
4941209 static int add_jump_destinations(struct objtool_file *file)
4951210 {
4961211 struct instruction *insn;
497
- struct rela *rela;
1212
+ struct reloc *reloc;
4981213 struct section *dest_sec;
4991214 unsigned long dest_off;
5001215
5011216 for_each_insn(file, insn) {
502
- if (insn->type != INSN_JUMP_CONDITIONAL &&
503
- insn->type != INSN_JUMP_UNCONDITIONAL)
1217
+ if (!is_static_jump(insn))
5041218 continue;
5051219
506
- if (insn->offset == FAKE_JUMP_OFFSET)
507
- continue;
508
-
509
- rela = find_rela_by_dest_range(insn->sec, insn->offset,
510
- insn->len);
511
- if (!rela) {
1220
+ reloc = insn_reloc(file, insn);
1221
+ if (!reloc) {
5121222 dest_sec = insn->sec;
513
- dest_off = insn->offset + insn->len + insn->immediate;
514
- } else if (rela->sym->type == STT_SECTION) {
515
- dest_sec = rela->sym->sec;
516
- dest_off = rela->addend + 4;
517
- } else if (rela->sym->sec->idx) {
518
- dest_sec = rela->sym->sec;
519
- dest_off = rela->sym->sym.st_value + rela->addend + 4;
520
- } else if (strstr(rela->sym->name, "_indirect_thunk_")) {
521
- /*
522
- * Retpoline jumps are really dynamic jumps in
523
- * disguise, so convert them accordingly.
524
- */
525
- insn->type = INSN_JUMP_DYNAMIC;
526
- insn->retpoline_safe = true;
1223
+ dest_off = arch_jump_destination(insn);
1224
+ } else if (reloc->sym->type == STT_SECTION) {
1225
+ dest_sec = reloc->sym->sec;
1226
+ dest_off = arch_dest_reloc_offset(reloc->addend);
1227
+ } else if (reloc->sym->retpoline_thunk) {
1228
+ add_retpoline_call(file, insn);
5271229 continue;
1230
+ } else if (reloc->sym->return_thunk) {
1231
+ add_return_call(file, insn, true);
1232
+ continue;
1233
+ } else if (insn->func) {
1234
+ /* internal or external sibling call (with reloc) */
1235
+ add_call_dest(file, insn, reloc->sym, true);
1236
+ continue;
1237
+ } else if (reloc->sym->sec->idx) {
1238
+ dest_sec = reloc->sym->sec;
1239
+ dest_off = reloc->sym->sym.st_value +
1240
+ arch_dest_reloc_offset(reloc->addend);
5281241 } else {
529
- /* sibling call */
530
- insn->jump_dest = 0;
1242
+ /* non-func asm code jumping to another file */
5311243 continue;
5321244 }
5331245
5341246 insn->jump_dest = find_insn(file, dest_sec, dest_off);
1247
+
1248
+ if (!insn->jump_dest && dest_sec->len == dest_off)
1249
+ insn->jump_dest = find_last_insn(file, dest_sec);
1250
+
5351251 if (!insn->jump_dest) {
1252
+ struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);
5361253
5371254 /*
5381255 * This is a special case where an alt instruction
@@ -542,6 +1259,22 @@
5421259 if (!strcmp(insn->sec->name, ".altinstr_replacement"))
5431260 continue;
5441261
1262
+ if (is_cfi_section(insn->sec))
1263
+ continue;
1264
+
1265
+ /*
1266
+ * This is a special case for retbleed_untrain_ret().
1267
+ * It jumps to __x86_return_thunk(), but objtool
1268
+ * can't find the thunk's starting RET
1269
+ * instruction, because the RET is also in the
1270
+ * middle of another instruction. Objtool only
1271
+ * knows about the outer instruction.
1272
+ */
1273
+ if (sym && sym->embedded_insn) {
1274
+ add_return_call(file, insn, false);
1275
+ continue;
1276
+ }
1277
+
5451278 WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
5461279 insn->sec, insn->offset, dest_sec->name,
5471280 dest_off);
@@ -549,29 +1282,51 @@
5491282 }
5501283
5511284 /*
552
- * For GCC 8+, create parent/child links for any cold
553
- * subfunctions. This is _mostly_ redundant with a similar
554
- * initialization in read_symbols().
555
- *
556
- * If a function has aliases, we want the *first* such function
557
- * in the symbol table to be the subfunction's parent. In that
558
- * case we overwrite the initialization done in read_symbols().
559
- *
560
- * However this code can't completely replace the
561
- * read_symbols() code because this doesn't detect the case
562
- * where the parent function's only reference to a subfunction
563
- * is through a switch table.
1285
+ * Cross-function jump.
5641286 */
5651287 if (insn->func && insn->jump_dest->func &&
566
- insn->func != insn->jump_dest->func &&
567
- !strstr(insn->func->name, ".cold.") &&
568
- strstr(insn->jump_dest->func->name, ".cold.")) {
569
- insn->func->cfunc = insn->jump_dest->func;
570
- insn->jump_dest->func->pfunc = insn->func;
1288
+ insn->func != insn->jump_dest->func) {
1289
+
1290
+ /*
1291
+ * For GCC 8+, create parent/child links for any cold
1292
+ * subfunctions. This is _mostly_ redundant with a
1293
+ * similar initialization in read_symbols().
1294
+ *
1295
+ * If a function has aliases, we want the *first* such
1296
+ * function in the symbol table to be the subfunction's
1297
+ * parent. In that case we overwrite the
1298
+ * initialization done in read_symbols().
1299
+ *
1300
+ * However this code can't completely replace the
1301
+ * read_symbols() code because this doesn't detect the
1302
+ * case where the parent function's only reference to a
1303
+ * subfunction is through a jump table.
1304
+ */
1305
+ if (!strstr(insn->func->name, ".cold") &&
1306
+ strstr(insn->jump_dest->func->name, ".cold")) {
1307
+ insn->func->cfunc = insn->jump_dest->func;
1308
+ insn->jump_dest->func->pfunc = insn->func;
1309
+
1310
+ } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
1311
+ insn->jump_dest->offset == insn->jump_dest->func->offset) {
1312
+ /* internal sibling call (without reloc) */
1313
+ add_call_dest(file, insn, insn->jump_dest->func, true);
1314
+ }
5711315 }
5721316 }
5731317
5741318 return 0;
1319
+}
1320
+
1321
+static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
1322
+{
1323
+ struct symbol *call_dest;
1324
+
1325
+ call_dest = find_func_by_offset(sec, offset);
1326
+ if (!call_dest)
1327
+ call_dest = find_symbol_by_offset(sec, offset);
1328
+
1329
+ return call_dest;
5751330 }
5761331
5771332 /*
@@ -581,72 +1336,85 @@
5811336 {
5821337 struct instruction *insn;
5831338 unsigned long dest_off;
584
- struct rela *rela;
1339
+ struct symbol *dest;
1340
+ struct reloc *reloc;
5851341
5861342 for_each_insn(file, insn) {
5871343 if (insn->type != INSN_CALL)
5881344 continue;
5891345
590
- rela = find_rela_by_dest_range(insn->sec, insn->offset,
591
- insn->len);
592
- if (!rela) {
593
- dest_off = insn->offset + insn->len + insn->immediate;
594
- insn->call_dest = find_symbol_by_offset(insn->sec,
595
- dest_off);
1346
+ reloc = insn_reloc(file, insn);
1347
+ if (!reloc) {
1348
+ dest_off = arch_jump_destination(insn);
1349
+ dest = find_call_destination(insn->sec, dest_off);
5961350
597
- if (!insn->call_dest && !insn->ignore) {
598
- WARN_FUNC("unsupported intra-function call",
1351
+ add_call_dest(file, insn, dest, false);
1352
+
1353
+ if (insn->ignore)
1354
+ continue;
1355
+
1356
+ if (!insn->call_dest) {
1357
+ WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
1358
+ return -1;
1359
+ }
1360
+
1361
+ if (insn->func && insn->call_dest->type != STT_FUNC) {
1362
+ WARN_FUNC("unsupported call to non-function",
5991363 insn->sec, insn->offset);
600
- if (retpoline)
601
- WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
6021364 return -1;
6031365 }
6041366
605
- } else if (rela->sym->type == STT_SECTION) {
606
- insn->call_dest = find_symbol_by_offset(rela->sym->sec,
607
- rela->addend+4);
608
- if (!insn->call_dest ||
609
- insn->call_dest->type != STT_FUNC) {
610
- WARN_FUNC("can't find call dest symbol at %s+0x%x",
1367
+ } else if (reloc->sym->type == STT_SECTION) {
1368
+ dest_off = arch_dest_reloc_offset(reloc->addend);
1369
+ dest = find_call_destination(reloc->sym->sec, dest_off);
1370
+ if (!dest) {
1371
+ if (is_cfi_section(reloc->sym->sec))
1372
+ continue;
1373
+
1374
+ WARN_FUNC("can't find call dest symbol at %s+0x%lx",
6111375 insn->sec, insn->offset,
612
- rela->sym->sec->name,
613
- rela->addend + 4);
1376
+ reloc->sym->sec->name,
1377
+ dest_off);
6141378 return -1;
6151379 }
1380
+
1381
+ add_call_dest(file, insn, dest, false);
1382
+
1383
+ } else if (reloc->sym->retpoline_thunk) {
1384
+ add_retpoline_call(file, insn);
1385
+
6161386 } else
617
- insn->call_dest = rela->sym;
1387
+ add_call_dest(file, insn, reloc->sym, false);
6181388 }
6191389
6201390 return 0;
6211391 }
6221392
6231393 /*
624
- * The .alternatives section requires some extra special care, over and above
625
- * what other special sections require:
626
- *
627
- * 1. Because alternatives are patched in-place, we need to insert a fake jump
628
- * instruction at the end so that validate_branch() skips all the original
629
- * replaced instructions when validating the new instruction path.
630
- *
631
- * 2. An added wrinkle is that the new instruction length might be zero. In
632
- * that case the old instructions are replaced with noops. We simulate that
633
- * by creating a fake jump as the only new instruction.
634
- *
635
- * 3. In some cases, the alternative section includes an instruction which
636
- * conditionally jumps to the _end_ of the entry. We have to modify these
637
- * jumps' destinations to point back to .text rather than the end of the
638
- * entry in .altinstr_replacement.
639
- *
640
- * 4. It has been requested that we don't validate the !POPCNT feature path
641
- * which is a "very very small percentage of machines".
1394
+ * The .alternatives section requires some extra special care over and above
1395
+ * other special sections because alternatives are patched in place.
6421396 */
6431397 static int handle_group_alt(struct objtool_file *file,
6441398 struct special_alt *special_alt,
6451399 struct instruction *orig_insn,
6461400 struct instruction **new_insn)
6471401 {
648
- struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
1402
+ struct instruction *last_orig_insn, *last_new_insn = NULL, *insn, *nop = NULL;
1403
+ struct alt_group *orig_alt_group, *new_alt_group;
6491404 unsigned long dest_off;
1405
+
1406
+
1407
+ orig_alt_group = malloc(sizeof(*orig_alt_group));
1408
+ if (!orig_alt_group) {
1409
+ WARN("malloc failed");
1410
+ return -1;
1411
+ }
1412
+ orig_alt_group->cfi = calloc(special_alt->orig_len,
1413
+ sizeof(struct cfi_state *));
1414
+ if (!orig_alt_group->cfi) {
1415
+ WARN("calloc failed");
1416
+ return -1;
1417
+ }
6501418
6511419 last_orig_insn = NULL;
6521420 insn = orig_insn;
@@ -654,67 +1422,90 @@
6541422 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
6551423 break;
6561424
657
- if (special_alt->skip_orig)
658
- insn->type = INSN_NOP;
659
-
660
- insn->alt_group = true;
1425
+ insn->alt_group = orig_alt_group;
6611426 last_orig_insn = insn;
6621427 }
1428
+ orig_alt_group->orig_group = NULL;
1429
+ orig_alt_group->first_insn = orig_insn;
1430
+ orig_alt_group->last_insn = last_orig_insn;
6631431
664
- if (next_insn_same_sec(file, last_orig_insn)) {
665
- fake_jump = malloc(sizeof(*fake_jump));
666
- if (!fake_jump) {
1432
+
1433
+ new_alt_group = malloc(sizeof(*new_alt_group));
1434
+ if (!new_alt_group) {
1435
+ WARN("malloc failed");
1436
+ return -1;
1437
+ }
1438
+
1439
+ if (special_alt->new_len < special_alt->orig_len) {
1440
+ /*
1441
+ * Insert a fake nop at the end to make the replacement
1442
+ * alt_group the same size as the original. This is needed to
1443
+ * allow propagate_alt_cfi() to do its magic. When the last
1444
+ * instruction affects the stack, the instruction after it (the
1445
+ * nop) will propagate the new state to the shared CFI array.
1446
+ */
1447
+ nop = malloc(sizeof(*nop));
1448
+ if (!nop) {
6671449 WARN("malloc failed");
6681450 return -1;
6691451 }
670
- memset(fake_jump, 0, sizeof(*fake_jump));
671
- INIT_LIST_HEAD(&fake_jump->alts);
672
- clear_insn_state(&fake_jump->state);
1452
+ memset(nop, 0, sizeof(*nop));
1453
+ INIT_LIST_HEAD(&nop->alts);
1454
+ INIT_LIST_HEAD(&nop->stack_ops);
6731455
674
- fake_jump->sec = special_alt->new_sec;
675
- fake_jump->offset = FAKE_JUMP_OFFSET;
676
- fake_jump->type = INSN_JUMP_UNCONDITIONAL;
677
- fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
678
- fake_jump->func = orig_insn->func;
1456
+ nop->sec = special_alt->new_sec;
1457
+ nop->offset = special_alt->new_off + special_alt->new_len;
1458
+ nop->len = special_alt->orig_len - special_alt->new_len;
1459
+ nop->type = INSN_NOP;
1460
+ nop->func = orig_insn->func;
1461
+ nop->alt_group = new_alt_group;
1462
+ nop->ignore = orig_insn->ignore_alts;
6791463 }
6801464
6811465 if (!special_alt->new_len) {
682
- if (!fake_jump) {
683
- WARN("%s: empty alternative at end of section",
684
- special_alt->orig_sec->name);
685
- return -1;
686
- }
687
-
688
- *new_insn = fake_jump;
689
- return 0;
1466
+ *new_insn = nop;
1467
+ goto end;
6901468 }
6911469
692
- last_new_insn = NULL;
6931470 insn = *new_insn;
6941471 sec_for_each_insn_from(file, insn) {
1472
+ struct reloc *alt_reloc;
1473
+
6951474 if (insn->offset >= special_alt->new_off + special_alt->new_len)
6961475 break;
6971476
6981477 last_new_insn = insn;
6991478
7001479 insn->ignore = orig_insn->ignore_alts;
1480
+ insn->func = orig_insn->func;
1481
+ insn->alt_group = new_alt_group;
7011482
702
- if (insn->type != INSN_JUMP_CONDITIONAL &&
703
- insn->type != INSN_JUMP_UNCONDITIONAL)
1483
+ /*
1484
+ * Since alternative replacement code is copy/pasted by the
1485
+ * kernel after applying relocations, generally such code can't
1486
+ * have relative-address relocation references to outside the
1487
+ * .altinstr_replacement section, unless the arch's
1488
+ * alternatives code can adjust the relative offsets
1489
+ * accordingly.
1490
+ */
1491
+ alt_reloc = insn_reloc(file, insn);
1492
+ if (alt_reloc &&
1493
+ !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
1494
+
1495
+ WARN_FUNC("unsupported relocation in alternatives section",
1496
+ insn->sec, insn->offset);
1497
+ return -1;
1498
+ }
1499
+
1500
+ if (!is_static_jump(insn))
7041501 continue;
7051502
7061503 if (!insn->immediate)
7071504 continue;
7081505
709
- dest_off = insn->offset + insn->len + insn->immediate;
710
- if (dest_off == special_alt->new_off + special_alt->new_len) {
711
- if (!fake_jump) {
712
- WARN("%s: alternative jump to end of section",
713
- special_alt->orig_sec->name);
714
- return -1;
715
- }
716
- insn->jump_dest = fake_jump;
717
- }
1506
+ dest_off = arch_jump_destination(insn);
1507
+ if (dest_off == special_alt->new_off + special_alt->new_len)
1508
+ insn->jump_dest = next_insn_same_sec(file, last_orig_insn);
7181509
7191510 if (!insn->jump_dest) {
7201511 WARN_FUNC("can't find alternative jump destination",
@@ -729,9 +1520,13 @@
7291520 return -1;
7301521 }
7311522
732
- if (fake_jump)
733
- list_add(&fake_jump->list, &last_new_insn->list);
734
-
1523
+ if (nop)
1524
+ list_add(&nop->list, &last_new_insn->list);
1525
+end:
1526
+ new_alt_group->orig_group = orig_alt_group;
1527
+ new_alt_group->first_insn = *new_insn;
1528
+ new_alt_group->last_insn = nop ? : last_new_insn;
1529
+ new_alt_group->cfi = orig_alt_group->cfi;
7351530 return 0;
7361531 }
7371532
@@ -826,6 +1621,8 @@
8261621 }
8271622
8281623 alt->insn = new_insn;
1624
+ alt->skip_orig = special_alt->skip_orig;
1625
+ orig_insn->ignore_alts |= special_alt->skip_alt;
8291626 list_add_tail(&alt->list, &orig_insn->alts);
8301627
8311628 list_del(&special_alt->list);
@@ -836,34 +1633,40 @@
8361633 return ret;
8371634 }
8381635
839
-static int add_switch_table(struct objtool_file *file, struct instruction *insn,
840
- struct rela *table, struct rela *next_table)
1636
+static int add_jump_table(struct objtool_file *file, struct instruction *insn,
1637
+ struct reloc *table)
8411638 {
842
- struct rela *rela = table;
843
- struct instruction *alt_insn;
1639
+ struct reloc *reloc = table;
1640
+ struct instruction *dest_insn;
8441641 struct alternative *alt;
8451642 struct symbol *pfunc = insn->func->pfunc;
8461643 unsigned int prev_offset = 0;
8471644
848
- list_for_each_entry_from(rela, &table->rela_sec->rela_list, list) {
849
- if (rela == next_table)
1645
+ /*
1646
+ * Each @reloc is a switch table relocation which points to the target
1647
+ * instruction.
1648
+ */
1649
+ list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {
1650
+
1651
+ /* Check for the end of the table: */
1652
+ if (reloc != table && reloc->jump_table_start)
8501653 break;
8511654
852
- /* Make sure the switch table entries are consecutive: */
853
- if (prev_offset && rela->offset != prev_offset + 8)
1655
+ /* Make sure the table entries are consecutive: */
1656
+ if (prev_offset && reloc->offset != prev_offset + 8)
8541657 break;
8551658
8561659 /* Detect function pointers from contiguous objects: */
857
- if (rela->sym->sec == pfunc->sec &&
858
- rela->addend == pfunc->offset)
1660
+ if (reloc->sym->sec == pfunc->sec &&
1661
+ reloc->addend == pfunc->offset)
8591662 break;
8601663
861
- alt_insn = find_insn(file, rela->sym->sec, rela->addend);
862
- if (!alt_insn)
1664
+ dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
1665
+ if (!dest_insn)
8631666 break;
8641667
865
- /* Make sure the jmp dest is in the function or subfunction: */
866
- if (alt_insn->func->pfunc != pfunc)
1668
+ /* Make sure the destination is in the same function: */
1669
+ if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
8671670 break;
8681671
8691672 alt = malloc(sizeof(*alt));
@@ -872,9 +1675,9 @@
8721675 return -1;
8731676 }
8741677
875
- alt->insn = alt_insn;
1678
+ alt->insn = dest_insn;
8761679 list_add_tail(&alt->list, &insn->alts);
877
- prev_offset = rela->offset;
1680
+ prev_offset = reloc->offset;
8781681 }
8791682
8801683 if (!prev_offset) {
@@ -887,56 +1690,15 @@
8871690 }
8881691
8891692 /*
890
- * find_switch_table() - Given a dynamic jump, find the switch jump table in
891
- * .rodata associated with it.
892
- *
893
- * There are 3 basic patterns:
894
- *
895
- * 1. jmpq *[rodata addr](,%reg,8)
896
- *
897
- * This is the most common case by far. It jumps to an address in a simple
898
- * jump table which is stored in .rodata.
899
- *
900
- * 2. jmpq *[rodata addr](%rip)
901
- *
902
- * This is caused by a rare GCC quirk, currently only seen in three driver
903
- * functions in the kernel, only with certain obscure non-distro configs.
904
- *
905
- * As part of an optimization, GCC makes a copy of an existing switch jump
906
- * table, modifies it, and then hard-codes the jump (albeit with an indirect
907
- * jump) to use a single entry in the table. The rest of the jump table and
908
- * some of its jump targets remain as dead code.
909
- *
910
- * In such a case we can just crudely ignore all unreachable instruction
911
- * warnings for the entire object file. Ideally we would just ignore them
912
- * for the function, but that would require redesigning the code quite a
913
- * bit. And honestly that's just not worth doing: unreachable instruction
914
- * warnings are of questionable value anyway, and this is such a rare issue.
915
- *
916
- * 3. mov [rodata addr],%reg1
917
- * ... some instructions ...
918
- * jmpq *(%reg1,%reg2,8)
919
- *
920
- * This is a fairly uncommon pattern which is new for GCC 6. As of this
921
- * writing, there are 11 occurrences of it in the allmodconfig kernel.
922
- *
923
- * As of GCC 7 there are quite a few more of these and the 'in between' code
924
- * is significant. Esp. with KASAN enabled some of the code between the mov
925
- * and jmpq uses .rodata itself, which can confuse things.
926
- *
927
- * TODO: Once we have DWARF CFI and smarter instruction decoding logic,
928
- * ensure the same register is used in the mov and jump instructions.
929
- *
930
- * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
1693
+ * find_jump_table() - Given a dynamic jump, find the switch jump table
1694
+ * associated with it.
9311695 */
932
-static struct rela *find_switch_table(struct objtool_file *file,
1696
+static struct reloc *find_jump_table(struct objtool_file *file,
9331697 struct symbol *func,
9341698 struct instruction *insn)
9351699 {
936
- struct rela *text_rela, *rodata_rela;
937
- struct instruction *orig_insn = insn;
938
- struct section *rodata_sec;
939
- unsigned long table_offset;
1700
+ struct reloc *table_reloc;
1701
+ struct instruction *dest_insn, *orig_insn = insn;
9401702
9411703 /*
9421704 * Backward search using the @first_jump_src links, these help avoid
....@@ -944,8 +1706,8 @@
9441706 * it.
9451707 */
9461708 for (;
947
- &insn->list != &file->insn_list && insn->func && insn->func->pfunc == func;
948
- insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1709
+ insn && insn->func && insn->func->pfunc == func;
1710
+ insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
9491711
9501712 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
9511713 break;
....@@ -957,58 +1719,36 @@
9571719 insn->jump_dest->offset > orig_insn->offset))
9581720 break;
9591721
960
- /* look for a relocation which references .rodata */
961
- text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
962
- insn->len);
963
- if (!text_rela || text_rela->sym->type != STT_SECTION ||
964
- !text_rela->sym->sec->rodata)
1722
+ table_reloc = arch_find_switch_table(file, insn);
1723
+ if (!table_reloc)
1724
+ continue;
1725
+ dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
1726
+ if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
9651727 continue;
9661728
967
- table_offset = text_rela->addend;
968
- rodata_sec = text_rela->sym->sec;
969
-
970
- if (text_rela->type == R_X86_64_PC32)
971
- table_offset += 4;
972
-
973
- /*
974
- * Make sure the .rodata address isn't associated with a
975
- * symbol. gcc jump tables are anonymous data.
976
- */
977
- if (find_symbol_containing(rodata_sec, table_offset))
978
- continue;
979
-
980
- rodata_rela = find_rela_by_dest(rodata_sec, table_offset);
981
- if (rodata_rela) {
982
- /*
983
- * Use of RIP-relative switch jumps is quite rare, and
984
- * indicates a rare GCC quirk/bug which can leave dead
985
- * code behind.
986
- */
987
- if (text_rela->type == R_X86_64_PC32)
988
- file->ignore_unreachables = true;
989
-
990
- return rodata_rela;
991
- }
1729
+ return table_reloc;
9921730 }
9931731
9941732 return NULL;
9951733 }
9961734
997
-
998
-static int add_func_switch_tables(struct objtool_file *file,
999
- struct symbol *func)
1735
+/*
1736
+ * First pass: Mark the head of each jump table so that in the next pass,
1737
+ * we know when a given jump table ends and the next one starts.
1738
+ */
1739
+static void mark_func_jump_tables(struct objtool_file *file,
1740
+ struct symbol *func)
10001741 {
1001
- struct instruction *insn, *last = NULL, *prev_jump = NULL;
1002
- struct rela *rela, *prev_rela = NULL;
1003
- int ret;
1742
+ struct instruction *insn, *last = NULL;
1743
+ struct reloc *reloc;
10041744
1005
- func_for_each_insn_all(file, func, insn) {
1745
+ func_for_each_insn(file, func, insn) {
10061746 if (!last)
10071747 last = insn;
10081748
10091749 /*
10101750 * Store back-pointers for unconditional forward jumps such
1011
- * that find_switch_table() can back-track using those and
1751
+ * that find_jump_table() can back-track using those and
10121752 * avoid some potentially confusing code.
10131753 */
10141754 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
....@@ -1023,27 +1763,25 @@
10231763 if (insn->type != INSN_JUMP_DYNAMIC)
10241764 continue;
10251765
1026
- rela = find_switch_table(file, func, insn);
1027
- if (!rela)
1766
+ reloc = find_jump_table(file, func, insn);
1767
+ if (reloc) {
1768
+ reloc->jump_table_start = true;
1769
+ insn->jump_table = reloc;
1770
+ }
1771
+ }
1772
+}
1773
+
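
A minimal standalone sketch of the two-pass scheme above, using invented toy types rather than objtool's real ones: the mark pass flags the relocation that begins each jump table, so the walk pass can count one table's consecutive 8-byte entries and stop at the next table head or at a gap.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Illustrative stand-in for a .rodata relocation entry (assumed layout). */
struct toy_reloc {
	unsigned long offset;		/* entry's offset within .rodata */
	bool jump_table_start;		/* set by the "mark" pass */
};

/* Walk pass: count the entries of the table starting at index 'start'. */
static size_t table_len(const struct toy_reloc *relocs, size_t n, size_t start)
{
	unsigned long prev = 0;
	size_t i, len = 0;

	for (i = start; i < n; i++) {
		/* The head of a later table terminates this one... */
		if (i != start && relocs[i].jump_table_start)
			break;
		/* ...and so does a hole: entries must be 8 bytes apart. */
		if (prev && relocs[i].offset != prev + 8)
			break;
		prev = relocs[i].offset;
		len++;
	}
	return len;
}

int main(void)
{
	/* Two back-to-back tables in .rodata: entries at 8,16,24 and 32,40. */
	const struct toy_reloc relocs[] = {
		{  8, true }, { 16, false }, { 24, false },
		{ 32, true }, { 40, false },
	};

	printf("first table:  %zu entries\n", table_len(relocs, 5, 0));	/* 3 */
	printf("second table: %zu entries\n", table_len(relocs, 5, 3));	/* 2 */
	return 0;
}
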
1774
+static int add_func_jump_tables(struct objtool_file *file,
1775
+ struct symbol *func)
1776
+{
1777
+ struct instruction *insn;
1778
+ int ret;
1779
+
1780
+ func_for_each_insn(file, func, insn) {
1781
+ if (!insn->jump_table)
10281782 continue;
10291783
1030
- /*
1031
- * We found a switch table, but we don't know yet how big it
1032
- * is. Don't add it until we reach the end of the function or
1033
- * the beginning of another switch table in the same function.
1034
- */
1035
- if (prev_jump) {
1036
- ret = add_switch_table(file, prev_jump, prev_rela, rela);
1037
- if (ret)
1038
- return ret;
1039
- }
1040
-
1041
- prev_jump = insn;
1042
- prev_rela = rela;
1043
- }
1044
-
1045
- if (prev_jump) {
1046
- ret = add_switch_table(file, prev_jump, prev_rela, NULL);
1784
+ ret = add_jump_table(file, insn, insn->jump_table);
10471785 if (ret)
10481786 return ret;
10491787 }
....@@ -1056,7 +1794,7 @@
10561794 * section which contains a list of addresses within the function to jump to.
10571795 * This finds these jump tables and adds them to the insn->alts lists.
10581796 */
1059
-static int add_switch_table_alts(struct objtool_file *file)
1797
+static int add_jump_table_alts(struct objtool_file *file)
10601798 {
10611799 struct section *sec;
10621800 struct symbol *func;
....@@ -1070,7 +1808,8 @@
10701808 if (func->type != STT_FUNC)
10711809 continue;
10721810
1073
- ret = add_func_switch_tables(file, func);
1811
+ mark_func_jump_tables(file, func);
1812
+ ret = add_func_jump_tables(file, func);
10741813 if (ret)
10751814 return ret;
10761815 }
....@@ -1079,21 +1818,29 @@
10791818 return 0;
10801819 }
10811820
1821
+static void set_func_state(struct cfi_state *state)
1822
+{
1823
+ state->cfa = initial_func_cfi.cfa;
1824
+ memcpy(&state->regs, &initial_func_cfi.regs,
1825
+ CFI_NUM_REGS * sizeof(struct cfi_reg));
1826
+ state->stack_size = initial_func_cfi.cfa.offset;
1827
+}
1828
+
10821829 static int read_unwind_hints(struct objtool_file *file)
10831830 {
1084
- struct section *sec, *relasec;
1085
- struct rela *rela;
1831
+ struct cfi_state cfi = init_cfi;
1832
+ struct section *sec, *relocsec;
10861833 struct unwind_hint *hint;
10871834 struct instruction *insn;
1088
- struct cfi_reg *cfa;
1835
+ struct reloc *reloc;
10891836 int i;
10901837
10911838 sec = find_section_by_name(file->elf, ".discard.unwind_hints");
10921839 if (!sec)
10931840 return 0;
10941841
1095
- relasec = sec->rela;
1096
- if (!relasec) {
1842
+ relocsec = sec->reloc;
1843
+ if (!relocsec) {
10971844 WARN("missing .rela.discard.unwind_hints section");
10981845 return -1;
10991846 }
....@@ -1108,66 +1855,63 @@
11081855 for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
11091856 hint = (struct unwind_hint *)sec->data->d_buf + i;
11101857
1111
- rela = find_rela_by_dest(sec, i * sizeof(*hint));
1112
- if (!rela) {
1113
- WARN("can't find rela for unwind_hints[%d]", i);
1858
+ reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
1859
+ if (!reloc) {
1860
+ WARN("can't find reloc for unwind_hints[%d]", i);
11141861 return -1;
11151862 }
11161863
1117
- insn = find_insn(file, rela->sym->sec, rela->addend);
1864
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
11181865 if (!insn) {
11191866 WARN("can't find insn for unwind_hints[%d]", i);
11201867 return -1;
11211868 }
11221869
1123
- cfa = &insn->state.cfa;
1870
+ insn->hint = true;
11241871
11251872 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1873
+ insn->hint = false;
11261874 insn->save = true;
1127
- continue;
1128
-
1129
- } else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1130
- insn->restore = true;
1131
- insn->hint = true;
11321875 continue;
11331876 }
11341877
1135
- insn->hint = true;
1878
+ if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1879
+ insn->restore = true;
1880
+ continue;
1881
+ }
11361882
1137
- switch (hint->sp_reg) {
1138
- case ORC_REG_UNDEFINED:
1139
- cfa->base = CFI_UNDEFINED;
1140
- break;
1141
- case ORC_REG_SP:
1142
- cfa->base = CFI_SP;
1143
- break;
1144
- case ORC_REG_BP:
1145
- cfa->base = CFI_BP;
1146
- break;
1147
- case ORC_REG_SP_INDIRECT:
1148
- cfa->base = CFI_SP_INDIRECT;
1149
- break;
1150
- case ORC_REG_R10:
1151
- cfa->base = CFI_R10;
1152
- break;
1153
- case ORC_REG_R13:
1154
- cfa->base = CFI_R13;
1155
- break;
1156
- case ORC_REG_DI:
1157
- cfa->base = CFI_DI;
1158
- break;
1159
- case ORC_REG_DX:
1160
- cfa->base = CFI_DX;
1161
- break;
1162
- default:
1883
+ if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
1884
+ struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);
1885
+
1886
+ if (sym && sym->bind == STB_GLOBAL) {
1887
+ insn->entry = 1;
1888
+ }
1889
+ }
1890
+
1891
+ if (hint->type == UNWIND_HINT_TYPE_ENTRY) {
1892
+ hint->type = UNWIND_HINT_TYPE_CALL;
1893
+ insn->entry = 1;
1894
+ }
1895
+
1896
+ if (hint->type == UNWIND_HINT_TYPE_FUNC) {
1897
+ insn->cfi = &func_cfi;
1898
+ continue;
1899
+ }
1900
+
1901
+ if (insn->cfi)
1902
+ cfi = *(insn->cfi);
1903
+
1904
+ if (arch_decode_hint_reg(hint->sp_reg, &cfi.cfa.base)) {
11631905 WARN_FUNC("unsupported unwind_hint sp base reg %d",
11641906 insn->sec, insn->offset, hint->sp_reg);
11651907 return -1;
11661908 }
11671909
1168
- cfa->offset = hint->sp_offset;
1169
- insn->state.type = hint->type;
1170
- insn->state.end = hint->end;
1910
+ cfi.cfa.offset = hint->sp_offset;
1911
+ cfi.type = hint->type;
1912
+ cfi.end = hint->end;
1913
+
1914
+ insn->cfi = cfi_hash_find_or_add(&cfi);
11711915 }
11721916
11731917 return 0;
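
With this change an instruction no longer carries its own CFI copy; insn->cfi points into a deduplicating hash via cfi_hash_find_or_add(), which is not shown in this hunk. The sketch below, with invented types and a linear pool instead of a real hash table, only illustrates that interning idea: identical states collapse to one shared object.

#include <stdio.h>
#include <string.h>

/* Toy CFI state; the real struct cfi_state is much richer. */
struct toy_cfi {
	int cfa_base, cfa_offset, stack_size;
};

#define POOL_SIZE 64
static struct toy_cfi pool[POOL_SIZE];
static int pool_used;			/* no overflow check; toy only */

/* Return a canonical copy: reuse an equal entry or intern a new one. */
static struct toy_cfi *toy_find_or_add(const struct toy_cfi *cfi)
{
	int i;

	for (i = 0; i < pool_used; i++)
		if (!memcmp(&pool[i], cfi, sizeof(*cfi)))
			return &pool[i];

	pool[pool_used] = *cfi;
	return &pool[pool_used++];
}

int main(void)
{
	struct toy_cfi a = { 7, 8, 0 };		/* e.g. CFA = %rsp + 8 */
	struct toy_cfi b = { 7, 8, 0 };		/* same state reached elsewhere */

	struct toy_cfi *pa = toy_find_or_add(&a);
	struct toy_cfi *pb = toy_find_or_add(&b);

	printf("shared: %s, pool entries: %d\n",
	       pa == pb ? "yes" : "no", pool_used);	/* shared: yes, 1 */
	return 0;
}
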
....@@ -1177,32 +1921,166 @@
11771921 {
11781922 struct section *sec;
11791923 struct instruction *insn;
1180
- struct rela *rela;
1924
+ struct reloc *reloc;
11811925
11821926 sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
11831927 if (!sec)
11841928 return 0;
11851929
1186
- list_for_each_entry(rela, &sec->rela_list, list) {
1187
- if (rela->sym->type != STT_SECTION) {
1930
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
1931
+ if (reloc->sym->type != STT_SECTION) {
11881932 WARN("unexpected relocation symbol type in %s", sec->name);
11891933 return -1;
11901934 }
11911935
1192
- insn = find_insn(file, rela->sym->sec, rela->addend);
1936
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
11931937 if (!insn) {
11941938 WARN("bad .discard.retpoline_safe entry");
11951939 return -1;
11961940 }
11971941
11981942 if (insn->type != INSN_JUMP_DYNAMIC &&
1199
- insn->type != INSN_CALL_DYNAMIC) {
1200
- WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1943
+ insn->type != INSN_CALL_DYNAMIC &&
1944
+ insn->type != INSN_RETURN &&
1945
+ insn->type != INSN_NOP) {
1946
+ WARN_FUNC("retpoline_safe hint not an indirect jump/call/ret/nop",
12011947 insn->sec, insn->offset);
12021948 return -1;
12031949 }
12041950
12051951 insn->retpoline_safe = true;
1952
+ }
1953
+
1954
+ return 0;
1955
+}
1956
+
1957
+static int read_instr_hints(struct objtool_file *file)
1958
+{
1959
+ struct section *sec;
1960
+ struct instruction *insn;
1961
+ struct reloc *reloc;
1962
+
1963
+ sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
1964
+ if (!sec)
1965
+ return 0;
1966
+
1967
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
1968
+ if (reloc->sym->type != STT_SECTION) {
1969
+ WARN("unexpected relocation symbol type in %s", sec->name);
1970
+ return -1;
1971
+ }
1972
+
1973
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
1974
+ if (!insn) {
1975
+ WARN("bad .discard.instr_end entry");
1976
+ return -1;
1977
+ }
1978
+
1979
+ insn->instr--;
1980
+ }
1981
+
1982
+ sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
1983
+ if (!sec)
1984
+ return 0;
1985
+
1986
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
1987
+ if (reloc->sym->type != STT_SECTION) {
1988
+ WARN("unexpected relocation symbol type in %s", sec->name);
1989
+ return -1;
1990
+ }
1991
+
1992
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
1993
+ if (!insn) {
1994
+ WARN("bad .discard.instr_begin entry");
1995
+ return -1;
1996
+ }
1997
+
1998
+ insn->instr++;
1999
+ }
2000
+
2001
+ return 0;
2002
+}
2003
+
2004
+static int read_intra_function_calls(struct objtool_file *file)
2005
+{
2006
+ struct instruction *insn;
2007
+ struct section *sec;
2008
+ struct reloc *reloc;
2009
+
2010
+ sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
2011
+ if (!sec)
2012
+ return 0;
2013
+
2014
+ list_for_each_entry(reloc, &sec->reloc_list, list) {
2015
+ unsigned long dest_off;
2016
+
2017
+ if (reloc->sym->type != STT_SECTION) {
2018
+ WARN("unexpected relocation symbol type in %s",
2019
+ sec->name);
2020
+ return -1;
2021
+ }
2022
+
2023
+ insn = find_insn(file, reloc->sym->sec, reloc->addend);
2024
+ if (!insn) {
2025
+ WARN("bad .discard.intra_function_call entry");
2026
+ return -1;
2027
+ }
2028
+
2029
+ if (insn->type != INSN_CALL) {
2030
+ WARN_FUNC("intra_function_call not a direct call",
2031
+ insn->sec, insn->offset);
2032
+ return -1;
2033
+ }
2034
+
2035
+ /*
2036
+ * Treat intra-function CALLs as JMPs, but with a stack_op.
2037
+ * See add_call_destinations(), which strips stack_ops from
2038
+ * normal CALLs.
2039
+ */
2040
+ insn->type = INSN_JUMP_UNCONDITIONAL;
2041
+
2042
+ dest_off = insn->offset + insn->len + insn->immediate;
2043
+ insn->jump_dest = find_insn(file, insn->sec, dest_off);
2044
+ if (!insn->jump_dest) {
2045
+ WARN_FUNC("can't find call dest at %s+0x%lx",
2046
+ insn->sec, insn->offset,
2047
+ insn->sec->name, dest_off);
2048
+ return -1;
2049
+ }
2050
+ }
2051
+
2052
+ return 0;
2053
+}
2054
+
2055
+static int classify_symbols(struct objtool_file *file)
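
The dest_off arithmetic above is the standard x86 relative-branch rule: the target is the address just past the instruction plus its signed immediate. A small self-checking example with made-up offsets:

#include <assert.h>
#include <stdio.h>

int main(void)
{
	unsigned long offset = 0x10;	/* where a 5-byte e8 CALL sits */
	unsigned long len    = 5;	/* opcode + rel32 */
	long immediate       = 0x20;	/* signed 32-bit displacement */

	/* Forward call: target = end of instruction + displacement. */
	unsigned long dest_off = offset + len + immediate;
	assert(dest_off == 0x35);

	/* A negative displacement branches backwards the same way. */
	immediate = -0x15;
	dest_off = offset + len + immediate;
	assert(dest_off == 0x0);

	printf("relative targets: ok\n");
	return 0;
}
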
2056
+{
2057
+ struct section *sec;
2058
+ struct symbol *func;
2059
+
2060
+ for_each_sec(file, sec) {
2061
+ list_for_each_entry(func, &sec->symbol_list, list) {
2062
+ if (func->bind != STB_GLOBAL)
2063
+ continue;
2064
+
2065
+ if (!strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
2066
+ strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
2067
+ func->static_call_tramp = true;
2068
+
2069
+ if (arch_is_retpoline(func))
2070
+ func->retpoline_thunk = true;
2071
+
2072
+ if (arch_is_rethunk(func))
2073
+ func->return_thunk = true;
2074
+
2075
+ if (arch_is_embedded_insn(func))
2076
+ func->embedded_insn = true;
2077
+
2078
+ if (!strcmp(func->name, "__fentry__"))
2079
+ func->fentry = true;
2080
+
2081
+ if (!strncmp(func->name, "__sanitizer_cov_", 16))
2082
+ func->kcov = true;
2083
+ }
12062084 }
12072085
12082086 return 0;
....@@ -1214,9 +2092,14 @@
12142092 bool found = false;
12152093
12162094 /*
1217
- * This searches for the .rodata section or multiple .rodata.func_name
1218
- * sections if -fdata-sections is being used. The .str.1.1 and .str.1.8
1219
- * rodata sections are ignored as they don't contain jump tables.
2095
+ * Search for the following rodata sections, each of which can
2096
+ * potentially contain jump tables:
2097
+ *
2098
+ * - .rodata: can contain GCC switch tables
2099
+ * - .rodata.<func>: same, if -fdata-sections is being used
2100
+ * - .rodata..c_jump_table: contains C annotated jump tables
2101
+ *
2102
+ * .rodata.str1.* sections are ignored; they don't contain jump tables.
12202103 */
12212104 for_each_sec(file, sec) {
12222105 if (!strncmp(sec->name, ".rodata", 7) &&
....@@ -1244,11 +2127,24 @@
12442127 return ret;
12452128
12462129 add_ignores(file);
2130
+ add_uaccess_safe(file);
2131
+ add_cfi_jumptables(file);
12472132
1248
- ret = add_nospec_ignores(file);
2133
+ ret = add_ignore_alternatives(file);
12492134 if (ret)
12502135 return ret;
12512136
2137
+ /*
2138
+ * Must be before add_{jump,call}_destinations().
2139
+ */
2140
+ ret = classify_symbols(file);
2141
+ if (ret)
2142
+ return ret;
2143
+
2144
+ /*
2145
+ * Must be before add_special_section_alts() as that depends on
2146
+ * jump_dest being set.
2147
+ */
12522148 ret = add_jump_destinations(file);
12532149 if (ret)
12542150 return ret;
....@@ -1257,11 +2153,19 @@
12572153 if (ret)
12582154 return ret;
12592155
2156
+ /*
2157
+ * Must be before add_call_destination(); it changes INSN_CALL to
2158
+ * INSN_JUMP.
2159
+ */
2160
+ ret = read_intra_function_calls(file);
2161
+ if (ret)
2162
+ return ret;
2163
+
12602164 ret = add_call_destinations(file);
12612165 if (ret)
12622166 return ret;
12632167
1264
- ret = add_switch_table_alts(file);
2168
+ ret = add_jump_table_alts(file);
12652169 if (ret)
12662170 return ret;
12672171
....@@ -1273,63 +2177,80 @@
12732177 if (ret)
12742178 return ret;
12752179
2180
+ ret = read_instr_hints(file);
2181
+ if (ret)
2182
+ return ret;
2183
+
12762184 return 0;
12772185 }
12782186
1279
-static bool is_fentry_call(struct instruction *insn)
2187
+static bool is_special_call(struct instruction *insn)
12802188 {
1281
- if (insn->type == INSN_CALL &&
1282
- insn->call_dest->type == STT_NOTYPE &&
1283
- !strcmp(insn->call_dest->name, "__fentry__"))
1284
- return true;
2189
+ if (insn->type == INSN_CALL) {
2190
+ struct symbol *dest = insn->call_dest;
2191
+
2192
+ if (!dest)
2193
+ return false;
2194
+
2195
+ if (dest->fentry)
2196
+ return true;
2197
+ }
12852198
12862199 return false;
12872200 }
12882201
1289
-static bool has_modified_stack_frame(struct insn_state *state)
2202
+static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
12902203 {
2204
+ struct cfi_state *cfi = &state->cfi;
12912205 int i;
12922206
1293
- if (state->cfa.base != initial_func_cfi.cfa.base ||
1294
- state->cfa.offset != initial_func_cfi.cfa.offset ||
1295
- state->stack_size != initial_func_cfi.cfa.offset ||
1296
- state->drap)
2207
+ if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
12972208 return true;
12982209
1299
- for (i = 0; i < CFI_NUM_REGS; i++)
1300
- if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1301
- state->regs[i].offset != initial_func_cfi.regs[i].offset)
2210
+ if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
2211
+ return true;
2212
+
2213
+ if (cfi->stack_size != initial_func_cfi.cfa.offset)
2214
+ return true;
2215
+
2216
+ for (i = 0; i < CFI_NUM_REGS; i++) {
2217
+ if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
2218
+ cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
13022219 return true;
2220
+ }
13032221
13042222 return false;
13052223 }
13062224
13072225 static bool has_valid_stack_frame(struct insn_state *state)
13082226 {
1309
- if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1310
- state->regs[CFI_BP].offset == -16)
2227
+ struct cfi_state *cfi = &state->cfi;
2228
+
2229
+ if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
2230
+ cfi->regs[CFI_BP].offset == -16)
13112231 return true;
13122232
1313
- if (state->drap && state->regs[CFI_BP].base == CFI_BP)
2233
+ if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
13142234 return true;
13152235
13162236 return false;
13172237 }
13182238
1319
-static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
2239
+static int update_cfi_state_regs(struct instruction *insn,
2240
+ struct cfi_state *cfi,
2241
+ struct stack_op *op)
13202242 {
1321
- struct cfi_reg *cfa = &state->cfa;
1322
- struct stack_op *op = &insn->stack_op;
2243
+ struct cfi_reg *cfa = &cfi->cfa;
13232244
13242245 if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
13252246 return 0;
13262247
13272248 /* push */
1328
- if (op->dest.type == OP_DEST_PUSH)
2249
+ if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
13292250 cfa->offset += 8;
13302251
13312252 /* pop */
1332
- if (op->src.type == OP_SRC_POP)
2253
+ if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
13332254 cfa->offset -= 8;
13342255
13352256 /* add immediate to sp */
....@@ -1340,20 +2261,19 @@
13402261 return 0;
13412262 }
13422263
1343
-static void save_reg(struct insn_state *state, unsigned char reg, int base,
1344
- int offset)
2264
+static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
13452265 {
13462266 if (arch_callee_saved_reg(reg) &&
1347
- state->regs[reg].base == CFI_UNDEFINED) {
1348
- state->regs[reg].base = base;
1349
- state->regs[reg].offset = offset;
2267
+ cfi->regs[reg].base == CFI_UNDEFINED) {
2268
+ cfi->regs[reg].base = base;
2269
+ cfi->regs[reg].offset = offset;
13502270 }
13512271 }
13522272
1353
-static void restore_reg(struct insn_state *state, unsigned char reg)
2273
+static void restore_reg(struct cfi_state *cfi, unsigned char reg)
13542274 {
1355
- state->regs[reg].base = CFI_UNDEFINED;
1356
- state->regs[reg].offset = 0;
2275
+ cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
2276
+ cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
13572277 }
13582278
13592279 /*
....@@ -1409,11 +2329,11 @@
14092329 * 41 5d pop %r13
14102330 * c3 retq
14112331 */
1412
-static int update_insn_state(struct instruction *insn, struct insn_state *state)
2332
+static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
2333
+ struct stack_op *op)
14132334 {
1414
- struct stack_op *op = &insn->stack_op;
1415
- struct cfi_reg *cfa = &state->cfa;
1416
- struct cfi_reg *regs = state->regs;
2335
+ struct cfi_reg *cfa = &cfi->cfa;
2336
+ struct cfi_reg *regs = cfi->regs;
14172337
14182338 /* stack operations don't make sense with an undefined CFA */
14192339 if (cfa->base == CFI_UNDEFINED) {
....@@ -1424,8 +2344,9 @@
14242344 return 0;
14252345 }
14262346
1427
- if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1428
- return update_insn_state_regs(insn, state);
2347
+ if (cfi->type == UNWIND_HINT_TYPE_REGS ||
2348
+ cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
2349
+ return update_cfi_state_regs(insn, cfi, op);
14292350
14302351 switch (op->dest.type) {
14312352
....@@ -1440,16 +2361,16 @@
14402361
14412362 /* mov %rsp, %rbp */
14422363 cfa->base = op->dest.reg;
1443
- state->bp_scratch = false;
2364
+ cfi->bp_scratch = false;
14442365 }
14452366
14462367 else if (op->src.reg == CFI_SP &&
1447
- op->dest.reg == CFI_BP && state->drap) {
2368
+ op->dest.reg == CFI_BP && cfi->drap) {
14482369
14492370 /* drap: mov %rsp, %rbp */
14502371 regs[CFI_BP].base = CFI_BP;
1451
- regs[CFI_BP].offset = -state->stack_size;
1452
- state->bp_scratch = false;
2372
+ regs[CFI_BP].offset = -cfi->stack_size;
2373
+ cfi->bp_scratch = false;
14532374 }
14542375
14552376 else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
....@@ -1464,8 +2385,8 @@
14642385 * ...
14652386 * mov %rax, %rsp
14662387 */
1467
- state->vals[op->dest.reg].base = CFI_CFA;
1468
- state->vals[op->dest.reg].offset = -state->stack_size;
2388
+ cfi->vals[op->dest.reg].base = CFI_CFA;
2389
+ cfi->vals[op->dest.reg].offset = -cfi->stack_size;
14692390 }
14702391
14712392 else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
....@@ -1476,14 +2397,14 @@
14762397 *
14772398 * Restore the original stack pointer (Clang).
14782399 */
1479
- state->stack_size = -state->regs[CFI_BP].offset;
2400
+ cfi->stack_size = -cfi->regs[CFI_BP].offset;
14802401 }
14812402
14822403 else if (op->dest.reg == cfa->base) {
14832404
14842405 /* mov %reg, %rsp */
14852406 if (cfa->base == CFI_SP &&
1486
- state->vals[op->src.reg].base == CFI_CFA) {
2407
+ cfi->vals[op->src.reg].base == CFI_CFA) {
14872408
14882409 /*
14892410 * This is needed for the rare case
....@@ -1493,8 +2414,8 @@
14932414 * ...
14942415 * mov %rcx, %rsp
14952416 */
1496
- cfa->offset = -state->vals[op->src.reg].offset;
1497
- state->stack_size = cfa->offset;
2417
+ cfa->offset = -cfi->vals[op->src.reg].offset;
2418
+ cfi->stack_size = cfa->offset;
14982419
14992420 } else {
15002421 cfa->base = CFI_UNDEFINED;
....@@ -1508,7 +2429,7 @@
15082429 if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
15092430
15102431 /* add imm, %rsp */
1511
- state->stack_size -= op->src.offset;
2432
+ cfi->stack_size -= op->src.offset;
15122433 if (cfa->base == CFI_SP)
15132434 cfa->offset -= op->src.offset;
15142435 break;
....@@ -1517,14 +2438,14 @@
15172438 if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
15182439
15192440 /* lea disp(%rbp), %rsp */
1520
- state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
2441
+ cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
15212442 break;
15222443 }
15232444
15242445 if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
15252446
15262447 /* drap: lea disp(%rsp), %drap */
1527
- state->drap_reg = op->dest.reg;
2448
+ cfi->drap_reg = op->dest.reg;
15282449
15292450 /*
15302451 * lea disp(%rsp), %reg
....@@ -1536,25 +2457,25 @@
15362457 * ...
15372458 * mov %rcx, %rsp
15382459 */
1539
- state->vals[op->dest.reg].base = CFI_CFA;
1540
- state->vals[op->dest.reg].offset = \
1541
- -state->stack_size + op->src.offset;
2460
+ cfi->vals[op->dest.reg].base = CFI_CFA;
2461
+ cfi->vals[op->dest.reg].offset = \
2462
+ -cfi->stack_size + op->src.offset;
15422463
15432464 break;
15442465 }
15452466
1546
- if (state->drap && op->dest.reg == CFI_SP &&
1547
- op->src.reg == state->drap_reg) {
2467
+ if (cfi->drap && op->dest.reg == CFI_SP &&
2468
+ op->src.reg == cfi->drap_reg) {
15482469
15492470 /* drap: lea disp(%drap), %rsp */
15502471 cfa->base = CFI_SP;
1551
- cfa->offset = state->stack_size = -op->src.offset;
1552
- state->drap_reg = CFI_UNDEFINED;
1553
- state->drap = false;
2472
+ cfa->offset = cfi->stack_size = -op->src.offset;
2473
+ cfi->drap_reg = CFI_UNDEFINED;
2474
+ cfi->drap = false;
15542475 break;
15552476 }
15562477
1557
- if (op->dest.reg == state->cfa.base) {
2478
+ if (op->dest.reg == cfi->cfa.base) {
15582479 WARN_FUNC("unsupported stack register modification",
15592480 insn->sec, insn->offset);
15602481 return -1;
....@@ -1564,18 +2485,18 @@
15642485
15652486 case OP_SRC_AND:
15662487 if (op->dest.reg != CFI_SP ||
1567
- (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1568
- (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
2488
+ (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
2489
+ (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
15692490 WARN_FUNC("unsupported stack pointer realignment",
15702491 insn->sec, insn->offset);
15712492 return -1;
15722493 }
15732494
1574
- if (state->drap_reg != CFI_UNDEFINED) {
2495
+ if (cfi->drap_reg != CFI_UNDEFINED) {
15752496 /* drap: and imm, %rsp */
1576
- cfa->base = state->drap_reg;
1577
- cfa->offset = state->stack_size = 0;
1578
- state->drap = true;
2497
+ cfa->base = cfi->drap_reg;
2498
+ cfa->offset = cfi->stack_size = 0;
2499
+ cfi->drap = true;
15792500 }
15802501
15812502 /*
....@@ -1586,57 +2507,56 @@
15862507 break;
15872508
15882509 case OP_SRC_POP:
1589
- if (!state->drap && op->dest.type == OP_DEST_REG &&
1590
- op->dest.reg == cfa->base) {
2510
+ case OP_SRC_POPF:
2511
+ if (!cfi->drap && op->dest.reg == cfa->base) {
15912512
15922513 /* pop %rbp */
15932514 cfa->base = CFI_SP;
15942515 }
15952516
1596
- if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1597
- op->dest.type == OP_DEST_REG &&
1598
- op->dest.reg == state->drap_reg &&
1599
- state->drap_offset == -state->stack_size) {
2517
+ if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
2518
+ op->dest.reg == cfi->drap_reg &&
2519
+ cfi->drap_offset == -cfi->stack_size) {
16002520
16012521 /* drap: pop %drap */
1602
- cfa->base = state->drap_reg;
2522
+ cfa->base = cfi->drap_reg;
16032523 cfa->offset = 0;
1604
- state->drap_offset = -1;
2524
+ cfi->drap_offset = -1;
16052525
1606
- } else if (regs[op->dest.reg].offset == -state->stack_size) {
2526
+ } else if (regs[op->dest.reg].offset == -cfi->stack_size) {
16072527
16082528 /* pop %reg */
1609
- restore_reg(state, op->dest.reg);
2529
+ restore_reg(cfi, op->dest.reg);
16102530 }
16112531
1612
- state->stack_size -= 8;
2532
+ cfi->stack_size -= 8;
16132533 if (cfa->base == CFI_SP)
16142534 cfa->offset -= 8;
16152535
16162536 break;
16172537
16182538 case OP_SRC_REG_INDIRECT:
1619
- if (state->drap && op->src.reg == CFI_BP &&
1620
- op->src.offset == state->drap_offset) {
2539
+ if (cfi->drap && op->src.reg == CFI_BP &&
2540
+ op->src.offset == cfi->drap_offset) {
16212541
16222542 /* drap: mov disp(%rbp), %drap */
1623
- cfa->base = state->drap_reg;
2543
+ cfa->base = cfi->drap_reg;
16242544 cfa->offset = 0;
1625
- state->drap_offset = -1;
2545
+ cfi->drap_offset = -1;
16262546 }
16272547
1628
- if (state->drap && op->src.reg == CFI_BP &&
2548
+ if (cfi->drap && op->src.reg == CFI_BP &&
16292549 op->src.offset == regs[op->dest.reg].offset) {
16302550
16312551 /* drap: mov disp(%rbp), %reg */
1632
- restore_reg(state, op->dest.reg);
2552
+ restore_reg(cfi, op->dest.reg);
16332553
16342554 } else if (op->src.reg == cfa->base &&
16352555 op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
16362556
16372557 /* mov disp(%rbp), %reg */
16382558 /* mov disp(%rsp), %reg */
1639
- restore_reg(state, op->dest.reg);
2559
+ restore_reg(cfi, op->dest.reg);
16402560 }
16412561
16422562 break;
....@@ -1650,78 +2570,77 @@
16502570 break;
16512571
16522572 case OP_DEST_PUSH:
1653
- state->stack_size += 8;
2573
+ case OP_DEST_PUSHF:
2574
+ cfi->stack_size += 8;
16542575 if (cfa->base == CFI_SP)
16552576 cfa->offset += 8;
16562577
16572578 if (op->src.type != OP_SRC_REG)
16582579 break;
16592580
1660
- if (state->drap) {
1661
- if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
2581
+ if (cfi->drap) {
2582
+ if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
16622583
16632584 /* drap: push %drap */
16642585 cfa->base = CFI_BP_INDIRECT;
1665
- cfa->offset = -state->stack_size;
2586
+ cfa->offset = -cfi->stack_size;
16662587
16672588 /* save drap so we know when to restore it */
1668
- state->drap_offset = -state->stack_size;
2589
+ cfi->drap_offset = -cfi->stack_size;
16692590
1670
- } else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
2591
+ } else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {
16712592
16722593 /* drap: push %rbp */
1673
- state->stack_size = 0;
2594
+ cfi->stack_size = 0;
16742595
1675
- } else if (regs[op->src.reg].base == CFI_UNDEFINED) {
2596
+ } else {
16762597
16772598 /* drap: push %reg */
1678
- save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
2599
+ save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
16792600 }
16802601
16812602 } else {
16822603
16832604 /* push %reg */
1684
- save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
2605
+ save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
16852606 }
16862607
16872608 /* detect when asm code uses rbp as a scratch register */
16882609 if (!no_fp && insn->func && op->src.reg == CFI_BP &&
16892610 cfa->base != CFI_BP)
1690
- state->bp_scratch = true;
2611
+ cfi->bp_scratch = true;
16912612 break;
16922613
16932614 case OP_DEST_REG_INDIRECT:
16942615
1695
- if (state->drap) {
1696
- if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
2616
+ if (cfi->drap) {
2617
+ if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
16972618
16982619 /* drap: mov %drap, disp(%rbp) */
16992620 cfa->base = CFI_BP_INDIRECT;
17002621 cfa->offset = op->dest.offset;
17012622
17022623 /* save drap offset so we know when to restore it */
1703
- state->drap_offset = op->dest.offset;
1704
- }
1705
-
1706
- else if (regs[op->src.reg].base == CFI_UNDEFINED) {
2624
+ cfi->drap_offset = op->dest.offset;
2625
+ } else {
17072626
17082627 /* drap: mov reg, disp(%rbp) */
1709
- save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
2628
+ save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
17102629 }
17112630
17122631 } else if (op->dest.reg == cfa->base) {
17132632
17142633 /* mov reg, disp(%rbp) */
17152634 /* mov reg, disp(%rsp) */
1716
- save_reg(state, op->src.reg, CFI_CFA,
1717
- op->dest.offset - state->cfa.offset);
2635
+ save_reg(cfi, op->src.reg, CFI_CFA,
2636
+ op->dest.offset - cfi->cfa.offset);
17182637 }
17192638
17202639 break;
17212640
17222641 case OP_DEST_LEAVE:
1723
- if ((!state->drap && cfa->base != CFI_BP) ||
1724
- (state->drap && cfa->base != state->drap_reg)) {
2642
+ if ((!cfi->drap && cfa->base != CFI_BP) ||
2643
+ (cfi->drap && cfa->base != cfi->drap_reg)) {
17252644 WARN_FUNC("leave instruction with modified stack frame",
17262645 insn->sec, insn->offset);
17272646 return -1;
....@@ -1729,10 +2648,10 @@
17292648
17302649 /* leave (mov %rbp, %rsp; pop %rbp) */
17312650
1732
- state->stack_size = -state->regs[CFI_BP].offset - 8;
1733
- restore_reg(state, CFI_BP);
2651
+ cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
2652
+ restore_reg(cfi, CFI_BP);
17342653
1735
- if (!state->drap) {
2654
+ if (!cfi->drap) {
17362655 cfa->base = CFI_SP;
17372656 cfa->offset -= 8;
17382657 }
....@@ -1740,14 +2659,14 @@
17402659 break;
17412660
17422661 case OP_DEST_MEM:
1743
- if (op->src.type != OP_SRC_POP) {
2662
+ if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
17442663 WARN_FUNC("unknown stack-related memory operation",
17452664 insn->sec, insn->offset);
17462665 return -1;
17472666 }
17482667
17492668 /* pop mem */
1750
- state->stack_size -= 8;
2669
+ cfi->stack_size -= 8;
17512670 if (cfa->base == CFI_SP)
17522671 cfa->offset -= 8;
17532672
....@@ -1762,46 +2681,260 @@
17622681 return 0;
17632682 }
17642683
1765
-static bool insn_state_match(struct instruction *insn, struct insn_state *state)
2684
+/*
2685
+ * The stack layouts of alternatives instructions can sometimes diverge when
2686
+ * they have stack modifications. That's fine as long as the potential stack
2687
+ * layouts don't conflict at any given potential instruction boundary.
2688
+ *
2689
+ * Flatten the CFIs of the different alternative code streams (both original
2690
+ * and replacement) into a single shared CFI array which can be used to detect
2691
+ * conflicts and nicely feed a linear array of ORC entries to the unwinder.
2692
+ */
2693
+static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
17662694 {
1767
- struct insn_state *state1 = &insn->state, *state2 = state;
2695
+ struct cfi_state **alt_cfi;
2696
+ int group_off;
2697
+
2698
+ if (!insn->alt_group)
2699
+ return 0;
2700
+
2701
+ if (!insn->cfi) {
2702
+ WARN("CFI missing");
2703
+ return -1;
2704
+ }
2705
+
2706
+ alt_cfi = insn->alt_group->cfi;
2707
+ group_off = insn->offset - insn->alt_group->first_insn->offset;
2708
+
2709
+ if (!alt_cfi[group_off]) {
2710
+ alt_cfi[group_off] = insn->cfi;
2711
+ } else {
2712
+ if (cficmp(alt_cfi[group_off], insn->cfi)) {
2713
+ WARN_FUNC("stack layout conflict in alternatives",
2714
+ insn->sec, insn->offset);
2715
+ return -1;
2716
+ }
2717
+ }
2718
+
2719
+ return 0;
2720
+}
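
A rough sketch of the flattening idea behind propagate_alt_cfi(), with invented names and a plain array standing in for the alt_group's per-offset CFI slots: the first instruction stream to reach an offset records its state there, and a later stream that disagrees is reported as a conflict.

#include <stdio.h>

#define NR_SLOTS 16

/* One slot per byte offset into the alternative; 0 means "not seen yet".
 * A single int stands in for a full CFI state here (illustration only). */
static int alt_cfi[NR_SLOTS];

static int toy_propagate(int offset, int stack_size)
{
	if (!alt_cfi[offset]) {
		alt_cfi[offset] = stack_size;	/* first stream claims the slot */
		return 0;
	}
	if (alt_cfi[offset] != stack_size) {	/* later streams must agree */
		fprintf(stderr, "stack layout conflict at +%d (%d vs %d)\n",
			offset, alt_cfi[offset], stack_size);
		return -1;
	}
	return 0;
}

int main(void)
{
	/* Original stream: stack size 8 at offsets 0 and 4. */
	toy_propagate(0, 8);
	toy_propagate(4, 8);

	/* Replacement stream agrees at offset 0 but has pushed 8 more
	 * bytes by offset 4, which is exactly the conflict warned above. */
	toy_propagate(0, 8);
	printf("conflict: %s\n", toy_propagate(4, 16) ? "yes" : "no");
	return 0;
}
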
2721
+
2722
+static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
2723
+{
2724
+ struct stack_op *op;
2725
+
2726
+ list_for_each_entry(op, &insn->stack_ops, list) {
2727
+
2728
+ if (update_cfi_state(insn, &state->cfi, op))
2729
+ return 1;
2730
+
2731
+ if (op->dest.type == OP_DEST_PUSHF) {
2732
+ if (!state->uaccess_stack) {
2733
+ state->uaccess_stack = 1;
2734
+ } else if (state->uaccess_stack >> 31) {
2735
+ WARN_FUNC("PUSHF stack exhausted",
2736
+ insn->sec, insn->offset);
2737
+ return 1;
2738
+ }
2739
+ state->uaccess_stack <<= 1;
2740
+ state->uaccess_stack |= state->uaccess;
2741
+ }
2742
+
2743
+ if (op->src.type == OP_SRC_POPF) {
2744
+ if (state->uaccess_stack) {
2745
+ state->uaccess = state->uaccess_stack & 1;
2746
+ state->uaccess_stack >>= 1;
2747
+ if (state->uaccess_stack == 1)
2748
+ state->uaccess_stack = 0;
2749
+ }
2750
+ }
2751
+ }
2752
+
2753
+ return 0;
2754
+}
2755
+
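
The PUSHF/POPF handling above keeps the saved uaccess flags as a small stack of bits packed into one integer, newest flag in bit 0 and a sentinel 1-bit marking the bottom. A minimal standalone model of that encoding (the 32-entry overflow check is left out here):

#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

static unsigned int uaccess_stack;	/* packed stack of saved flags */
static bool uaccess;			/* current AC-flag model */

static void pushf(void)
{
	if (!uaccess_stack)
		uaccess_stack = 1;		/* plant the bottom sentinel */
	uaccess_stack = (uaccess_stack << 1) | uaccess;
}

static void popf(void)
{
	if (uaccess_stack) {
		uaccess = uaccess_stack & 1;	/* restore the newest save */
		uaccess_stack >>= 1;
		if (uaccess_stack == 1)		/* only the sentinel left */
			uaccess_stack = 0;
	}
}

int main(void)
{
	uaccess = true;		/* as if STAC had been seen */
	pushf();		/* saves "true"  */
	uaccess = false;	/* CLAC inside the pushf/popf region */
	pushf();		/* saves "false" */

	uaccess = true;
	popf();
	assert(uaccess == false);	/* innermost save restored */
	popf();
	assert(uaccess == true);	/* outer save restored, stack empty */
	assert(uaccess_stack == 0);

	printf("uaccess after popf/popf: %d\n", uaccess);
	return 0;
}
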
2756
+static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
2757
+{
2758
+ struct cfi_state *cfi1 = insn->cfi;
17682759 int i;
17692760
1770
- if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
2761
+ if (!cfi1) {
2762
+ WARN("CFI missing");
2763
+ return false;
2764
+ }
2765
+
2766
+ if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
2767
+
17712768 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
17722769 insn->sec, insn->offset,
1773
- state1->cfa.base, state1->cfa.offset,
1774
- state2->cfa.base, state2->cfa.offset);
2770
+ cfi1->cfa.base, cfi1->cfa.offset,
2771
+ cfi2->cfa.base, cfi2->cfa.offset);
17752772
1776
- } else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
2773
+ } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
17772774 for (i = 0; i < CFI_NUM_REGS; i++) {
1778
- if (!memcmp(&state1->regs[i], &state2->regs[i],
2775
+ if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
17792776 sizeof(struct cfi_reg)))
17802777 continue;
17812778
17822779 WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
17832780 insn->sec, insn->offset,
1784
- i, state1->regs[i].base, state1->regs[i].offset,
1785
- i, state2->regs[i].base, state2->regs[i].offset);
2781
+ i, cfi1->regs[i].base, cfi1->regs[i].offset,
2782
+ i, cfi2->regs[i].base, cfi2->regs[i].offset);
17862783 break;
17872784 }
17882785
1789
- } else if (state1->type != state2->type) {
1790
- WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1791
- insn->sec, insn->offset, state1->type, state2->type);
2786
+ } else if (cfi1->type != cfi2->type) {
17922787
1793
- } else if (state1->drap != state2->drap ||
1794
- (state1->drap && state1->drap_reg != state2->drap_reg) ||
1795
- (state1->drap && state1->drap_offset != state2->drap_offset)) {
2788
+ WARN_FUNC("stack state mismatch: type1=%d type2=%d",
2789
+ insn->sec, insn->offset, cfi1->type, cfi2->type);
2790
+
2791
+ } else if (cfi1->drap != cfi2->drap ||
2792
+ (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
2793
+ (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
2794
+
17962795 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
17972796 insn->sec, insn->offset,
1798
- state1->drap, state1->drap_reg, state1->drap_offset,
1799
- state2->drap, state2->drap_reg, state2->drap_offset);
2797
+ cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
2798
+ cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
18002799
18012800 } else
18022801 return true;
18032802
18042803 return false;
2804
+}
2805
+
2806
+static inline bool func_uaccess_safe(struct symbol *func)
2807
+{
2808
+ if (func)
2809
+ return func->uaccess_safe;
2810
+
2811
+ return false;
2812
+}
2813
+
2814
+static inline const char *call_dest_name(struct instruction *insn)
2815
+{
2816
+ if (insn->call_dest)
2817
+ return insn->call_dest->name;
2818
+
2819
+ return "{dynamic}";
2820
+}
2821
+
2822
+static inline bool noinstr_call_dest(struct symbol *func)
2823
+{
2824
+ /*
2825
+ * We can't deal with indirect function calls at present;
2826
+ * assume they're instrumented.
2827
+ */
2828
+ if (!func)
2829
+ return false;
2830
+
2831
+ /*
2832
+ * If the symbol is from a noinstr section, we're good.
2833
+ */
2834
+ if (func->sec->noinstr)
2835
+ return true;
2836
+
2837
+ /*
2838
+ * The __ubsan_handle_*() calls are like WARN(), they only happen when
2839
+ * something 'BAD' happened. At the risk of taking the machine down,
2840
+ * let them proceed to get the message out.
2841
+ */
2842
+ if (!strncmp(func->name, "__ubsan_handle_", 15))
2843
+ return true;
2844
+
2845
+ return false;
2846
+}
2847
+
2848
+static int validate_call(struct instruction *insn, struct insn_state *state)
2849
+{
2850
+ if (state->noinstr && state->instr <= 0 &&
2851
+ !noinstr_call_dest(insn->call_dest)) {
2852
+ WARN_FUNC("call to %s() leaves .noinstr.text section",
2853
+ insn->sec, insn->offset, call_dest_name(insn));
2854
+ return 1;
2855
+ }
2856
+
2857
+ if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
2858
+ WARN_FUNC("call to %s() with UACCESS enabled",
2859
+ insn->sec, insn->offset, call_dest_name(insn));
2860
+ return 1;
2861
+ }
2862
+
2863
+ if (state->df) {
2864
+ WARN_FUNC("call to %s() with DF set",
2865
+ insn->sec, insn->offset, call_dest_name(insn));
2866
+ return 1;
2867
+ }
2868
+
2869
+ return 0;
2870
+}
2871
+
2872
+static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
2873
+{
2874
+ if (has_modified_stack_frame(insn, state)) {
2875
+ WARN_FUNC("sibling call from callable instruction with modified stack frame",
2876
+ insn->sec, insn->offset);
2877
+ return 1;
2878
+ }
2879
+
2880
+ return validate_call(insn, state);
2881
+}
2882
+
2883
+static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
2884
+{
2885
+ if (state->noinstr && state->instr > 0) {
2886
+ WARN_FUNC("return with instrumentation enabled",
2887
+ insn->sec, insn->offset);
2888
+ return 1;
2889
+ }
2890
+
2891
+ if (state->uaccess && !func_uaccess_safe(func)) {
2892
+ WARN_FUNC("return with UACCESS enabled",
2893
+ insn->sec, insn->offset);
2894
+ return 1;
2895
+ }
2896
+
2897
+ if (!state->uaccess && func_uaccess_safe(func)) {
2898
+ WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
2899
+ insn->sec, insn->offset);
2900
+ return 1;
2901
+ }
2902
+
2903
+ if (state->df) {
2904
+ WARN_FUNC("return with DF set",
2905
+ insn->sec, insn->offset);
2906
+ return 1;
2907
+ }
2908
+
2909
+ if (func && has_modified_stack_frame(insn, state)) {
2910
+ WARN_FUNC("return with modified stack frame",
2911
+ insn->sec, insn->offset);
2912
+ return 1;
2913
+ }
2914
+
2915
+ if (state->cfi.bp_scratch) {
2916
+ WARN_FUNC("BP used as a scratch register",
2917
+ insn->sec, insn->offset);
2918
+ return 1;
2919
+ }
2920
+
2921
+ return 0;
2922
+}
2923
+
2924
+static struct instruction *next_insn_to_validate(struct objtool_file *file,
2925
+ struct instruction *insn)
2926
+{
2927
+ struct alt_group *alt_group = insn->alt_group;
2928
+
2929
+ /*
2930
+ * Simulate the fact that alternatives are patched in-place. When the
2931
+ * end of a replacement alt_group is reached, redirect objtool flow to
2932
+ * the end of the original alt_group.
2933
+ */
2934
+ if (alt_group && insn == alt_group->last_insn && alt_group->orig_group)
2935
+ return next_insn_same_sec(file, alt_group->orig_group->last_insn);
2936
+
2937
+ return next_insn_same_sec(file, insn);
18052938 }
18062939
18072940 /*
....@@ -1810,26 +2943,19 @@
18102943 * each instruction and validate all the rules described in
18112944 * tools/objtool/Documentation/stack-validation.txt.
18122945 */
1813
-static int validate_branch(struct objtool_file *file, struct instruction *first,
1814
- struct insn_state state)
2946
+static int validate_branch(struct objtool_file *file, struct symbol *func,
2947
+ struct instruction *insn, struct insn_state state)
18152948 {
18162949 struct alternative *alt;
1817
- struct instruction *insn, *next_insn;
2950
+ struct instruction *next_insn, *prev_insn = NULL;
18182951 struct section *sec;
1819
- struct symbol *func = NULL;
2952
+ u8 visited;
18202953 int ret;
18212954
1822
- insn = first;
18232955 sec = insn->sec;
18242956
1825
- if (insn->alt_group && list_empty(&insn->alts)) {
1826
- WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1827
- sec, insn->offset);
1828
- return 1;
1829
- }
1830
-
18312957 while (1) {
1832
- next_insn = next_insn_same_sec(file, insn);
2958
+ next_insn = next_insn_to_validate(file, insn);
18332959
18342960 if (file->c_file && func && insn->func && func != insn->func->pfunc) {
18352961 WARN("%s() falls through to next function %s()",
....@@ -1837,21 +2963,25 @@
18372963 return 1;
18382964 }
18392965
1840
- if (insn->func)
1841
- func = insn->func->pfunc;
1842
-
18432966 if (func && insn->ignore) {
18442967 WARN_FUNC("BUG: why am I validating an ignored function?",
18452968 sec, insn->offset);
18462969 return 1;
18472970 }
18482971
1849
- if (insn->visited) {
1850
- if (!insn->hint && !insn_state_match(insn, &state))
2972
+ visited = VISITED_BRANCH << state.uaccess;
2973
+ if (insn->visited & VISITED_BRANCH_MASK) {
2974
+ if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
18512975 return 1;
18522976
1853
- return 0;
2977
+ if (insn->visited & visited)
2978
+ return 0;
2979
+ } else {
2980
+ nr_insns_visited++;
18542981 }
2982
+
2983
+ if (state.noinstr)
2984
+ state.instr += insn->instr;
18552985
18562986 if (insn->hint) {
18572987 if (insn->restore) {
....@@ -1859,7 +2989,8 @@
18592989
18602990 i = insn;
18612991 save_insn = NULL;
1862
- func_for_each_insn_continue_reverse(file, insn->func, i) {
2992
+
2993
+ sym_for_each_insn_continue_reverse(file, func, i) {
18632994 if (i->save) {
18642995 save_insn = i;
18652996 break;
....@@ -1873,89 +3004,97 @@
18733004 }
18743005
18753006 if (!save_insn->visited) {
1876
- /*
1877
- * Oops, no state to copy yet.
1878
- * Hopefully we can reach this
1879
- * instruction from another branch
1880
- * after the save insn has been
1881
- * visited.
1882
- */
1883
- if (insn == first)
1884
- return 0;
1885
-
18863007 WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
18873008 sec, insn->offset);
18883009 return 1;
18893010 }
18903011
1891
- insn->state = save_insn->state;
3012
+ insn->cfi = save_insn->cfi;
3013
+ nr_cfi_reused++;
18923014 }
18933015
1894
- state = insn->state;
3016
+ state.cfi = *insn->cfi;
3017
+ } else {
3018
+ /* XXX track if we actually changed state.cfi */
18953019
1896
- } else
1897
- insn->state = state;
1898
-
1899
- insn->visited = true;
1900
-
1901
- if (!insn->ignore_alts) {
1902
- list_for_each_entry(alt, &insn->alts, list) {
1903
- ret = validate_branch(file, alt->insn, state);
1904
- if (ret)
1905
- return 1;
3020
+ if (prev_insn && !cficmp(prev_insn->cfi, &state.cfi)) {
3021
+ insn->cfi = prev_insn->cfi;
3022
+ nr_cfi_reused++;
3023
+ } else {
3024
+ insn->cfi = cfi_hash_find_or_add(&state.cfi);
19063025 }
19073026 }
3027
+
3028
+ insn->visited |= visited;
3029
+
3030
+ if (propagate_alt_cfi(file, insn))
3031
+ return 1;
3032
+
3033
+ if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3034
+ bool skip_orig = false;
3035
+
3036
+ list_for_each_entry(alt, &insn->alts, list) {
3037
+ if (alt->skip_orig)
3038
+ skip_orig = true;
3039
+
3040
+ ret = validate_branch(file, func, alt->insn, state);
3041
+ if (ret) {
3042
+ if (backtrace)
3043
+ BT_FUNC("(alt)", insn);
3044
+ return ret;
3045
+ }
3046
+ }
3047
+
3048
+ if (skip_orig)
3049
+ return 0;
3050
+ }
3051
+
3052
+ if (handle_insn_ops(insn, &state))
3053
+ return 1;
19083054
19093055 switch (insn->type) {
19103056
19113057 case INSN_RETURN:
1912
- if (func && has_modified_stack_frame(&state)) {
1913
- WARN_FUNC("return with modified stack frame",
1914
- sec, insn->offset);
1915
- return 1;
3058
+ if (sls && !insn->retpoline_safe &&
3059
+ next_insn && next_insn->type != INSN_TRAP) {
3060
+ WARN_FUNC("missing int3 after ret",
3061
+ insn->sec, insn->offset);
19163062 }
1917
-
1918
- if (state.bp_scratch) {
1919
- WARN("%s uses BP as a scratch register",
1920
- insn->func->name);
1921
- return 1;
1922
- }
1923
-
1924
- return 0;
3063
+ return validate_return(func, insn, &state);
19253064
19263065 case INSN_CALL:
1927
- if (is_fentry_call(insn))
1928
- break;
1929
-
1930
- ret = dead_end_function(file, insn->call_dest);
1931
- if (ret == 1)
1932
- return 0;
1933
- if (ret == -1)
1934
- return 1;
1935
-
1936
- /* fallthrough */
19373066 case INSN_CALL_DYNAMIC:
1938
- if (!no_fp && func && !has_valid_stack_frame(&state)) {
3067
+ ret = validate_call(insn, &state);
3068
+ if (ret)
3069
+ return ret;
3070
+
3071
+ if (!no_fp && func && !is_special_call(insn) &&
3072
+ !has_valid_stack_frame(&state)) {
19393073 WARN_FUNC("call without frame pointer save/setup",
19403074 sec, insn->offset);
19413075 return 1;
19423076 }
3077
+
3078
+ if (dead_end_function(file, insn->call_dest))
3079
+ return 0;
3080
+
19433081 break;
19443082
19453083 case INSN_JUMP_CONDITIONAL:
19463084 case INSN_JUMP_UNCONDITIONAL:
1947
- if (insn->jump_dest &&
1948
- (!func || !insn->jump_dest->func ||
1949
- insn->jump_dest->func->pfunc == func)) {
1950
- ret = validate_branch(file, insn->jump_dest,
1951
- state);
3085
+ if (is_sibling_call(insn)) {
3086
+ ret = validate_sibling_call(insn, &state);
19523087 if (ret)
1953
- return 1;
3088
+ return ret;
19543089
1955
- } else if (func && has_modified_stack_frame(&state)) {
1956
- WARN_FUNC("sibling call from callable instruction with modified stack frame",
1957
- sec, insn->offset);
1958
- return 1;
3090
+ } else if (insn->jump_dest) {
3091
+ ret = validate_branch(file, func,
3092
+ insn->jump_dest, state);
3093
+ if (ret) {
3094
+ if (backtrace)
3095
+ BT_FUNC("(branch)", insn);
3096
+ return ret;
3097
+ }
19593098 }
19603099
19613100 if (insn->type == INSN_JUMP_UNCONDITIONAL)
....@@ -1964,14 +3103,24 @@
19643103 break;
19653104
19663105 case INSN_JUMP_DYNAMIC:
1967
- if (func && list_empty(&insn->alts) &&
1968
- has_modified_stack_frame(&state)) {
1969
- WARN_FUNC("sibling call from callable instruction with modified stack frame",
1970
- sec, insn->offset);
1971
- return 1;
3106
+ if (sls && !insn->retpoline_safe &&
3107
+ next_insn && next_insn->type != INSN_TRAP) {
3108
+ WARN_FUNC("missing int3 after indirect jump",
3109
+ insn->sec, insn->offset);
19723110 }
19733111
1974
- return 0;
3112
+ /* fallthrough */
3113
+ case INSN_JUMP_DYNAMIC_CONDITIONAL:
3114
+ if (is_sibling_call(insn)) {
3115
+ ret = validate_sibling_call(insn, &state);
3116
+ if (ret)
3117
+ return ret;
3118
+ }
3119
+
3120
+ if (insn->type == INSN_JUMP_DYNAMIC)
3121
+ return 0;
3122
+
3123
+ break;
19753124
19763125 case INSN_CONTEXT_SWITCH:
19773126 if (func && (!next_insn || !next_insn->hint)) {
....@@ -1981,10 +3130,45 @@
19813130 }
19823131 return 0;
19833132
1984
- case INSN_STACK:
1985
- if (update_insn_state(insn, &state))
3133
+ case INSN_STAC:
3134
+ if (state.uaccess) {
3135
+ WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
19863136 return 1;
3137
+ }
19873138
3139
+ state.uaccess = true;
3140
+ break;
3141
+
3142
+ case INSN_CLAC:
3143
+ if (!state.uaccess && func) {
3144
+ WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
3145
+ return 1;
3146
+ }
3147
+
3148
+ if (func_uaccess_safe(func) && !state.uaccess_stack) {
3149
+ WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
3150
+ return 1;
3151
+ }
3152
+
3153
+ state.uaccess = false;
3154
+ break;
3155
+
3156
+ case INSN_STD:
3157
+ if (state.df) {
3158
+ WARN_FUNC("recursive STD", sec, insn->offset);
3159
+ return 1;
3160
+ }
3161
+
3162
+ state.df = true;
3163
+ break;
3164
+
3165
+ case INSN_CLD:
3166
+ if (!state.df && func) {
3167
+ WARN_FUNC("redundant CLD", sec, insn->offset);
3168
+ return 1;
3169
+ }
3170
+
3171
+ state.df = false;
19883172 break;
19893173
19903174 default:
....@@ -1995,34 +3179,186 @@
19953179 return 0;
19963180
19973181 if (!next_insn) {
1998
- if (state.cfa.base == CFI_UNDEFINED)
3182
+ if (state.cfi.cfa.base == CFI_UNDEFINED)
19993183 return 0;
20003184 WARN("%s: unexpected end of section", sec->name);
20013185 return 1;
20023186 }
20033187
3188
+ prev_insn = insn;
20043189 insn = next_insn;
20053190 }
20063191
20073192 return 0;
20083193 }
20093194
2010
-static int validate_unwind_hints(struct objtool_file *file)
3195
+static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
20113196 {
20123197 struct instruction *insn;
2013
- int ret, warnings = 0;
20143198 struct insn_state state;
3199
+ int ret, warnings = 0;
20153200
20163201 if (!file->hints)
20173202 return 0;
20183203
2019
- clear_insn_state(&state);
3204
+ init_insn_state(&state, sec);
20203205
2021
- for_each_insn(file, insn) {
3206
+ if (sec) {
3207
+ insn = find_insn(file, sec, 0);
3208
+ if (!insn)
3209
+ return 0;
3210
+ } else {
3211
+ insn = list_first_entry(&file->insn_list, typeof(*insn), list);
3212
+ }
3213
+
3214
+ while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
20223215 if (insn->hint && !insn->visited) {
2023
- ret = validate_branch(file, insn, state);
3216
+ ret = validate_branch(file, insn->func, insn, state);
3217
+ if (ret && backtrace)
3218
+ BT_FUNC("<=== (hint)", insn);
20243219 warnings += ret;
20253220 }
3221
+
3222
+ insn = list_next_entry(insn, list);
3223
+ }
3224
+
3225
+ return warnings;
3226
+}
3227
+
3228
+/*
3229
+ * Validate rethunk entry constraint: must untrain RET before the first RET.
3230
+ *
3231
+ * Follow every branch (intra-function) and ensure ANNOTATE_UNRET_END comes
3232
+ * before an actual RET instruction.
3233
+ */
3234
+static int validate_entry(struct objtool_file *file, struct instruction *insn)
3235
+{
3236
+ struct instruction *next, *dest;
3237
+ int ret, warnings = 0;
3238
+
3239
+ for (;;) {
3240
+ next = next_insn_to_validate(file, insn);
3241
+
3242
+ if (insn->visited & VISITED_ENTRY)
3243
+ return 0;
3244
+
3245
+ insn->visited |= VISITED_ENTRY;
3246
+
3247
+ if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3248
+ struct alternative *alt;
3249
+ bool skip_orig = false;
3250
+
3251
+ list_for_each_entry(alt, &insn->alts, list) {
3252
+ if (alt->skip_orig)
3253
+ skip_orig = true;
3254
+
3255
+ ret = validate_entry(file, alt->insn);
3256
+ if (ret) {
3257
+ if (backtrace)
3258
+ BT_FUNC("(alt)", insn);
3259
+ return ret;
3260
+ }
3261
+ }
3262
+
3263
+ if (skip_orig)
3264
+ return 0;
3265
+ }
3266
+
3267
+ switch (insn->type) {
3268
+
3269
+ case INSN_CALL_DYNAMIC:
3270
+ case INSN_JUMP_DYNAMIC:
3271
+ case INSN_JUMP_DYNAMIC_CONDITIONAL:
3272
+ WARN_FUNC("early indirect call", insn->sec, insn->offset);
3273
+ return 1;
3274
+
3275
+ case INSN_JUMP_UNCONDITIONAL:
3276
+ case INSN_JUMP_CONDITIONAL:
3277
+ if (!is_sibling_call(insn)) {
3278
+ if (!insn->jump_dest) {
3279
+ WARN_FUNC("unresolved jump target after linking?!?",
3280
+ insn->sec, insn->offset);
3281
+ return -1;
3282
+ }
3283
+ ret = validate_entry(file, insn->jump_dest);
3284
+ if (ret) {
3285
+ if (backtrace) {
3286
+ BT_FUNC("(branch%s)", insn,
3287
+ insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : "");
3288
+ }
3289
+ return ret;
3290
+ }
3291
+
3292
+ if (insn->type == INSN_JUMP_UNCONDITIONAL)
3293
+ return 0;
3294
+
3295
+ break;
3296
+ }
3297
+
3298
+ /* fallthrough */
3299
+ case INSN_CALL:
3300
+ dest = find_insn(file, insn->call_dest->sec,
3301
+ insn->call_dest->offset);
3302
+ if (!dest) {
3303
+ WARN("Unresolved function after linking!?: %s",
3304
+ insn->call_dest->name);
3305
+ return -1;
3306
+ }
3307
+
3308
+ ret = validate_entry(file, dest);
3309
+ if (ret) {
3310
+ if (backtrace)
3311
+ BT_FUNC("(call)", insn);
3312
+ return ret;
3313
+ }
3314
+ /*
3315
+ * If a call returns without error, it must have seen UNTRAIN_RET.
3316
+ * Therefore any non-error return is a success.
3317
+ */
3318
+ return 0;
3319
+
3320
+ case INSN_RETURN:
3321
+ WARN_FUNC("RET before UNTRAIN", insn->sec, insn->offset);
3322
+ return 1;
3323
+
3324
+ case INSN_NOP:
3325
+ if (insn->retpoline_safe)
3326
+ return 0;
3327
+ break;
3328
+
3329
+ default:
3330
+ break;
3331
+ }
3332
+
3333
+ if (!next) {
3334
+ WARN_FUNC("teh end!", insn->sec, insn->offset);
3335
+ return -1;
3336
+ }
3337
+ insn = next;
3338
+ }
3339
+
3340
+ return warnings;
3341
+}
3342
+
3343
+/*
3344
+ * Validate that all branches starting at 'insn->entry' encounter UNRET_END
3345
+ * before RET.
3346
+ */
3347
+static int validate_unret(struct objtool_file *file)
3348
+{
3349
+ struct instruction *insn;
3350
+ int ret, warnings = 0;
3351
+
3352
+ for_each_insn(file, insn) {
3353
+ if (!insn->entry)
3354
+ continue;
3355
+
3356
+ ret = validate_entry(file, insn);
3357
+ if (ret < 0) {
3358
+ WARN_FUNC("Failed UNRET validation", insn->sec, insn->offset);
3359
+ return ret;
3360
+ }
3361
+ warnings += ret;
20263362 }
20273363
20283364 return warnings;
....@@ -2035,7 +3371,8 @@
20353371
20363372 for_each_insn(file, insn) {
20373373 if (insn->type != INSN_JUMP_DYNAMIC &&
2038
- insn->type != INSN_CALL_DYNAMIC)
3374
+ insn->type != INSN_CALL_DYNAMIC &&
3375
+ insn->type != INSN_RETURN)
20393376 continue;
20403377
20413378 if (insn->retpoline_safe)
@@ -2050,9 +3387,17 @@
 		if (!strcmp(insn->sec->name, ".init.text") && !module)
 			continue;
 
-		WARN_FUNC("indirect %s found in RETPOLINE build",
-			  insn->sec, insn->offset,
-			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
+		if (insn->type == INSN_RETURN) {
+			if (rethunk) {
+				WARN_FUNC("'naked' return found in RETHUNK build",
+					  insn->sec, insn->offset);
+			} else
+				continue;
+		} else {
+			WARN_FUNC("indirect %s found in RETPOLINE build",
+				  insn->sec, insn->offset,
+				  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
+		}
 
 		warnings++;
 	}
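With the hunk above, a RETHUNK build now also complains about 'naked' returns, and both the indirect-branch and naked-return reports are skipped when insn->retpoline_safe is set. For reference, a kernel-context sketch (not part of this patch, assumes kernel headers, and the fn pointer is illustrative) of how x86 code marks a deliberately un-retpolined indirect call with ANNOTATE_RETPOLINE_SAFE from <asm/nospec-branch.h>, the annotation objtool translates into retpoline_safe:

	/* deliberate indirect call, annotated so objtool stays quiet */
	asm volatile(ANNOTATE_RETPOLINE_SAFE
		     "call *%[func]\n\t"
		     : : [func] "r" (fn) : "memory");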
@@ -2073,11 +3418,12 @@
 			"__ubsan_handle_builtin_unreachable"));
 }
 
-static bool ignore_unreachable_insn(struct instruction *insn)
+static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
 {
 	int i;
+	struct instruction *prev_insn;
 
-	if (insn->ignore || insn->type == INSN_NOP)
+	if (insn->ignore || insn->type == INSN_NOP || insn->type == INSN_TRAP)
 		return true;
 
 	/*
@@ -2100,8 +3446,11 @@
 	 * __builtin_unreachable(). The BUG() macro has an unreachable() after
 	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
 	 * (or occasionally a JMP to UD2).
+	 *
+	 * It may also insert a UD2 after calling a __noreturn function.
 	 */
-	if (list_prev_entry(insn, list)->dead_end &&
+	prev_insn = list_prev_entry(insn, list);
+	if ((prev_insn->dead_end || dead_end_function(file, prev_insn->call_dest)) &&
 	    (insn->type == INSN_BUG ||
 	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
 	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
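The new dead_end_function() check above matches the added comment: the compiler may place a trapping UD2 directly after a call to a __noreturn function, and that trap can never execute. A small self-contained illustration of the pattern being whitelisted (ordinary userspace C, only meant to show the shape of the code):

#include <stdlib.h>

__attribute__((noreturn)) static void fail(void)
{
	abort();			/* never returns */
}

static int clamp_positive(int x)
{
	if (x < 0)
		fail();			/* a UD2 emitted after this call is unreachable */
	return x;
}

int main(void)
{
	return clamp_positive(5) == 5 ? 0 : 1;
}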
@@ -2137,33 +3486,81 @@
 	return false;
 }
 
+static int validate_symbol(struct objtool_file *file, struct section *sec,
+			   struct symbol *sym, struct insn_state *state)
+{
+	struct instruction *insn;
+	int ret;
+
+	if (!sym->len) {
+		WARN("%s() is missing an ELF size annotation", sym->name);
+		return 1;
+	}
+
+	if (sym->pfunc != sym || sym->alias != sym)
+		return 0;
+
+	insn = find_insn(file, sec, sym->offset);
+	if (!insn || insn->ignore || insn->visited)
+		return 0;
+
+	state->uaccess = sym->uaccess_safe;
+
+	ret = validate_branch(file, insn->func, insn, *state);
+	if (ret && backtrace)
+		BT_FUNC("<=== (sym)", insn);
+	return ret;
+}
+
+static int validate_section(struct objtool_file *file, struct section *sec)
+{
+	struct insn_state state;
+	struct symbol *func;
+	int warnings = 0;
+
+	list_for_each_entry(func, &sec->symbol_list, list) {
+		if (func->type != STT_FUNC)
+			continue;
+
+		init_insn_state(&state, sec);
+		set_func_state(&state.cfi);
+
+		warnings += validate_symbol(file, sec, func, &state);
+	}
+
+	return warnings;
+}
+
+static int validate_vmlinux_functions(struct objtool_file *file)
+{
+	struct section *sec;
+	int warnings = 0;
+
+	sec = find_section_by_name(file->elf, ".noinstr.text");
+	if (sec) {
+		warnings += validate_section(file, sec);
+		warnings += validate_unwind_hints(file, sec);
+	}
+
+	sec = find_section_by_name(file->elf, ".entry.text");
+	if (sec) {
+		warnings += validate_section(file, sec);
+		warnings += validate_unwind_hints(file, sec);
+	}
+
+	return warnings;
+}
+
 static int validate_functions(struct objtool_file *file)
 {
 	struct section *sec;
-	struct symbol *func;
-	struct instruction *insn;
-	struct insn_state state;
-	int ret, warnings = 0;
-
-	clear_insn_state(&state);
-
-	state.cfa = initial_func_cfi.cfa;
-	memcpy(&state.regs, &initial_func_cfi.regs,
-	       CFI_NUM_REGS * sizeof(struct cfi_reg));
-	state.stack_size = initial_func_cfi.cfa.offset;
+	int warnings = 0;
 
 	for_each_sec(file, sec) {
-		list_for_each_entry(func, &sec->symbol_list, list) {
-			if (func->type != STT_FUNC || func->pfunc != func)
-				continue;
+		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
+			continue;
 
-			insn = find_insn(file, sec, func->offset);
-			if (!insn || insn->ignore)
-				continue;
-
-			ret = validate_branch(file, insn, state);
-			warnings += ret;
-		}
+		warnings += validate_section(file, sec);
 	}
 
 	return warnings;
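The validate_vmlinux_functions() helper introduced in this hunk validates only .noinstr.text and .entry.text, including their unwind hints. A kernel-context sketch (not from this patch, function name made up) of how a function lands in the first of those sections, via the noinstr attribute from <linux/compiler_types.h>:

/* placed in .noinstr.text, so the section-level pass above covers it */
static noinstr void exit_to_user_fixup(void)
{
	/* keep this path free of tracing and other instrumentation */
}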
@@ -2177,7 +3574,7 @@
 		return 0;
 
 	for_each_insn(file, insn) {
-		if (insn->visited || ignore_unreachable_insn(insn))
+		if (insn->visited || ignore_unreachable_insn(file, insn))
 			continue;
 
 		WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
@@ -2187,95 +3584,112 @@
 	return 0;
 }
 
-static void cleanup(struct objtool_file *file)
-{
-	struct instruction *insn, *tmpinsn;
-	struct alternative *alt, *tmpalt;
-
-	list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
-		list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
-			list_del(&alt->list);
-			free(alt);
-		}
-		list_del(&insn->list);
-		hash_del(&insn->hash);
-		free(insn);
-	}
-	elf_close(file->elf);
-}
-
-static struct objtool_file file;
-
-int check(const char *_objname, bool orc)
+int check(struct objtool_file *file)
 {
 	int ret, warnings = 0;
 
-	objname = _objname;
-
-	file.elf = elf_open(objname, orc ? O_RDWR : O_RDONLY);
-	if (!file.elf)
-		return 1;
-
-	INIT_LIST_HEAD(&file.insn_list);
-	hash_init(file.insn_hash);
-	file.whitelist = find_section_by_name(file.elf, ".discard.func_stack_frame_non_standard");
-	file.c_file = find_section_by_name(file.elf, ".comment");
-	file.ignore_unreachables = no_unreachable;
-	file.hints = false;
-
 	arch_initial_func_cfi_state(&initial_func_cfi);
+	init_cfi_state(&init_cfi);
+	init_cfi_state(&func_cfi);
+	set_func_state(&func_cfi);
 
-	ret = decode_sections(&file);
+	if (!cfi_hash_alloc())
+		goto out;
+
+	cfi_hash_add(&init_cfi);
+	cfi_hash_add(&func_cfi);
+
+	ret = decode_sections(file);
 	if (ret < 0)
 		goto out;
+
 	warnings += ret;
 
-	if (list_empty(&file.insn_list))
+	if (list_empty(&file->insn_list))
 		goto out;
 
+	if (vmlinux && !validate_dup) {
+		ret = validate_vmlinux_functions(file);
+		if (ret < 0)
+			goto out;
+
+		warnings += ret;
+		goto out;
+	}
+
 	if (retpoline) {
-		ret = validate_retpoline(&file);
+		ret = validate_retpoline(file);
 		if (ret < 0)
 			return ret;
 		warnings += ret;
 	}
 
-	ret = validate_functions(&file);
+	ret = validate_functions(file);
 	if (ret < 0)
 		goto out;
 	warnings += ret;
 
-	ret = validate_unwind_hints(&file);
+	ret = validate_unwind_hints(file, NULL);
 	if (ret < 0)
 		goto out;
 	warnings += ret;
+
+	if (unret) {
+		/*
+		 * Must be after validate_branch() and friends, it plays
+		 * further games with insn->visited.
+		 */
+		ret = validate_unret(file);
+		if (ret < 0)
+			return ret;
+		warnings += ret;
+	}
 
 	if (!warnings) {
-		ret = validate_reachable_instructions(&file);
+		ret = validate_reachable_instructions(file);
 		if (ret < 0)
 			goto out;
 		warnings += ret;
 	}
 
-	if (orc) {
-		ret = create_orc(&file);
-		if (ret < 0)
-			goto out;
+	ret = create_static_call_sections(file);
+	if (ret < 0)
+		goto out;
+	warnings += ret;
 
-		ret = create_orc_sections(&file);
+	if (mcount) {
+		ret = create_mcount_loc_sections(file);
 		if (ret < 0)
 			goto out;
+		warnings += ret;
+	}
 
-		ret = elf_write(file.elf);
+	if (retpoline) {
+		ret = create_retpoline_sites_sections(file);
 		if (ret < 0)
 			goto out;
+		warnings += ret;
+	}
+
+	if (rethunk) {
+		ret = create_return_sites_sections(file);
+		if (ret < 0)
+			goto out;
+		warnings += ret;
+	}
+
+	if (stats) {
+		printf("nr_insns_visited: %ld\n", nr_insns_visited);
+		printf("nr_cfi: %ld\n", nr_cfi);
+		printf("nr_cfi_reused: %ld\n", nr_cfi_reused);
+		printf("nr_cfi_cache: %ld\n", nr_cfi_cache);
 	}
 
 out:
-	cleanup(&file);
-
-	/* ignore warnings for now until we get all the code cleaned up */
-	if (ret || warnings)
-		return 0;
+	/*
+	 * For now, don't fail the kernel build on fatal warnings. These
+	 * errors are still fairly common due to the growing matrix of
+	 * supported toolchains and their recent pace of change.
	 */
 	return 0;
 }
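Beyond validation, check() now also emits the .static_call_sites section via create_static_call_sections(), plus the optional mcount, retpoline-sites and return-sites data, while the old elf_write()/cleanup() calls are gone from check() itself. For orientation, a kernel-context sketch (not part of this patch, all names illustrative) of the <linux/static_call.h> API whose call sites end up in that section:

#include <linux/static_call.h>

static int default_handler(int cpu)
{
	return 0;
}
DEFINE_STATIC_CALL(perf_handler, default_handler);

static int dispatch(int cpu)
{
	/* this call site is recorded in .static_call_sites and patched at runtime */
	return static_call(perf_handler)(cpu);
}

/* a driver would switch implementations with:
 *	static_call_update(perf_handler, hw_specific_handler);
 */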