1 | /* Control flow redundancy hardening. |
2 | Copyright (C) 2022 Free Software Foundation, Inc. | |
3 | Contributed by Alexandre Oliva <oliva@adacore.com>. | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #define INCLUDE_ALGORITHM /* find */ | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
25 | #include "backend.h" | |
26 | #include "memmodel.h" | |
27 | #include "tm_p.h" | |
28 | #include "tree.h" |
29 | #include "fold-const.h" | |
30 | #include "gimple.h" | |
31 | #include "gimplify.h" | |
32 | #include "tree-pass.h" | |
33 | #include "ssa.h" | |
34 | #include "gimple-iterator.h" | |
35 | #include "gimple-pretty-print.h" | |
36 | #include "tree-cfg.h" | |
37 | #include "tree-cfgcleanup.h" | |
38 | #include "tree-eh.h" | |
39 | #include "except.h" | |
40 | #include "sbitmap.h" | |
41 | #include "basic-block.h" | |
42 | #include "cfghooks.h" | |
43 | #include "cfgloop.h" | |
44 | #include "cgraph.h" | |
45 | #include "alias.h" | |
46 | #include "varasm.h" | |
47 | #include "output.h" | |
48 | #include "langhooks.h" | |
49 | #include "diagnostic.h" | |
50 | #include "intl.h" | |
51 | ||
52 | namespace { | |
53 | ||
54 | /* This pass introduces verification, at function exits, that booleans | |
55 | set in each basic block during function execution reflect the | |
56 | control flow graph: for each visited block, check that at least one | |
57 | predecessor and at least one successor were also visited. This | |
58 | sort of hardening may detect various kinds of attacks. */ | |
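/* As a sketch of the transformation (illustrative only, not literal
   output), a function with ordinary blocks B2..B4 and a single edge
   to exit ends up looking roughly like:

     vword .cfrvisited[words] = { 0 };       // zeroed on the entry edge
     B2: .cfrvisited[0] |= 1 << 0; ...       // each block sets its bit
     B3: .cfrvisited[0] |= 1 << 1; ...
     B4: .cfrvisited[0] |= 1 << 2; ...
     // and, on the way out, either inline tests of the pred/succ bits
     // that trap on inconsistency, or a call like
     //   __hardcfr_check (block count, .cfrvisited, <cfg constant>);
     // checking the recorded path against the CFG out of line.

   Names and numbers above are made up for the example.  */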
59 | ||
60 | /* Define a pass to harden code through control flow redundancy. */ | |
61 | ||
62 | const pass_data pass_data_harden_control_flow_redundancy = { | |
63 | GIMPLE_PASS, | |
64 | "hardcfr", | |
65 | OPTGROUP_NONE, | |
66 | TV_NONE, | |
67 | PROP_cfg | PROP_ssa, // properties_required | |
68 | 0, // properties_provided | |
69 | 0, // properties_destroyed | |
70 | TODO_cleanup_cfg, // todo_flags_start | |
71 | 0, // todo_flags_finish | |
72 | }; | |
73 | ||
74 | class pass_harden_control_flow_redundancy : public gimple_opt_pass | |
75 | { | |
76 | public: | |
77 | pass_harden_control_flow_redundancy (gcc::context *ctxt) | |
78 | : gimple_opt_pass (pass_data_harden_control_flow_redundancy, ctxt) | |
79 | {} | |
80 | opt_pass *clone () { return new pass_harden_control_flow_redundancy (m_ctxt); } | |
81 | virtual bool gate (function *fun) { | |
82 | /* Return quickly if the pass is disabled, without checking any of | |
83 | the conditions that might give rise to warnings that would only | |
84 | be appropriate if hardening was requested. */ | |
85 | if (!flag_harden_control_flow_redundancy) | |
86 | return false; | |
87 | ||
88 | /* Functions that return more than once, like setjmp and vfork | |
89 | (that also gets this flag set), will start recording a path | |
90 | after the first return, and then may take another path when | |
91 | they return again. The unterminated path may then be flagged | |
92 | as an error. ??? We could save the visited array before the | |
93 | call and restore it if it returns again. */ | |
94 | if (fun->calls_setjmp) | |
95 | { | |
96 | warning_at (DECL_SOURCE_LOCATION (fun->decl), 0, | |
97 | "%qD calls %<setjmp%> or similar," | |
98 | " %<-fharden-control-flow-redundancy%> is not supported", | |
99 | fun->decl); | |
100 | return false; | |
101 | } | |
102 | ||
103 | /* Some targets bypass the abnormal dispatcher block in nonlocal | |
104 | gotos, and then we'd miss its visited bit. It might be doable | |
105 | to make it work uniformly, but this feature is not used often | |
106 | enough to make it worthwhile. */ | |
107 | if (fun->has_nonlocal_label) | |
108 | { | |
109 | warning_at (DECL_SOURCE_LOCATION (fun->decl), 0, | |
110 | "%qD receives nonlocal gotos," | |
111 | " %<-fharden-control-flow-redundancy%> is not supported", | |
112 | fun->decl); | |
113 | return false; | |
114 | } | |
115 | ||
116 | if (fun->cfg && param_hardcfr_max_blocks > 0 | |
117 | && (n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS | |
118 | > param_hardcfr_max_blocks)) | |
119 | { | |
120 | warning_at (DECL_SOURCE_LOCATION (fun->decl), 0, | |
121 | "%qD has more than %u blocks, the requested" | |
122 | " maximum for %<-fharden-control-flow-redundancy%>", | |
123 | fun->decl, param_hardcfr_max_blocks); | |
124 | return false; | |
125 | } | |
126 | ||
127 | return true; | |
128 | } | |
129 | virtual unsigned int execute (function *); | |
130 | }; | |
131 | ||
132 | } | |
133 | ||
134 | /* Return TRUE iff CFR checks should be inserted before returning | |
135 | calls. */ | |
136 | ||
137 | static bool | |
138 | check_returning_calls_p () | |
139 | { | |
140 | return | |
141 | flag_harden_control_flow_redundancy_check_returning_calls > 0 | |
142 | || (flag_harden_control_flow_redundancy_check_returning_calls < 0 | |
143 | /* Gates pass_tail_calls. */ | |
144 | && flag_optimize_sibling_calls | |
145 | /* Gates pass_all_optimizations. */ | |
146 | && optimize >= 1 && !optimize_debug); | |
147 | } | |
148 | ||
149 | /* Scan BB from the end, updating *RETPTR, if given, as return stmts and | |
150 | copies are found. Return a call or a stmt that cannot appear after | |
151 | a tail call, or NULL if the top of the block is reached without | |
152 | finding any. */ | |
153 | ||
154 | static gimple * | |
155 | hardcfr_scan_block (basic_block bb, tree **retptr) | |
156 | { | |
157 | gimple_stmt_iterator gsi; | |
158 | for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
159 | { | |
160 | gimple *stmt = gsi_stmt (gsi); | |
161 | ||
162 | /* Ignore labels, returns, nops, predicts, clobbers and debug stmts. */ | |
163 | if (gimple_code (stmt) == GIMPLE_LABEL | |
164 | || gimple_code (stmt) == GIMPLE_NOP | |
165 | || gimple_code (stmt) == GIMPLE_PREDICT | |
166 | || gimple_clobber_p (stmt) | |
167 | || is_gimple_debug (stmt)) | |
168 | continue; | |
169 | ||
170 | if (gimple_code (stmt) == GIMPLE_RETURN) | |
171 | { | |
172 | greturn *gret = as_a <greturn *> (stmt); | |
173 | if (retptr) | |
174 | { | |
175 | gcc_checking_assert (!*retptr); | |
176 | *retptr = gimple_return_retval_ptr (gret); | |
177 | } | |
178 | continue; | |
179 | } | |
180 | ||
181 | /* Check for a call. */ | |
182 | if (is_gimple_call (stmt)) | |
183 | return stmt; | |
184 | ||
185 | /* Allow simple copies to the return value, updating the return | |
186 | value to be found in earlier assignments. */ | |
187 | if (retptr && *retptr && gimple_assign_single_p (stmt) | |
188 | && **retptr == gimple_assign_lhs (stmt)) | |
189 | { | |
190 | *retptr = gimple_assign_rhs1_ptr (stmt); | |
191 | continue; | |
192 | } | |
193 | ||
194 | return stmt; | |
195 | } | |
196 | ||
197 | /* Any other kind of stmt will prevent a tail call. */ | |
198 | return NULL; | |
199 | } | |
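/* For illustration (names made up), scanning a block that ends like:

     _1 = bar ();          // scan stops here and returns the call
     retval.0 = _1;        // simple copy: *RETPTR advances to &_1
     return retval.0;      // return stmt: sets *RETPTR

   walks from the return up to the call with *RETPTR left pointing at
   the call's LHS, which is how callers recognize returning calls.  */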
200 | ||
201 | /* Return TRUE iff CALL is to be preceded by a CFR checkpoint, i.e., | |
202 | if it's a returning call (one whose result is ultimately returned | |
203 | without intervening non-copy statements) and we're checking | |
204 | returning calls, a __builtin_return call (noreturn with a path to | |
205 | the exit block), a must-tail call, or a tail call. */ | |
206 | ||
207 | static bool | |
208 | returning_call_p (gcall *call) | |
209 | { | |
210 | if (!(gimple_call_noreturn_p (call) | |
211 | || gimple_call_must_tail_p (call) | |
212 | || gimple_call_tail_p (call) | |
213 | || check_returning_calls_p ())) | |
214 | return false; | |
215 | ||
216 | /* Quickly check that there's a path to exit compatible with a | |
217 | returning call. Detect infinite loops by limiting the path | |
218 | length to the basic block count, and by looking for duplicate | |
219 | blocks before allocating more memory for the path, for amortized | |
220 | O(n). */ | |
221 | auto_vec<basic_block, 10> path; | |
222 | for (basic_block bb = gimple_bb (call); | |
223 | bb != EXIT_BLOCK_PTR_FOR_FN (cfun); | |
224 | bb = single_succ (bb)) | |
225 | if (!single_succ_p (bb) | |
226 | || (single_succ_edge (bb)->flags & EDGE_EH) != 0 | |
227 | || n_basic_blocks_for_fn (cfun) - path.length () <= NUM_FIXED_BLOCKS | |
228 | || (path.length () == path.allocated () | |
229 | && std::find (path.begin (), path.end (), bb) != path.end ())) | |
230 | return false; | |
231 | else | |
232 | path.safe_push (bb); | |
233 | ||
234 | /* Check the stmts in the blocks and trace the return value. */ | |
235 | tree *retptr = NULL; | |
236 | for (;;) | |
237 | { | |
238 | gcc_checking_assert (!path.is_empty ()); | |
239 | basic_block bb = path.pop (); | |
240 | gimple *stop = hardcfr_scan_block (bb, &retptr); | |
241 | if (stop) | |
242 | { | |
243 | if (stop != call) | |
244 | return false; | |
245 | gcc_checking_assert (path.is_empty ()); | |
246 | break; | |
247 | } | |
248 | ||
249 | gphi *retphi = NULL; | |
250 | if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME | |
251 | && !SSA_NAME_IS_DEFAULT_DEF (*retptr) | |
252 | && SSA_NAME_DEF_STMT (*retptr) | |
253 | && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr)) | |
254 | && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb) | |
255 | { | |
256 | retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr)); | |
257 | gcc_checking_assert (gimple_phi_result (retphi) == *retptr); | |
258 | } | |
259 | else | |
260 | continue; | |
261 | ||
262 | gcc_checking_assert (!path.is_empty ()); | |
263 | edge e = single_succ_edge (path.last ()); | |
264 | int i = EDGE_COUNT (bb->preds); | |
265 | while (i--) | |
266 | if (EDGE_PRED (bb, i) == e) | |
267 | break; | |
268 | gcc_checking_assert (i >= 0); | |
269 | retptr = gimple_phi_arg_def_ptr (retphi, i); | |
270 | } | |
271 | ||
272 | return (gimple_call_noreturn_p (call) | |
273 | || gimple_call_must_tail_p (call) | |
274 | || gimple_call_tail_p (call) | |
275 | || (gimple_call_lhs (call) == (retptr ? *retptr : NULL) | |
276 | && check_returning_calls_p ())); | |
277 | } | |
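/* A note on the path walk above: duplicates are only searched for when
   the path vec is about to grow (length == allocated), and the vec
   grows geometrically, so the total cost of the find calls is bounded
   by roughly twice the final path length, which is what keeps the walk
   amortized O(n) as stated.  */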
278 | ||
279 | typedef auto_vec<edge, 10> chk_edges_t; | |
280 | ||
281 | /* Declare for mutual recursion. */ | |
282 | static bool hardcfr_sibcall_search_preds (basic_block bb, | |
283 | chk_edges_t &chk_edges, | |
284 | int &count_chkcall, | |
285 | auto_sbitmap &chkcall_blocks, | |
286 | int &count_postchk, | |
287 | auto_sbitmap &postchk_blocks, | |
288 | tree *retptr); | |
289 | ||
290 | /* Search backwards from the end of BB for a mandatory or potential | |
291 | sibcall. Schedule the block to be handled sort-of like noreturn if | |
292 | so. Recurse to preds, with updated RETPTR, if the block only | |
293 | contains stmts that may follow such a call, scheduling checking at | |
294 | edges and marking blocks as post-check as needed. Return true iff, | |
295 | at the end of the block, a check will have already been | |
296 | performed. */ | |
297 | ||
298 | static bool | |
299 | hardcfr_sibcall_search_block (basic_block bb, | |
300 | chk_edges_t &chk_edges, | |
301 | int &count_chkcall, | |
302 | auto_sbitmap &chkcall_blocks, | |
303 | int &count_postchk, | |
304 | auto_sbitmap &postchk_blocks, | |
305 | tree *retptr) | |
306 | { | |
307 | /* Conditionals and internal exceptions rule out tail calls. */ | |
308 | if (!single_succ_p (bb) | |
309 | || (single_succ_edge (bb)->flags & EDGE_EH) != 0) | |
310 | return false; | |
311 | ||
312 | gimple *stmt = hardcfr_scan_block (bb, &retptr); | |
313 | if (!stmt) | |
314 | return hardcfr_sibcall_search_preds (bb, chk_edges, | |
315 | count_chkcall, chkcall_blocks, | |
316 | count_postchk, postchk_blocks, | |
317 | retptr); | |
318 | ||
319 | if (!is_a <gcall *> (stmt)) | |
320 | return false; | |
321 | ||
322 | /* Avoid disrupting mandatory or early-marked tail calls, | |
323 | inserting the check before them. This works for | |
324 | must-tail calls, but tail calling as an optimization is | |
325 | detected too late for us. | |
326 | ||
327 | Also check for noreturn calls here. Noreturn calls won't | |
328 | normally have edges to exit, so they won't be found here, | |
329 | but __builtin_return does, and we must check before | |
330 | it, so handle it like a tail call. */ | |
331 | gcall *call = as_a <gcall *> (stmt); | |
332 | if (!(gimple_call_noreturn_p (call) | |
333 | || gimple_call_must_tail_p (call) | |
334 | || gimple_call_tail_p (call) | |
335 | || (gimple_call_lhs (call) == (retptr ? *retptr : NULL) | |
336 | && check_returning_calls_p ()))) | |
337 | return false; | |
338 | ||
339 | gcc_checking_assert (returning_call_p (call)); | |
340 | ||
341 | /* We found a call that is to be preceded by checking. */ | |
342 | if (bitmap_set_bit (chkcall_blocks, bb->index)) | |
343 | ++count_chkcall; | |
344 | else | |
345 | gcc_unreachable (); | |
346 | return true; | |
347 | } | |
348 | ||
349 | ||
350 | /* Search preds of BB for a mandatory or potential sibcall or | |
351 | returning call, and arrange for the blocks containing them to have | |
352 | a check inserted before the call, like noreturn calls. If any | |
353 | preds are found to perform checking, schedule checks at the edges | |
354 | of those that don't, and mark BB as postcheck. */ | |
355 | ||
356 | static bool | |
357 | hardcfr_sibcall_search_preds (basic_block bb, | |
358 | chk_edges_t &chk_edges, | |
359 | int &count_chkcall, | |
360 | auto_sbitmap &chkcall_blocks, | |
361 | int &count_postchk, | |
362 | auto_sbitmap &postchk_blocks, | |
363 | tree *retptr) | |
364 | { | |
365 | /* For the exit block, we wish to force a check at every | |
366 | predecessor, so pretend we've already found a pred that had | |
367 | checking, so that we schedule checking at every one of its pred | |
368 | edges. */ | |
369 | bool first = bb->index >= NUM_FIXED_BLOCKS; | |
370 | bool postchecked = true; | |
371 | ||
372 | gphi *retphi = NULL; | |
373 | if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME | |
374 | && !SSA_NAME_IS_DEFAULT_DEF (*retptr) | |
375 | && SSA_NAME_DEF_STMT (*retptr) | |
376 | && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr)) | |
377 | && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb) | |
378 | { | |
379 | retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr)); | |
380 | gcc_checking_assert (gimple_phi_result (retphi) == *retptr); | |
381 | } | |
382 | ||
383 | for (int i = EDGE_COUNT (bb->preds); i--; first = false) | |
384 | { | |
385 | edge e = EDGE_PRED (bb, i); | |
386 | ||
387 | bool checked | |
388 | = hardcfr_sibcall_search_block (e->src, chk_edges, | |
389 | count_chkcall, chkcall_blocks, | |
390 | count_postchk, postchk_blocks, | |
391 | !retphi ? retptr | |
392 | : gimple_phi_arg_def_ptr (retphi, i)); | |
393 | ||
394 | if (first) | |
395 | { | |
396 | postchecked = checked; | |
397 | continue; | |
398 | } | |
399 | ||
400 | /* When we first find a checked block, force a check at every | |
401 | other incoming edge we've already visited, and those we | |
402 | visit afterwards that don't have their own check, so that | |
403 | when we reach BB, the check has already been performed. */ | |
404 | if (!postchecked && checked) | |
405 | { | |
406 | for (int j = EDGE_COUNT (bb->preds); --j > i; ) | |
407 | chk_edges.safe_push (EDGE_PRED (bb, j)); | |
408 | postchecked = true; | |
409 | } | |
410 | if (postchecked && !checked) | |
411 | chk_edges.safe_push (EDGE_PRED (bb, i)); | |
412 | } | |
413 | ||
414 | if (postchecked && bb->index >= NUM_FIXED_BLOCKS) | |
415 | { | |
416 | if (bitmap_set_bit (postchk_blocks, bb->index)) | |
417 | count_postchk++; | |
418 | else | |
419 | gcc_unreachable (); | |
420 | } | |
421 | ||
422 | return postchecked; | |
423 | } | |
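/* To illustrate, suppose the exit block has two preds, where B5 ends
   in a potential tail call and B6 does not end in a call:

     B5: ... t_1 = f (); return t_1;   // check scheduled before the call
     B6: ... return x_2;               // no check of its own

   B5 reports that it already performs checking, so only the B6->exit
   edge is pushed onto CHK_EDGES.  A non-exit block all of whose preds
   check, directly or on edges, is added to POSTCHK_BLOCKS so that it
   gets no instrumentation of its own.  Block names and stmts are made
   up for the example.  */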
424 | ||
425 | ||
426 | class rt_bb_visited | |
427 | { | |
428 | /* Use a sufficiently wide unsigned type to hold basic block numbers. */ | |
429 | typedef size_t blknum; | |
430 | ||
431 | /* Record the original block count of the function. */ | |
432 | blknum nblocks; | |
433 | /* Record the number of bits per VWORD (short for VISITED WORD), an | |
434 | efficient mode to set and test bits for blocks we visited, and to | |
435 | encode the CFG in case out-of-line verification is used. */ | |
436 | unsigned vword_bits; | |
437 | ||
438 | /* Hold the unsigned integral VWORD type. */ | |
439 | tree vword_type; | |
440 | /* Hold a pointer-to-VWORD type. */ | |
441 | tree vword_ptr; | |
442 | ||
443 | /* Hold a growing sequence used to check, inline or out-of-line, | |
444 | that VISITED encodes an expected execution path. */ | |
445 | gimple_seq ckseq; | |
446 | /* If nonNULL, hold a growing representation of the CFG for | |
447 | out-of-line testing. */ | |
448 | tree rtcfg; | |
449 | ||
450 | /* Hold the declaration of an array of VWORDs, used as an array of | |
451 | NBLOCKS-2 bits. */ | |
452 | tree visited; | |
453 | ||
454 | /* If performing inline checking, hold declarations of the boolean | |
455 | variables used for inline checking. CKBLK holds the result of | |
456 | testing whether the VISITED bit corresponding to a predecessor or | |
457 | successor is set, CKINV inverts that bit, CKPART gets cleared if | |
458 | a block was not visited or if CKINV for any of its predecessors | |
459 | or successors is set, and CKFAIL gets set if CKPART remains set | |
460 | at the end of a block's predecessors or successors list. */ | |
461 | tree ckfail, ckpart, ckinv, ckblk; | |
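/* As an illustration (a sketch, not literal GIMPLE), for one
   non-postcheck block B with predecessors P1 and P2, inline checking
   appends to CKSEQ something equivalent to:

     ckpart = visited(B);                          // B was run
     ckblk = visited(P1); ckinv = !ckblk; ckpart &= ckinv;
     ckblk = visited(P2); ckinv = !ckblk; ckpart &= ckinv;
     ckfail |= ckpart;       // B ran but no pred did

   and then the same for B's successors, with visited(X) standing for
   the test of X's bit in the VISITED array.  */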
462 | ||
463 | /* Convert a block index N to a block vindex, the index used to | |
464 | identify it in the VISITED array. Check that it's in range: | |
465 | neither ENTRY nor EXIT, but maybe one-past-the-end, to compute | |
466 | the visited array length. */ | |
467 | blknum num2idx (blknum n) { | |
468 | gcc_checking_assert (n >= NUM_FIXED_BLOCKS && n <= nblocks); | |
469 | return (n - NUM_FIXED_BLOCKS); | |
470 | } | |
471 | /* Return the block vindex for BB, that must not be ENTRY or | |
472 | EXIT. */ | |
473 | blknum bb2idx (basic_block bb) { | |
474 | gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) | |
475 | && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)); | |
476 | gcc_checking_assert (blknum (bb->index) < nblocks); | |
477 | return num2idx (bb->index); | |
478 | } | |
479 | ||
480 | /* Compute the type to be used for the VISITED array. */ | |
481 | tree vtype () | |
482 | { | |
483 | blknum n = num2idx (nblocks); | |
484 | return build_array_type_nelts (vword_type, | |
485 | (n + vword_bits - 1) / vword_bits); | |
486 | } | |
487 | ||
488 | /* Compute and return the index into VISITED for block BB. If BITP | |
489 | is non-NULL, also compute and store the bit mask corresponding to | |
490 | block BB in *BITP, so that (visited[index] & mask) tells whether | |
491 | BB was visited. */ | |
492 | tree vwordidx (basic_block bb, tree *bitp = NULL) | |
493 | { | |
494 | blknum idx = bb2idx (bb); | |
495 | if (bitp) | |
496 | { | |
497 | unsigned bit = idx % vword_bits; | |
498 | /* We don't need to adjust shifts to follow native bit | |
499 | endianness here, all of our uses of the CFG and visited | |
500 | bitmaps, whether at compile or runtime, are shifted bits on | |
501 | full words. This adjustment here would require a | |
502 | corresponding adjustment at runtime, which would be nothing | |
503 | but undesirable overhead for us. */ | |
504 | if (0 /* && BITS_BIG_ENDIAN */) | |
505 | bit = vword_bits - bit - 1; | |
506 | wide_int wbit = wi::set_bit_in_zero (bit, vword_bits); | |
507 | *bitp = wide_int_to_tree (vword_type, wbit); | |
508 | } | |
509 | return build_int_cst (vword_ptr, idx / vword_bits); | |
510 | } | |
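/* As a worked example (numbers made up), with 64-bit VWORDs a block
   whose vindex is 70 lives in visited word 70 / 64 == 1, under mask
   1 << (70 % 64) == 1 << 6: vwordidx returns the constant 1 (of
   VWORD_PTR type, used as a word offset) and stores the mask in
   *BITP.  */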
511 | ||
512 | /* Return an expr accessing the visited element that holds | |
513 | information about BB. If BITP is non-NULL, set it to the mask to | |
514 | tell which bit in that expr refers to BB. */ | |
515 | tree vword (basic_block bb, tree *bitp = NULL) | |
516 | { | |
517 | return build2 (MEM_REF, vword_type, | |
518 | build1 (ADDR_EXPR, vword_ptr, visited), | |
519 | int_const_binop (MULT_EXPR, vwordidx (bb, bitp), | |
520 | fold_convert (vword_ptr, | |
521 | TYPE_SIZE_UNIT | |
522 | (vword_type)))); | |
523 | } | |
524 | ||
525 | /* Return an expr that evaluates to true iff BB was marked as | |
526 | VISITED. Add any gimple stmts to SEQP. */ | |
527 | tree vindex (basic_block bb, gimple_seq *seqp) | |
528 | { | |
529 | if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) | |
530 | || bb == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
531 | return boolean_true_node; | |
532 | ||
533 | tree bit, setme = vword (bb, &bit); | |
534 | tree temp = create_tmp_var (vword_type, ".cfrtemp"); | |
535 | ||
536 | gassign *vload = gimple_build_assign (temp, setme); | |
537 | gimple_seq_add_stmt (seqp, vload); | |
538 | ||
539 | gassign *vmask = gimple_build_assign (temp, BIT_AND_EXPR, temp, bit); | |
540 | gimple_seq_add_stmt (seqp, vmask); | |
541 | ||
542 | return build2 (NE_EXPR, boolean_type_node, | |
543 | temp, build_int_cst (vword_type, 0)); | |
544 | } | |
545 | ||
546 | /* Set the bit corresponding to BB in VISITED. Add to SEQ any | |
547 | required gimple stmts, and return SEQ, possibly modified. */ | |
548 | gimple_seq vset (basic_block bb, gimple_seq seq = NULL) | |
549 | { | |
550 | tree bit, setme = vword (bb, &bit); | |
551 | tree temp = create_tmp_var (vword_type, ".cfrtemp"); | |
552 | ||
553 | gassign *vload = gimple_build_assign (temp, setme); | |
554 | gimple_seq_add_stmt (&seq, vload); | |
555 | ||
556 | gassign *vbitset = gimple_build_assign (temp, BIT_IOR_EXPR, temp, bit); | |
557 | gimple_seq_add_stmt (&seq, vbitset); | |
558 | ||
559 | gassign *vstore = gimple_build_assign (unshare_expr (setme), temp); | |
560 | gimple_seq_add_stmt (&seq, vstore); | |
561 | ||
562 | /* Prevent stores into visited from being deferred, forcing | |
563 | subsequent bitsets to reload the word rather than reusing | |
564 | values already in register. The purpose is threefold: make the | |
565 | bitset get to memory in this block, so that control flow | |
566 | attacks in functions called in this block don't easily bypass | |
567 | the bitset; prevent the bitset word from being retained in a | |
568 | register across blocks, which could, in an attack scenario, | |
569 | make a later block set more than one bit; and prevent hoisting | |
570 | or sinking loads or stores of bitset words out of loops or even | |
571 | throughout functions, which could significantly weaken the | |
572 | verification. This is equivalent to making the bitsetting | |
573 | volatile within the function body, but without changing its | |
574 | type; making the bitset volatile would make inline checking far | |
575 | less optimizable for no reason. */ | |
576 | vec<tree, va_gc> *inputs = NULL; | |
577 | vec<tree, va_gc> *outputs = NULL; | |
578 | vec_safe_push (outputs, | |
579 | build_tree_list | |
580 | (build_tree_list | |
581 | (NULL_TREE, build_string (2, "=m")), | |
582 | visited)); | |
583 | vec_safe_push (inputs, | |
584 | build_tree_list | |
585 | (build_tree_list | |
586 | (NULL_TREE, build_string (1, "m")), | |
587 | visited)); | |
588 | gasm *stabilize = gimple_build_asm_vec ("", inputs, outputs, | |
589 | NULL, NULL); | |
590 | gimple_seq_add_stmt (&seq, stabilize); | |
591 | ||
592 | return seq; | |
593 | } | |
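/* Informally, for a block whose bit is MASK within the visited word at
   offset W, the sequence built above amounts to (a sketch, not actual
   dump output):

     .cfrtemp = MEM[&.cfrvisited + W * sizeof (vword)];
     .cfrtemp = .cfrtemp | MASK;
     MEM[&.cfrvisited + W * sizeof (vword)] = .cfrtemp;
     __asm__ ("" : "=m" .cfrvisited : "m" .cfrvisited);  // keep it in memory

   The trailing empty asm is the stabilizer described above.  */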
594 | ||
595 | public: | |
596 | /* Prepare to add control flow redundancy testing to CFUN. */ | |
597 | rt_bb_visited (int checkpoints) | |
598 | : nblocks (n_basic_blocks_for_fn (cfun)), | |
599 | vword_type (NULL), ckseq (NULL), rtcfg (NULL) | |
600 | { | |
601 | /* If we've already added a declaration for the builtin checker, | |
602 | extract vword_type and vword_bits from its declaration. */ | |
603 | if (tree checkfn = builtin_decl_explicit (BUILT_IN___HARDCFR_CHECK)) | |
604 | { | |
605 | tree check_arg_list = TYPE_ARG_TYPES (TREE_TYPE (checkfn)); | |
606 | tree vword_const_ptr_type = TREE_VALUE (TREE_CHAIN (check_arg_list)); | |
607 | vword_type = TYPE_MAIN_VARIANT (TREE_TYPE (vword_const_ptr_type)); | |
608 | vword_bits = tree_to_shwi (TYPE_SIZE (vword_type)); | |
609 | } | |
610 | /* Otherwise, select vword_bits, vword_type et al, and use them to | |
611 | declare the builtin checker. */ | |
612 | else | |
613 | { | |
614 | /* This setting needs to be kept in sync with libgcc/hardcfr.c. | |
615 | We aim for at least 28 bits, which enables us to refer to as | |
616 | many as 28 << 28 blocks in a function's CFG. That's way over | |
617 | 4G blocks. */ | |
618 | machine_mode VWORDmode; | |
619 | if (BITS_PER_UNIT >= 28) | |
620 | { | |
621 | VWORDmode = QImode; | |
622 | vword_bits = BITS_PER_UNIT; | |
623 | } | |
624 | else if (BITS_PER_UNIT >= 14) | |
625 | { | |
626 | VWORDmode = HImode; | |
627 | vword_bits = 2 * BITS_PER_UNIT; | |
628 | } | |
629 | else | |
630 | { | |
631 | VWORDmode = SImode; | |
632 | vword_bits = 4 * BITS_PER_UNIT; | |
633 | } | |
634 | ||
635 | vword_type = lang_hooks.types.type_for_mode (VWORDmode, 1); | |
636 | gcc_checking_assert (vword_bits == tree_to_shwi (TYPE_SIZE | |
637 | (vword_type))); | |
638 | ||
639 | vword_type = build_variant_type_copy (vword_type); | |
640 | TYPE_ALIAS_SET (vword_type) = new_alias_set (); | |
641 | ||
642 | tree vword_const = build_qualified_type (vword_type, TYPE_QUAL_CONST); | |
643 | tree vword_const_ptr = build_pointer_type (vword_const); | |
644 | tree type = build_function_type_list (void_type_node, sizetype, | |
645 | vword_const_ptr, vword_const_ptr, | |
646 | NULL_TREE); | |
647 | tree decl = add_builtin_function_ext_scope | |
648 | ("__builtin___hardcfr_check", | |
649 | type, BUILT_IN___HARDCFR_CHECK, BUILT_IN_NORMAL, | |
650 | "__hardcfr_check", NULL_TREE); | |
651 | TREE_NOTHROW (decl) = true; | |
652 | set_builtin_decl (BUILT_IN___HARDCFR_CHECK, decl, true); | |
653 | } | |
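/* Either way, in C terms the checker is declared with a signature
   along the lines of (see libgcc's hardcfr.c for the authoritative
   definition):

     void __hardcfr_check (size_t blocks,
                           vword const *visited,
                           vword const *cfg);

   where vword stands for the unsigned type selected for VWORD_TYPE
   above.  */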
654 | ||
655 | /* The checker uses a qualified pointer, so we can't reuse it, | |
656 | so build a new one. */ | |
657 | vword_ptr = build_pointer_type (vword_type); | |
658 | ||
659 | tree visited_type = vtype (); | |
660 | visited = create_tmp_var (visited_type, ".cfrvisited"); | |
661 | ||
662 | if (nblocks - NUM_FIXED_BLOCKS > blknum (param_hardcfr_max_inline_blocks) | |
663 | || checkpoints > 1) | |
664 | { | |
665 | /* Make sure vword_bits is wide enough for the representation | |
666 | of nblocks in rtcfg. Compare with vword_bits << vword_bits, | |
667 | but avoiding overflows, shifting nblocks right instead. If | |
668 | vword_bits is wider than HOST_WIDE_INT, assume it fits, so | |
669 | as to avoid undefined shifts. */ | |
670 | gcc_assert (HOST_BITS_PER_WIDE_INT <= vword_bits | |
671 | || (((unsigned HOST_WIDE_INT)(num2idx (nblocks)) | |
672 | >> vword_bits) < vword_bits)); | |
673 | ||
674 | /* Build a terminator for the constructor list. */ | |
675 | rtcfg = build_tree_list (NULL_TREE, NULL_TREE); | |
676 | return; | |
677 | } | |
678 | ||
679 | ckfail = create_tmp_var (boolean_type_node, ".cfrfail"); | |
680 | ckpart = create_tmp_var (boolean_type_node, ".cfrpart"); | |
681 | ckinv = create_tmp_var (boolean_type_node, ".cfrinv"); | |
682 | ckblk = create_tmp_var (boolean_type_node, ".cfrblk"); | |
683 | ||
684 | gassign *ckfail_init = gimple_build_assign (ckfail, boolean_false_node); | |
685 | gimple_seq_add_stmt (&ckseq, ckfail_init); | |
686 | } | |
687 | ||
688 | /* Insert SEQ before a resx or a call in INSBB. */ | |
689 | void insert_exit_check_in_block (gimple_seq seq, basic_block insbb) | |
690 | { | |
691 | gimple_stmt_iterator gsi = gsi_last_bb (insbb); | |
692 | ||
693 | while (!gsi_end_p (gsi)) | |
694 | if (is_a <gresx *> (gsi_stmt (gsi)) | |
695 | || is_a <gcall *> (gsi_stmt (gsi))) | |
696 | break; | |
697 | else | |
698 | gsi_prev (&gsi); | |
699 | ||
700 | gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT); | |
701 | } | |
702 | ||
703 | /* Insert SEQ on E. */ | |
704 | void insert_exit_check_on_edge (gimple_seq seq, edge e) | |
705 | { | |
706 | gsi_insert_seq_on_edge_immediate (e, seq); | |
707 | } | |
708 | ||
709 | /* Add checking code to CHK_EDGES and CHKCALL_BLOCKS, and | |
710 | initialization code on the entry edge. Before this point, the | |
711 | CFG has been undisturbed, and all the needed data has been | |
712 | collected and safely stowed. */ | |
713 | void check (chk_edges_t &chk_edges, | |
714 | int count_chkcall, auto_sbitmap const &chkcall_blocks) | |
715 | { | |
716 | /* If we're using out-of-line checking, create and statically | |
717 | initialize the CFG checking representation, generate the | |
718 | checker call for the checking sequence, and insert it in all | |
719 | exit edges, if there's more than one. If there's only one, we | |
720 | use the same logic as the inline case to insert the check | |
721 | sequence. */ | |
722 | if (rtcfg) | |
723 | { | |
724 | /* Unreverse the list, and drop the tail node turned into head. */ | |
725 | rtcfg = TREE_CHAIN (nreverse (rtcfg)); | |
726 | ||
727 | /* Turn the indices stored in TREE_PURPOSE into separate | |
728 | nodes. It was useful to keep them together to enable | |
729 | combination of masks and for clear separation of | |
730 | terminators while constructing it, but now we have to turn | |
731 | it into a sequence of words. */ | |
732 | for (tree node = rtcfg; node; node = TREE_CHAIN (node)) | |
733 | { | |
734 | tree wordidx = TREE_PURPOSE (node); | |
735 | if (!wordidx) | |
736 | continue; | |
737 | ||
738 | TREE_PURPOSE (node) = NULL_TREE; | |
739 | TREE_CHAIN (node) = tree_cons (NULL_TREE, | |
740 | fold_convert (vword_type, wordidx), | |
741 | TREE_CHAIN (node)); | |
742 | } | |
743 | ||
744 | /* Build the static initializer for the array with the CFG | |
745 | representation for out-of-line checking. */ | |
746 | tree init = build_constructor_from_list (NULL_TREE, rtcfg); | |
747 | TREE_TYPE (init) = build_array_type_nelts (vword_type, | |
748 | CONSTRUCTOR_NELTS (init)); | |
749 | char buf[32]; | |
750 | ASM_GENERATE_INTERNAL_LABEL (buf, "Lhardcfg", | |
751 | current_function_funcdef_no); | |
752 | rtcfg = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
753 | get_identifier (buf), | |
754 | TREE_TYPE (init)); | |
755 | TREE_READONLY (rtcfg) = 1; | |
756 | TREE_STATIC (rtcfg) = 1; | |
757 | TREE_ADDRESSABLE (rtcfg) = 1; | |
758 | TREE_USED (rtcfg) = 1; | |
759 | DECL_ARTIFICIAL (rtcfg) = 1; | |
760 | DECL_IGNORED_P (rtcfg) = 1; | |
761 | DECL_INITIAL (rtcfg) = init; | |
762 | make_decl_rtl (rtcfg); | |
763 | varpool_node::finalize_decl (rtcfg); | |
764 | ||
765 | /* Add the checker call to ckseq. */ | |
766 | gcall *call_chk = gimple_build_call (builtin_decl_explicit | |
767 | (BUILT_IN___HARDCFR_CHECK), 3, | |
768 | build_int_cst (sizetype, | |
769 | num2idx (nblocks)), | |
770 | build1 (ADDR_EXPR, vword_ptr, | |
771 | visited), | |
772 | build1 (ADDR_EXPR, vword_ptr, | |
773 | rtcfg)); | |
774 | gimple_seq_add_stmt (&ckseq, call_chk); | |
775 | ||
776 | gimple *clobber = gimple_build_assign (visited, | |
777 | build_clobber | |
778 | (TREE_TYPE (visited))); | |
779 | gimple_seq_add_stmt (&ckseq, clobber); | |
780 | ||
781 | /* If we have multiple exit edges, insert (copies of) | |
782 | ckseq in all of them. */ | |
783 | for (int i = chk_edges.length (); i--; ) | |
784 | { | |
785 | gimple_seq seq = ckseq; | |
786 | /* Copy the sequence, unless we're dealing with the | |
787 | last edge (we're counting down to zero). */ | |
788 | if (i || count_chkcall) | |
789 | seq = gimple_seq_copy (seq); | |
790 | ||
791 | edge e = chk_edges[i]; | |
792 | ||
793 | if (dump_file) | |
794 | { | |
795 | if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
796 | fprintf (dump_file, | |
797 | "Inserting out-of-line check in" | |
798 | " block %i's edge to exit.\n", | |
799 | e->src->index); | |
800 | else | |
801 | fprintf (dump_file, | |
802 | "Inserting out-of-line check in" | |
803 | " block %i's edge to postcheck block %i.\n", | |
804 | e->src->index, e->dest->index); | |
805 | } | |
806 | ||
807 | insert_exit_check_on_edge (seq, e); | |
808 | ||
809 | gcc_checking_assert (!bitmap_bit_p (chkcall_blocks, e->src->index)); | |
810 | } | |
811 | ||
812 | sbitmap_iterator it; | |
813 | unsigned i; | |
814 | EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it) | |
815 | { | |
816 | basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i); | |
817 | ||
818 | gimple_seq seq = ckseq; | |
819 | gcc_checking_assert (count_chkcall > 0); | |
820 | if (--count_chkcall) | |
821 | seq = gimple_seq_copy (seq); | |
822 | ||
823 | if (dump_file) | |
824 | fprintf (dump_file, | |
825 | "Inserting out-of-line check before stmt in block %i.\n", | |
826 | bb->index); | |
827 | ||
828 | insert_exit_check_in_block (seq, bb); | |
829 | } | |
830 | ||
831 | gcc_checking_assert (count_chkcall == 0); | |
832 | } | |
833 | else | |
834 | { | |
835 | /* Inline checking requires a single exit edge. */ | |
836 | gimple *last = gimple_build_assign (visited, | |
837 | build_clobber | |
838 | (TREE_TYPE (visited))); | |
839 | gimple_seq_add_stmt (&ckseq, last); | |
840 | ||
841 | if (!count_chkcall) | |
842 | { | |
843 | edge e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)); | |
844 | ||
845 | if (dump_file) | |
846 | { | |
847 | if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)) | |
848 | fprintf (dump_file, | |
849 | "Inserting out-of-line check in" | |
850 | " block %i's edge to postcheck block %i.\n", | |
851 | e->src->index, e->dest->index); | |
852 | else | |
853 | fprintf (dump_file, | |
854 | "Inserting inline check in" | |
855 | " block %i's edge to exit.\n", | |
856 | e->src->index); | |
857 | } | |
858 | ||
859 | insert_exit_check_on_edge (ckseq, e); | |
860 | } | |
861 | else | |
862 | { | |
863 | gcc_checking_assert (count_chkcall == 1); | |
864 | ||
865 | sbitmap_iterator it; | |
866 | unsigned i; | |
867 | EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it) | |
868 | { | |
869 | basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i); | |
870 | ||
871 | gimple_seq seq = ckseq; | |
872 | gcc_checking_assert (count_chkcall > 0); | |
873 | if (--count_chkcall) | |
874 | seq = gimple_seq_copy (seq); | |
875 | ||
876 | if (dump_file) | |
877 | fprintf (dump_file, | |
878 | "Inserting inline check before stmt in block %i.\n", | |
879 | bb->index); | |
880 | ||
881 | insert_exit_check_in_block (seq, bb); | |
882 | } | |
883 | ||
884 | gcc_checking_assert (count_chkcall == 0); | |
885 | } | |
886 | ||
887 | /* The inserted ckseq computes CKFAIL at LAST. Now we have to | |
888 | conditionally trap on it. */ | |
889 | basic_block insbb = gimple_bb (last); | |
890 | ||
891 | /* Create a block with the unconditional trap. */ | |
892 | basic_block trp = create_empty_bb (insbb); | |
893 | gimple_stmt_iterator gsit = gsi_after_labels (trp); | |
894 | ||
895 | gcall *trap = gimple_build_call (builtin_decl_explicit | |
896 | (BUILT_IN_TRAP), 0); | |
897 | gsi_insert_before (&gsit, trap, GSI_SAME_STMT); | |
898 | ||
899 | if (BB_PARTITION (insbb)) | |
900 | BB_SET_PARTITION (trp, BB_COLD_PARTITION); | |
901 | ||
902 | if (current_loops) | |
903 | add_bb_to_loop (trp, current_loops->tree_root); | |
904 | ||
905 | /* Insert a conditional branch to the trap block. If the | |
906 | conditional wouldn't be the last stmt, split the block. */ | |
907 | gimple_stmt_iterator gsi = gsi_for_stmt (last); | |
908 | if (!gsi_one_before_end_p (gsi)) | |
909 | split_block (gsi_bb (gsi), gsi_stmt (gsi)); | |
910 | ||
911 | gcond *cond = gimple_build_cond (NE_EXPR, ckfail, | |
912 | fold_convert (TREE_TYPE (ckfail), | |
913 | boolean_false_node), | |
914 | NULL, NULL); | |
915 | gsi_insert_after (&gsi, cond, GSI_SAME_STMT); | |
916 | ||
917 | /* Adjust the edges. */ | |
918 | single_succ_edge (gsi_bb (gsi))->flags &= ~EDGE_FALLTHRU; | |
919 | single_succ_edge (gsi_bb (gsi))->flags |= EDGE_FALSE_VALUE; | |
920 | single_succ_edge (gsi_bb (gsi))->probability | |
921 | = profile_probability::always (); | |
922 | edge e = make_edge (gsi_bb (gsi), trp, EDGE_TRUE_VALUE); | |
923 | e->probability = profile_probability::never (); | |
924 | gcc_checking_assert (e->dest == trp); | |
925 | gcc_checking_assert (!e->dest->count.initialized_p ()); | |
926 | e->dest->count = e->count (); | |
927 | ||
928 | /* Set the trap's dominator after splitting. */ | |
929 | if (dom_info_available_p (CDI_DOMINATORS)) | |
930 | set_immediate_dominator (CDI_DOMINATORS, trp, gimple_bb (last)); | |
931 | } | |
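/* So, in rough pseudo code, the inline checking tail inserted above
   behaves like:

     ... stmts computing ckfail from the visited bits ...
     if (ckfail != 0)
       __builtin_trap ();
     ... the original return, resx or call ...

   with the trap placed in a separate block, put in the cold partition
   when the original block is partitioned.  */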
932 | ||
933 | /* Insert initializers for visited at the entry. Do this after | |
934 | other insertions, to avoid messing with block numbers. */ | |
935 | gimple_seq iseq = NULL; | |
936 | ||
937 | gcall *vinit = gimple_build_call (builtin_decl_explicit | |
938 | (BUILT_IN_MEMSET), 3, | |
939 | build1 (ADDR_EXPR, | |
940 | build_pointer_type | |
941 | (TREE_TYPE (visited)), | |
942 | visited), | |
943 | integer_zero_node, | |
944 | TYPE_SIZE_UNIT (TREE_TYPE (visited))); | |
945 | gimple_seq_add_stmt (&iseq, vinit); | |
946 | ||
947 | gsi_insert_seq_on_edge_immediate (single_succ_edge | |
948 | (ENTRY_BLOCK_PTR_FOR_FN (cfun)), | |
949 | iseq); | |
950 | } | |
951 | ||
952 | /* Push onto RTCFG a (mask, index) pair to test for IBB when BB is | |
953 | visited. XSELF is to be the ENTRY or EXIT block (depending on | |
954 | whether we're looking at preds or succs), to be remapped to BB | |
955 | because we can't represent them, and there's no point in testing | |
956 | them anyway. Return true if no further blocks need to be visited | |
957 | in the list, because we've already encountered a | |
958 | self-reference. */ | |
959 | bool | |
960 | push_rtcfg_pair (basic_block ibb, basic_block bb, | |
961 | basic_block xself) | |
962 | { | |
963 | /* We don't have a bit to test for the entry and exit | |
964 | blocks, but they are always visited, so we test for the | |
965 | block itself, which gets us the right result and | |
966 | enables the self-test optimization below. */ | |
967 | if (ibb == xself) | |
968 | ibb = bb; | |
969 | ||
970 | tree mask, idx = vwordidx (ibb, &mask); | |
971 | /* Combine masks with the same idx, but not if we're going | |
972 | to optimize for self-test. */ | |
973 | if (ibb != bb && TREE_PURPOSE (rtcfg) | |
974 | && tree_int_cst_equal (idx, TREE_PURPOSE (rtcfg))) | |
975 | TREE_VALUE (rtcfg) = int_const_binop (BIT_IOR_EXPR, mask, | |
976 | TREE_VALUE (rtcfg)); | |
977 | else | |
978 | rtcfg = tree_cons (idx, mask, rtcfg); | |
979 | ||
980 | /* For self-tests (i.e., tests that the block itself was | |
981 | also visited), testing anything else is pointless, | |
982 | because it's a tautology, so just drop other edges. */ | |
983 | if (ibb == bb) | |
984 | { | |
985 | while (TREE_PURPOSE (TREE_CHAIN (rtcfg))) | |
986 | TREE_CHAIN (rtcfg) = TREE_CHAIN (TREE_CHAIN (rtcfg)); | |
987 | return true; | |
988 | } | |
989 | ||
990 | return false; | |
991 | } | |
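/* For illustration, after the reversal and expansion performed in
   check(), RTCFG initializes a flat array of VWORDs that, for each
   block in index order, holds (mask, index) pairs for the block's
   predecessors, a zero terminator, then pairs for its successors and
   another zero terminator, e.g. (made-up values, one word each):

     0x8, 0,    // pred (or self) bit 3 of visited word 0
     0x0,       // end of preds
     0x2, 0,    // succ bit 1 of visited word 0
     0x0,       // end of succs

   Since a real mask is never zero, zero works as the terminator; the
   layout has to match what libgcc's __hardcfr_check walks.  */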
992 | ||
993 | /* Add to CKSEQ stmts to clear CKPART if OBB is visited. */ | |
994 | void | |
995 | build_block_check (basic_block obb) | |
996 | { | |
997 | tree vobb = fold_convert (TREE_TYPE (ckblk), | |
998 | vindex (obb, &ckseq)); | |
999 | gassign *blkrunp = gimple_build_assign (ckblk, vobb); | |
1000 | gimple_seq_add_stmt (&ckseq, blkrunp); | |
1001 | ||
1002 | gassign *blknotrunp = gimple_build_assign (ckinv, | |
1003 | EQ_EXPR, | |
1004 | ckblk, | |
1005 | fold_convert | |
1006 | (TREE_TYPE (ckblk), | |
1007 | boolean_false_node)); | |
1008 | gimple_seq_add_stmt (&ckseq, blknotrunp); | |
1009 | ||
1010 | gassign *andblk = gimple_build_assign (ckpart, | |
1011 | BIT_AND_EXPR, | |
1012 | ckpart, ckinv); | |
1013 | gimple_seq_add_stmt (&ckseq, andblk); | |
1014 | } | |
1015 | ||
1016 | /* Add to BB code to set its bit in VISITED, and add to RTCFG or | |
1017 | CKSEQ the data or code needed to check BB's predecessors and | |
1018 | successors. If CHECKPOINT, assume the block is a checkpoint, | |
1019 | whether or not it has an edge to EXIT. If POSTCHECK, assume the | |
1020 | block post-dominates checkpoints and therefore no bitmap setting | |
1021 | or checks are to be performed in or for it. Do NOT change the | |
1022 | CFG. */ | |
1023 | void visit (basic_block bb, bool checkpoint, bool postcheck) | |
1024 | { | |
1025 | /* Set the bit in VISITED when entering the block. */ | |
1026 | gimple_stmt_iterator gsi = gsi_after_labels (bb); | |
1027 | if (!postcheck) | |
1028 | gsi_insert_seq_before (&gsi, vset (bb), GSI_SAME_STMT); | |
1029 | ||
1030 | if (rtcfg) | |
1031 | { | |
1032 | if (!postcheck) | |
1033 | { | |
1034 | /* Build a list of (index, mask) terminated by (NULL, 0). | |
1035 | Consolidate masks with the same index when they're | |
1036 | adjacent. First, predecessors. Count backwards, because | |
1037 | we're going to reverse the list. The order shouldn't | |
1038 | matter, but let's not make it surprising. */ | |
1039 | for (int i = EDGE_COUNT (bb->preds); i--; ) | |
1040 | if (push_rtcfg_pair (EDGE_PRED (bb, i)->src, bb, | |
1041 | ENTRY_BLOCK_PTR_FOR_FN (cfun))) | |
1042 | break; | |
1043 | } | |
1044 | rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg); | |
1045 | ||
1046 | if (!postcheck) | |
1047 | { | |
1048 | /* Then, successors. */ | |
1049 | if (!checkpoint | |
1050 | || !push_rtcfg_pair (EXIT_BLOCK_PTR_FOR_FN (cfun), | |
1051 | bb, EXIT_BLOCK_PTR_FOR_FN (cfun))) | |
1052 | for (int i = EDGE_COUNT (bb->succs); i--; ) | |
1053 | if (push_rtcfg_pair (EDGE_SUCC (bb, i)->dest, bb, | |
1054 | EXIT_BLOCK_PTR_FOR_FN (cfun))) | |
1055 | break; | |
1056 | } | |
1057 | rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg); | |
1058 | } | |
1059 | else if (!postcheck) | |
1060 | { | |
1061 | /* Schedule test to fail if the block was reached but somehow none | |
1062 | of its predecessors were. */ | |
1063 | tree bit = fold_convert (TREE_TYPE (ckpart), vindex (bb, &ckseq)); | |
1064 | gassign *blkrunp = gimple_build_assign (ckpart, bit); | |
1065 | gimple_seq_add_stmt (&ckseq, blkrunp); | |
1066 | ||
1067 | for (int i = 0, e = EDGE_COUNT (bb->preds); i < e; i++) | |
1068 | build_block_check (EDGE_PRED (bb, i)->src); | |
1069 | gimple *orfailp = gimple_build_assign (ckfail, BIT_IOR_EXPR, | |
1070 | ckfail, ckpart); | |
1071 | gimple_seq_add_stmt (&ckseq, orfailp); | |
1072 | ||
1073 | /* Likewise for successors. */ | |
1074 | gassign *blkruns = gimple_build_assign (ckpart, unshare_expr (bit)); | |
1075 | gimple_seq_add_stmt (&ckseq, blkruns); | |
1076 | ||
1077 | if (checkpoint) | |
1078 | build_block_check (EXIT_BLOCK_PTR_FOR_FN (cfun)); | |
1079 | for (int i = 0, e = EDGE_COUNT (bb->succs); i < e; i++) | |
1080 | build_block_check (EDGE_SUCC (bb, i)->dest); | |
1081 | ||
1082 | gimple *orfails = gimple_build_assign (ckfail, BIT_IOR_EXPR, | |
1083 | ckfail, ckpart); | |
1084 | gimple_seq_add_stmt (&ckseq, orfails); | |
1085 | } | |
1086 | } | |
1087 | }; | |
1088 | ||
1089 | /* Avoid checking before noreturn calls that are known (expected, | |
1090 | really) to finish by throwing an exception, rather than by ending | |
1091 | the program or looping forever. Such functions have to be | |
1092 | annotated, with an attribute (expected_throw) or flag (ECF_XTHROW), | |
1093 | so that exception-raising functions, such as C++'s __cxa_throw, | |
1094 | __cxa_rethrow, and Ada's gnat_rcheck_*, gnat_reraise*, | |
1095 | ada.exception.raise_exception*, and the language-independent | |
1096 | unwinders could be detected here and handled differently from other | |
1097 | noreturn functions. */ | |
1098 | static bool | |
1099 | always_throwing_noreturn_call_p (gimple *stmt) | |
1100 | { | |
1101 | if (!is_a <gcall *> (stmt)) | |
1102 | return is_a <gresx *> (stmt); | |
1103 | ||
1104 | gcall *call = as_a <gcall *> (stmt); | |
1105 | return (gimple_call_noreturn_p (call) | |
1106 | && gimple_call_expected_throw_p (call)); | |
1107 | } | |
1108 | ||
1109 | /* Control flow redundancy hardening: record the execution path, and | |
1110 | verify at exit that an expected path was taken. */ | |
1111 | ||
1112 | unsigned int | |
1113 | pass_harden_control_flow_redundancy::execute (function *fun) | |
1114 | { | |
1115 | bool const check_at_escaping_exceptions | |
1116 | = (flag_exceptions | |
1117 | && flag_harden_control_flow_redundancy_check_exceptions); | |
1118 | bool const check_before_noreturn_calls | |
1119 | = flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NEVER; | |
1120 | bool const check_before_nothrow_noreturn_calls | |
1121 | = (check_before_noreturn_calls | |
1122 | && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_NOTHROW); | |
1123 | bool const check_before_throwing_noreturn_calls | |
1124 | = (flag_exceptions | |
1125 | && check_before_noreturn_calls | |
1126 | && flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NOTHROW); | |
1127 | bool const check_before_always_throwing_noreturn_calls | |
1128 | = (flag_exceptions | |
1129 | && check_before_noreturn_calls | |
1130 | && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_ALWAYS); | |
1131 | basic_block bb; | |
1132 | basic_block bb_eh_cleanup = NULL; | |
1133 | ||
1134 | if (flag_harden_control_flow_redundancy_skip_leaf) | |
1135 | { | |
1136 | bool found_calls_p = false; | |
1137 | ||
1138 | FOR_EACH_BB_FN (bb, fun) | |
1139 | { | |
1140 | for (gimple_stmt_iterator gsi = gsi_last_bb (bb); | |
1141 | !gsi_end_p (gsi); gsi_prev (&gsi)) | |
1142 | if (is_a <gcall *> (gsi_stmt (gsi))) | |
1143 | { | |
1144 | found_calls_p = true; | |
1145 | break; | |
1146 | } | |
1147 | if (found_calls_p) | |
1148 | break; | |
1149 | } | |
1150 | ||
1151 | if (!found_calls_p) | |
1152 | { | |
1153 | if (dump_file) | |
1154 | fprintf (dump_file, | |
1155 | "Disabling CFR for leaf function, as requested\n"); | |
1156 | ||
1157 | return 0; | |
1158 | } | |
1159 | } | |
1160 | ||
1161 | if (check_at_escaping_exceptions) | |
1162 | { | |
1163 | int lp_eh_cleanup = -1; | |
1164 | ||
1165 | /* Record the preexisting blocks, to avoid visiting newly-created | |
1166 | blocks. */ | |
1167 | auto_sbitmap to_visit (last_basic_block_for_fn (fun)); | |
1168 | bitmap_clear (to_visit); | |
1169 | ||
1170 | FOR_EACH_BB_FN (bb, fun) | |
1171 | bitmap_set_bit (to_visit, bb->index); | |
1172 | ||
1173 | /* Scan the blocks for stmts with escaping exceptions, that | |
1174 | wouldn't be denoted in the CFG, and associate them with an | |
1175 | empty cleanup handler around the whole function. Walk | |
1176 | backwards, so that even when we split the block, the stmts left to scan stay in place. */ | |
1177 | sbitmap_iterator it; | |
1178 | unsigned i; | |
1179 | EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it) | |
1180 | { | |
1181 | bb = BASIC_BLOCK_FOR_FN (fun, i); | |
1182 | ||
1183 | for (gimple_stmt_iterator gsi = gsi_last_bb (bb); | |
1184 | !gsi_end_p (gsi); gsi_prev (&gsi)) | |
1185 | { | |
1186 | gimple *stmt = gsi_stmt (gsi); | |
1187 | if (!stmt_could_throw_p (fun, stmt)) | |
1188 | continue; | |
1189 | ||
1190 | /* If it must not throw, or if it already has a handler, | |
1191 | we need not worry about it. */ | |
1192 | if (lookup_stmt_eh_lp (stmt) != 0) | |
1193 | continue; | |
1194 | ||
1195 | /* Don't split blocks at, nor add EH edges to, tail | |
1196 | calls; we will add verification before the call | |
1197 | anyway. */ | |
1198 | if (is_a <gcall *> (stmt) | |
1199 | && (gimple_call_must_tail_p (as_a <gcall *> (stmt)) | |
1200 | || gimple_call_tail_p (as_a <gcall *> (stmt)) | |
1201 | || returning_call_p (as_a <gcall *> (stmt)))) | |
1202 | continue; | |
1203 | ||
1204 | if (!gsi_one_before_end_p (gsi)) | |
1205 | split_block (bb, stmt); | |
1206 | /* A resx or noreturn call need not be associated with | |
1207 | the cleanup handler if we're going to add checking | |
1208 | before it. We only test cases that didn't require | |
1209 | block splitting because noreturn calls would always | |
1210 | be at the end of blocks, and we test for zero | |
1211 | successors because if there is an edge, it's not | |
1212 | noreturn, as any EH edges would have already been | |
1213 | caught by the lookup_stmt_eh_lp test above. */ | |
1214 | else if (check_before_noreturn_calls | |
1215 | && EDGE_COUNT (bb->succs) == 0 | |
1216 | && (is_a <gresx *> (stmt) | |
1217 | ? check_before_always_throwing_noreturn_calls | |
1218 | : (!is_a <gcall *> (stmt) | |
1219 | || !gimple_call_noreturn_p (stmt)) | |
1220 | ? (gcc_unreachable (), false) | |
1221 | : (!flag_exceptions | |
1222 | || gimple_call_nothrow_p (as_a <gcall *> (stmt))) | |
1223 | ? check_before_nothrow_noreturn_calls | |
1224 | : always_throwing_noreturn_call_p (stmt) | |
1225 | ? check_before_always_throwing_noreturn_calls | |
1226 | : check_before_throwing_noreturn_calls)) | |
1227 | { | |
1228 | if (dump_file) | |
1229 | { | |
1230 | fprintf (dump_file, | |
1231 | "Bypassing cleanup for noreturn stmt" | |
1232 | " in block %i:\n", | |
1233 | bb->index); | |
1234 | print_gimple_stmt (dump_file, stmt, 0); | |
1235 | } | |
1236 | continue; | |
1237 | } | |
1238 | ||
1239 | if (!bb_eh_cleanup) | |
1240 | { | |
1241 | bb_eh_cleanup = create_empty_bb (bb); | |
1242 | if (dom_info_available_p (CDI_DOMINATORS)) | |
1243 | set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb); | |
1244 | if (current_loops) | |
1245 | add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root); | |
1246 | ||
1247 | /* Make the new block an EH cleanup for the call. */ | |
1248 | eh_region new_r = gen_eh_region_cleanup (NULL); | |
1249 | eh_landing_pad lp = gen_eh_landing_pad (new_r); | |
1250 | tree label = gimple_block_label (bb_eh_cleanup); | |
1251 | lp->post_landing_pad = label; | |
1252 | EH_LANDING_PAD_NR (label) = lp_eh_cleanup = lp->index; | |
1253 | ||
1254 | /* Just propagate the exception. | |
1255 | We will later insert the verifier call. */ | |
1256 | gimple_stmt_iterator ehgsi; | |
1257 | ehgsi = gsi_after_labels (bb_eh_cleanup); | |
1258 | gresx *resx = gimple_build_resx (new_r->index); | |
1259 | gsi_insert_before (&ehgsi, resx, GSI_SAME_STMT); | |
1260 | ||
1261 | if (dump_file) | |
1262 | fprintf (dump_file, | |
1263 | "Created cleanup block %i:\n", | |
1264 | bb_eh_cleanup->index); | |
1265 | } | |
1266 | else if (dom_info_available_p (CDI_DOMINATORS)) | |
1267 | { | |
1268 | basic_block immdom; | |
1269 | immdom = get_immediate_dominator (CDI_DOMINATORS, | |
1270 | bb_eh_cleanup); | |
1271 | if (!dominated_by_p (CDI_DOMINATORS, bb, immdom)) | |
1272 | { | |
1273 | immdom = nearest_common_dominator (CDI_DOMINATORS, | |
1274 | immdom, bb); | |
1275 | set_immediate_dominator (CDI_DOMINATORS, | |
1276 | bb_eh_cleanup, immdom); | |
1277 | } | |
1278 | } | |
1279 | ||
1280 | if (dump_file) | |
1281 | { | |
1282 | fprintf (dump_file, | |
1283 | "Associated cleanup block with stmt in block %i:\n", | |
1284 | bb->index); | |
1285 | print_gimple_stmt (dump_file, stmt, 0); | |
1286 | } | |
1287 | ||
1288 | add_stmt_to_eh_lp (stmt, lp_eh_cleanup); | |
1289 | /* Finally, wire the EH cleanup block into the CFG. */ | |
1290 | edge neeh = make_eh_edge (stmt); | |
1291 | neeh->probability = profile_probability::never (); |
1292 | gcc_checking_assert (neeh->dest == bb_eh_cleanup); | |
1293 | if (neeh->dest->count.initialized_p ()) | |
1294 | neeh->dest->count += neeh->count (); | |
1295 | else | |
1296 | neeh->dest->count = neeh->count (); | |
1297 | } | |
1298 | } | |
1299 | ||
1300 | if (bb_eh_cleanup) | |
1301 | { | |
1302 | /* A cfg_cleanup after bb_eh_cleanup makes for a more compact | |
1303 | rtcfg, and it avoids bb numbering differences when we split | |
1304 | blocks because of trailing debug insns only. */ | |
1305 | cleanup_tree_cfg (); | |
1306 | gcc_checking_assert (EDGE_COUNT (bb_eh_cleanup->succs) == 0); | |
1307 | } | |
1308 | } | |
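/* The net effect of the above, when checking at escaping exceptions,
   is that every stmt that could let an exception escape now has an EH
   edge to a shared cleanup block that conceptually contains just:

     <bb_eh_cleanup>:
       resx;   // reraise; an exit check is inserted before it later

   so exceptions leaving the function also flow through a checked
   block.  */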
1309 | ||
1310 | /* These record blocks with calls that are to be preceded by | |
1311 | checkpoints, such as noreturn calls (if so chosen), must-tail | |
1312 | calls, potential early-marked tail calls, and returning calls (if | |
1313 | so chosen). */ | |
1314 | int count_chkcall = 0; | |
1315 | auto_sbitmap chkcall_blocks (last_basic_block_for_fn (fun)); | |
1316 | bitmap_clear (chkcall_blocks); | |
1317 | ||
1318 | /* We wish to add verification at blocks without successors, such as | |
1319 | noreturn calls (raising or not) and the reraise at the cleanup | |
1320 | block, but not other reraises: they will go through the cleanup | |
1321 | block. */ | |
1322 | if (check_before_noreturn_calls) | |
1323 | FOR_EACH_BB_FN (bb, fun) | |
1324 | { | |
1325 | gimple_stmt_iterator gsi = gsi_last_bb (bb); | |
1326 | if (gsi_end_p (gsi)) | |
1327 | continue; | |
1328 | gimple *stmt = gsi_stmt (gsi); | |
1329 | ||
1330 | if (EDGE_COUNT (bb->succs) == 0) | |
1331 | { | |
1332 | /* A stmt at the end of a block without any successors is | |
1333 | either a resx or a noreturn call without a local | |
1334 | handler. Check that it's one of the desired | |
1335 | checkpoints. */ | |
1336 | if (flag_exceptions && is_a <gresx *> (stmt) | |
1337 | ? (check_before_always_throwing_noreturn_calls | |
1338 | || bb == bb_eh_cleanup) | |
1339 | : (!is_a <gcall *> (stmt) | |
1340 | || !gimple_call_noreturn_p (stmt)) | |
1341 | ? (stmt_can_make_abnormal_goto (stmt) | |
1342 | /* ??? Check before indirect nonlocal goto, or | |
1343 | calls thereof? */ | |
1344 | ? false | |
1345 | /* Catch cases in which successors would be | |
1346 | expected. */ | |
1347 | : (gcc_unreachable (), false)) | |
1348 | : (!flag_exceptions | |
1349 | || gimple_call_nothrow_p (as_a <gcall *> (stmt))) | |
1350 | ? check_before_nothrow_noreturn_calls | |
1351 | : always_throwing_noreturn_call_p (stmt) | |
1352 | ? check_before_always_throwing_noreturn_calls | |
1353 | : check_before_throwing_noreturn_calls) | |
1354 | { | |
1355 | if (dump_file) | |
1356 | { | |
1357 | fprintf (dump_file, | |
1358 | "Scheduling check before stmt" | |
1359 | " in succ-less block %i:\n", | |
1360 | bb->index); | |
1361 | print_gimple_stmt (dump_file, stmt, 0); | |
1362 | } | |
1363 | ||
1364 | if (bitmap_set_bit (chkcall_blocks, bb->index)) | |
1365 | count_chkcall++; | |
1366 | else | |
1367 | gcc_unreachable (); | |
1368 | } | |
1369 | continue; | |
1370 | } | |
1371 | ||
1372 | /* If there are no exceptions, it would seem like any noreturn | |
1373 | call must have zero successor edges, but __builtin_return | |
1374 | gets successor edges. We don't want to handle it here, it | |
1375 | will be dealt with in sibcall_search_preds. Otherwise, | |
1376 | check for blocks without non-EH successors, but skip those | |
1377 | with resx stmts and edges (i.e., those other than that in | |
1378 | bb_eh_cleanup), since those will go through bb_eh_cleanup, | |
1379 | that will have been counted as noreturn above because it | |
1380 | has no successors. */ | |
1381 | gcc_checking_assert (bb != bb_eh_cleanup | |
1382 | || !check_at_escaping_exceptions); | |
1383 | if (flag_exceptions && is_a <gresx *> (stmt) | |
1384 | ? check_before_always_throwing_noreturn_calls | |
1385 | : (!is_a <gcall *> (stmt) | |
1386 | || !gimple_call_noreturn_p (stmt)) | |
1387 | ? false | |
1388 | : (!flag_exceptions | |
1389 | || gimple_call_nothrow_p (as_a <gcall *> (stmt))) | |
1390 | ? false /* rather than check_before_nothrow_noreturn_calls */ | |
1391 | : always_throwing_noreturn_call_p (stmt) | |
1392 | ? check_before_always_throwing_noreturn_calls | |
1393 | : check_before_throwing_noreturn_calls) | |
1394 | { | |
1395 | gcc_checking_assert (single_succ_p (bb) | |
1396 | && (single_succ_edge (bb)->flags & EDGE_EH)); | |
1397 | ||
1398 | if (dump_file) | |
1399 | { | |
1400 | fprintf (dump_file, | |
1401 | "Scheduling check before stmt" | |
1402 | " in EH-succ block %i:\n", | |
1403 | bb->index); | |
1404 | print_gimple_stmt (dump_file, stmt, 0); | |
1405 | } | |
1406 | ||
1407 | if (bitmap_set_bit (chkcall_blocks, bb->index)) | |
1408 | count_chkcall++; | |
1409 | else | |
1410 | gcc_unreachable (); | |
1411 | } | |
1412 | } | |
1413 | else if (bb_eh_cleanup) | |
1414 | { | |
1415 | if (bitmap_set_bit (chkcall_blocks, bb_eh_cleanup->index)) | |
1416 | count_chkcall++; | |
1417 | else | |
1418 | gcc_unreachable (); | |
1419 | } | |
1420 | ||
1421 | gcc_checking_assert (!bb_eh_cleanup | |
1422 | || bitmap_bit_p (chkcall_blocks, bb_eh_cleanup->index)); | |
1423 | ||
1424 | /* If we have neither edges to exit nor noreturn calls (including the | |
1425 | cleanup reraise), then we may skip instrumentation: that would | |
1426 | amount to a function that ends with an infinite loop. */ | |
1427 | if (!count_chkcall | |
1428 | && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0) | |
1429 | { | |
1430 | if (dump_file) | |
1431 | fprintf (dump_file, | |
1432 | "Disabling CFR, no exit paths to check\n"); | |
1433 | ||
1434 | return 0; | |
1435 | } | |
1436 | ||
1437 | /* Search for must-tail calls, early-marked potential tail calls, | |
1438 | and, if requested, returning calls. As we introduce early | |
1439 | checks, note the edges still needing exit checks and post-check blocks. */ | |
1440 | int count_postchk = 0; | |
1441 | auto_sbitmap postchk_blocks (last_basic_block_for_fn (fun)); | |
1442 | bitmap_clear (postchk_blocks); | |
1443 | chk_edges_t chk_edges; | |
1444 | hardcfr_sibcall_search_preds (EXIT_BLOCK_PTR_FOR_FN (fun), chk_edges, | |
1445 | count_chkcall, chkcall_blocks, | |
1446 | count_postchk, postchk_blocks, | |
1447 | NULL); | |
1448 | ||
1449 | rt_bb_visited vstd (chk_edges.length () + count_chkcall); | |
1450 | ||
1451 | auto_sbitmap combined_blocks (last_basic_block_for_fn (fun)); | |
1452 | bitmap_copy (combined_blocks, chkcall_blocks); | |
1453 | int i; | |
1454 | edge *e; | |
1455 | FOR_EACH_VEC_ELT (chk_edges, i, e) | |
1456 | if (!bitmap_set_bit (combined_blocks, (*e)->src->index)) | |
1457 | /* There may be multiple chk_edges with the same src block; | |
1458 | guard against overlaps with chkcall_blocks only. */ | |
1459 | gcc_assert (!bitmap_bit_p (chkcall_blocks, (*e)->src->index)); | |
1460 | ||
1461 | /* Visit blocks in index order, because building rtcfg depends on | |
1462 | that. Blocks must be compact, which the cleanup_cfg requirement | |
1463 | ensures. This would also enable FOR_EACH_BB_FN to be used to | |
1464 | iterate in index order, but bb_eh_cleanup block splits and | |
1465 | insertions change that. */ | |
1466 | gcc_checking_assert (n_basic_blocks_for_fn (fun) | |
1467 | == last_basic_block_for_fn (fun)); | |
1468 | for (int i = NUM_FIXED_BLOCKS; i < n_basic_blocks_for_fn (fun); i++) | |
1469 | { | |
1470 | bb = BASIC_BLOCK_FOR_FN (fun, i); | |
1471 | gcc_checking_assert (bb->index == i); | |
1472 | vstd.visit (bb, bitmap_bit_p (combined_blocks, i), | |
1473 | bitmap_bit_p (postchk_blocks, i)); | |
1474 | } | |
1475 | ||
1476 | vstd.check (chk_edges, count_chkcall, chkcall_blocks); | |
1477 | ||
1478 | return | |
1479 | TODO_update_ssa | |
1480 | | TODO_cleanup_cfg | |
1481 | | TODO_verify_il; | |
1482 | } | |
1483 | ||
1484 | /* Instantiate a hardcfr pass. */ | |
1485 | ||
1486 | gimple_opt_pass * | |
1487 | make_pass_harden_control_flow_redundancy (gcc::context *ctxt) | |
1488 | { | |
1489 | return new pass_harden_control_flow_redundancy (ctxt); | |
1490 | } |