gcc/gimple-harden-control-flow.cc
1 /* Control flow redundancy hardening.
2 Copyright (C) 2022 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <oliva@adacore.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_ALGORITHM /* find */
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tm_p.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-iterator.h"
34 #include "gimple-pretty-print.h"
35 #include "tree-cfg.h"
36 #include "tree-cfgcleanup.h"
37 #include "tree-eh.h"
38 #include "except.h"
39 #include "sbitmap.h"
40 #include "basic-block.h"
41 #include "cfghooks.h"
42 #include "cfgloop.h"
43 #include "cgraph.h"
44 #include "alias.h"
45 #include "varasm.h"
46 #include "output.h"
47 #include "langhooks.h"
48 #include "diagnostic.h"
49 #include "intl.h"
50
51 namespace {
52
53 /* This pass introduces verification, at function exits, that booleans
54 set in each basic block during function execution reflect the
55 control flow graph: for each visited block, check that at least one
56 predecessor and at least one successor were also visited. This
57 sort of hardening may detect various kinds of attacks. */
58
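/* Roughly: on entry to each block, the pass sets that block's bit in a
   local VISITED bit array; at the exit points, and before selected
   calls (noreturn, tail, and returning calls), it verifies the
   recorded bits, either inline (ORing any inconsistency into a flag
   that guards a __builtin_trap call) or by calling the out-of-line
   __hardcfr_check routine with a compact encoding of the CFG.  See
   rt_bb_visited below.  */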
59 /* Define a pass to harden code through control flow redundancy. */
60
61 const pass_data pass_data_harden_control_flow_redundancy = {
62 GIMPLE_PASS,
63 "hardcfr",
64 OPTGROUP_NONE,
65 TV_NONE,
66 PROP_cfg | PROP_ssa, // properties_required
67 0, // properties_provided
68 0, // properties_destroyed
69 TODO_cleanup_cfg, // todo_flags_start
70 0, // todo_flags_finish
71 };
72
73 class pass_harden_control_flow_redundancy : public gimple_opt_pass
74 {
75 public:
76 pass_harden_control_flow_redundancy (gcc::context *ctxt)
77 : gimple_opt_pass (pass_data_harden_control_flow_redundancy, ctxt)
78 {}
79 opt_pass *clone () { return new pass_harden_control_flow_redundancy (m_ctxt); }
80 virtual bool gate (function *fun) {
81 /* Return quickly if the pass is disabled, without checking any of
82 the conditions that might give rise to warnings that would only
83 be appropriate if hardening was requested. */
84 if (!flag_harden_control_flow_redundancy)
85 return false;
86
87 /* Functions that return more than once, like setjmp and vfork
88 (that also gets this flag set), will start recording a path
89 after the first return, and then may take another path when
90 they return again. The unterminated path may then be flagged
91 as an error. ??? We could save the visited array before the
92 call and restore it if it returns again. */
93 if (fun->calls_setjmp)
94 {
95 warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
96 "%qD calls %<setjmp%> or similar,"
97 " %<-fharden-control-flow-redundancy%> is not supported",
98 fun->decl);
99 return false;
100 }
101
102 /* Some targets bypass the abnormal dispatcher block in nonlocal
103 gotos, and then we'd miss its visited bit. It might be doable
104 to make it work uniformly, but this feature is not used often
105 enough to make it worthwhile. */
106 if (fun->has_nonlocal_label)
107 {
108 warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
109 "%qD receives nonlocal gotos,"
110 " %<-fharden-control-flow-redundancy%> is not supported",
111 fun->decl);
112 return false;
113 }
114
115 if (fun->cfg && param_hardcfr_max_blocks > 0
116 && (n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS
117 > param_hardcfr_max_blocks))
118 {
119 warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
120 "%qD has more than %u blocks, the requested"
121 " maximum for %<-fharden-control-flow-redundancy%>",
122 fun->decl, param_hardcfr_max_blocks);
123 return false;
124 }
125
126 return true;
127 }
128 virtual unsigned int execute (function *);
129 };
130
131 }
132
133 /* Return TRUE iff CFR checks should be inserted before returning
134 calls. */
135
136 static bool
137 check_returning_calls_p ()
138 {
139 return
140 flag_harden_control_flow_redundancy_check_returning_calls > 0
141 || (flag_harden_control_flow_redundancy_check_returning_calls < 0
142 /* Gates pass_tail_calls. */
143 && flag_optimize_sibling_calls
144 /* Gates pass_all_optimizations. */
145 && optimize >= 1 && !optimize_debug);
146 }
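/* In other words: a positive flag value enables the checks, zero
   disables them, and the default negative value makes them follow
   whether the tail-call optimization passes would run at the current
   optimization level.  */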
147
148 /* Scan BB from the end, updating *RETPTR, if given, as return stmts and
149 copies are found. Return a call or a stmt that cannot appear after
150 a tail call, or NULL if the top of the block is reached without
151 finding any. */
152
153 static gimple *
154 hardcfr_scan_block (basic_block bb, tree **retptr)
155 {
156 gimple_stmt_iterator gsi;
157 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
158 {
159 gimple *stmt = gsi_stmt (gsi);
160
161 /* Ignore labels, returns, nops, clobbers and debug stmts. */
162 if (gimple_code (stmt) == GIMPLE_LABEL
163 || gimple_code (stmt) == GIMPLE_NOP
164 || gimple_code (stmt) == GIMPLE_PREDICT
165 || gimple_clobber_p (stmt)
166 || is_gimple_debug (stmt))
167 continue;
168
169 if (gimple_code (stmt) == GIMPLE_RETURN)
170 {
171 greturn *gret = as_a <greturn *> (stmt);
172 if (retptr)
173 {
174 gcc_checking_assert (!*retptr);
175 *retptr = gimple_return_retval_ptr (gret);
176 }
177 continue;
178 }
179
180 /* Check for a call. */
181 if (is_gimple_call (stmt))
182 return stmt;
183
184 /* Allow simple copies to the return value, updating where the
185 return value is to be found in earlier assignments. */
186 if (retptr && *retptr && gimple_assign_single_p (stmt)
187 && **retptr == gimple_assign_lhs (stmt))
188 {
189 *retptr = gimple_assign_rhs1_ptr (stmt);
190 continue;
191 }
192
193 return stmt;
194 }
195
196 /* Any other kind of stmt will prevent a tail call. */
197 return NULL;
198 }
199
200 /* Return TRUE iff CALL is to be preceded by a CFR checkpoint, i.e.,
201 if it's a returning call (one whose result is ultimately returned
202 without intervening non-copy statements) and we're checking
203 returning calls, a __builtin_return call (noreturn with a path to
204 the exit block), a must-tail call, or a tail call. */
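/* E.g., in GIMPLE terms, a returning call looks roughly like
     _1 = foo (); return _1;
   possibly with intervening copies or PHI nodes carrying the returned
   value, as traced below.  */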
205
206 static bool
207 returning_call_p (gcall *call)
208 {
209 if (!(gimple_call_noreturn_p (call)
210 || gimple_call_must_tail_p (call)
211 || gimple_call_tail_p (call)
212 || check_returning_calls_p ()))
213 return false;
214
215 /* Quickly check that there's a path to exit compatible with a
216 returning call. Detect infinite loops by limiting the path
217 length to the basic block count, and by looking for duplicate
218 blocks before allocating more memory for the path, for amortized
219 O(n). */
220 auto_vec<basic_block, 10> path;
221 for (basic_block bb = gimple_bb (call);
222 bb != EXIT_BLOCK_PTR_FOR_FN (cfun);
223 bb = single_succ (bb))
224 if (!single_succ_p (bb)
225 || (single_succ_edge (bb)->flags & EDGE_EH) != 0
226 || n_basic_blocks_for_fn (cfun) - path.length () <= NUM_FIXED_BLOCKS
227 || (path.length () == path.allocated ()
228 && std::find (path.begin (), path.end (), bb) != path.end ()))
229 return false;
230 else
231 path.safe_push (bb);
232
233 /* Check the stmts in the blocks and trace the return value. */
234 tree *retptr = NULL;
235 for (;;)
236 {
237 gcc_checking_assert (!path.is_empty ());
238 basic_block bb = path.pop ();
239 gimple *stop = hardcfr_scan_block (bb, &retptr);
240 if (stop)
241 {
242 if (stop != call)
243 return false;
244 gcc_checking_assert (path.is_empty ());
245 break;
246 }
247
248 gphi *retphi = NULL;
249 if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
250 && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
251 && SSA_NAME_DEF_STMT (*retptr)
252 && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
253 && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
254 {
255 retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
256 gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
257 }
258 else
259 continue;
260
261 gcc_checking_assert (!path.is_empty ());
262 edge e = single_succ_edge (path.last ());
263 int i = EDGE_COUNT (bb->preds);
264 while (i--)
265 if (EDGE_PRED (bb, i) == e)
266 break;
267 gcc_checking_assert (i >= 0);
268 retptr = gimple_phi_arg_def_ptr (retphi, i);
269 }
270
271 return (gimple_call_noreturn_p (call)
272 || gimple_call_must_tail_p (call)
273 || gimple_call_tail_p (call)
274 || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
275 && check_returning_calls_p ()));
276 }
277
278 typedef auto_vec<edge, 10> chk_edges_t;
279
280 /* Declare for mutual recursion. */
281 static bool hardcfr_sibcall_search_preds (basic_block bb,
282 chk_edges_t &chk_edges,
283 int &count_chkcall,
284 auto_sbitmap &chkcall_blocks,
285 int &count_postchk,
286 auto_sbitmap &postchk_blocks,
287 tree *retptr);
288
289 /* Search backwards from the end of BB for a mandatory or potential
290 sibcall. Schedule the block to be handled sort-of like noreturn if
291 so. Recurse to preds, with updated RETPTR, if the block only
292 contains stmts that may follow such a call, scheduling checking at
293 edges and marking blocks as post-check as needed. Return true iff,
294 at the end of the block, a check will have already been
295 performed. */
296
297 static bool
298 hardcfr_sibcall_search_block (basic_block bb,
299 chk_edges_t &chk_edges,
300 int &count_chkcall,
301 auto_sbitmap &chkcall_blocks,
302 int &count_postchk,
303 auto_sbitmap &postchk_blocks,
304 tree *retptr)
305 {
306 /* Conditionals and internal exceptions rule out tail calls. */
307 if (!single_succ_p (bb)
308 || (single_succ_edge (bb)->flags & EDGE_EH) != 0)
309 return false;
310
311 gimple *stmt = hardcfr_scan_block (bb, &retptr);
312 if (!stmt)
313 return hardcfr_sibcall_search_preds (bb, chk_edges,
314 count_chkcall, chkcall_blocks,
315 count_postchk, postchk_blocks,
316 retptr);
317
318 if (!is_a <gcall *> (stmt))
319 return false;
320
321 /* Avoid disrupting mandatory or early-marked tail calls,
322 inserting the check before them. This works for
323 must-tail calls, but tail calling as an optimization is
324 detected too late for us.
325
326 Also check for noreturn calls here. Noreturn calls won't
327 normally have edges to exit, so they won't be found here,
328 but __builtin_return does, and we must check before
329 it, so handle it like a tail call. */
330 gcall *call = as_a <gcall *> (stmt);
331 if (!(gimple_call_noreturn_p (call)
332 || gimple_call_must_tail_p (call)
333 || gimple_call_tail_p (call)
334 || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
335 && check_returning_calls_p ())))
336 return false;
337
338 gcc_checking_assert (returning_call_p (call));
339
340 /* We found a call that is to be preceded by checking. */
341 if (bitmap_set_bit (chkcall_blocks, bb->index))
342 ++count_chkcall;
343 else
344 gcc_unreachable ();
345 return true;
346 }
347
348
349 /* Search preds of BB for a mandatory or potential sibcall or
350 returning call, and arrange for the blocks containing them to have
351 a check inserted before the call, like noreturn calls. If any
352 preds are found to perform checking, schedule checks at the edges
353 of those that don't, and mark BB as postcheck. */
354
355 static bool
356 hardcfr_sibcall_search_preds (basic_block bb,
357 chk_edges_t &chk_edges,
358 int &count_chkcall,
359 auto_sbitmap &chkcall_blocks,
360 int &count_postchk,
361 auto_sbitmap &postchk_blocks,
362 tree *retptr)
363 {
364 /* For the exit block, we wish to force a check at every
365 predecessor, so pretend we've already found a pred that had
366 checking, so that we schedule checking at every one of its pred
367 edges. */
368 bool first = bb->index >= NUM_FIXED_BLOCKS;
369 bool postchecked = true;
370
371 gphi *retphi = NULL;
372 if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
373 && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
374 && SSA_NAME_DEF_STMT (*retptr)
375 && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
376 && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
377 {
378 retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
379 gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
380 }
381
382 for (int i = EDGE_COUNT (bb->preds); i--; first = false)
383 {
384 edge e = EDGE_PRED (bb, i);
385
386 bool checked
387 = hardcfr_sibcall_search_block (e->src, chk_edges,
388 count_chkcall, chkcall_blocks,
389 count_postchk, postchk_blocks,
390 !retphi ? retptr
391 : gimple_phi_arg_def_ptr (retphi, i));
392
393 if (first)
394 {
395 postchecked = checked;
396 continue;
397 }
398
399 /* When we first find a checked block, force a check at every
400 other incoming edge we've already visited, and those we
401 visit afterwards that don't have their own check, so that
402 when we reach BB, the check has already been performed. */
403 if (!postchecked && checked)
404 {
405 for (int j = EDGE_COUNT (bb->preds); --j > i; )
406 chk_edges.safe_push (EDGE_PRED (bb, j));
407 postchecked = true;
408 }
409 if (postchecked && !checked)
410 chk_edges.safe_push (EDGE_PRED (bb, i));
411 }
412
413 if (postchecked && bb->index >= NUM_FIXED_BLOCKS)
414 {
415 if (bitmap_set_bit (postchk_blocks, bb->index))
416 count_postchk++;
417 else
418 gcc_unreachable ();
419 }
420
421 return postchecked;
422 }
423
424
425 class rt_bb_visited
426 {
427 /* Use a sufficiently wide unsigned type to hold basic block numbers. */
428 typedef size_t blknum;
429
430 /* Record the original block count of the function. */
431 blknum nblocks;
432 /* Record the number of bits per VWORD (short for VISITED WORD), an
433 efficient mode to set and test bits for blocks we visited, and to
434 encode the CFG in case out-of-line verification is used. */
435 unsigned vword_bits;
436
437 /* Hold the unsigned integral VWORD type. */
438 tree vword_type;
439 /* Hold a pointer-to-VWORD type. */
440 tree vword_ptr;
441
442 /* Hold a growing sequence used to check, inline or out-of-line,
443 that VISITED encodes an expected execution path. */
444 gimple_seq ckseq;
445 /* If non-NULL, hold a growing representation of the CFG for
446 out-of-line testing. */
447 tree rtcfg;
448
449 /* Hold the declaration of an array of VWORDs, used as an array of
450 NBLOCKS-2 bits. */
451 tree visited;
452
453 /* If performing inline checking, hold declarations of the boolean
454 variables used for inline checking. CKBLK holds the result of
455 testing whether the VISITED bit corresponding to a predecessor or
456 successor is set, CKINV inverts that bit, CKPART gets cleared if
457 a block was not visited or if CKINV for any of its predecessors
458 or successors is set, and CKFAIL gets set if CKPART remains set
459 at the end of a block's predecessors or successors list. */
460 tree ckfail, ckpart, ckinv, ckblk;
461
462 /* Convert a block index N to a block vindex, the index used to
463 identify it in the VISITED array. Check that it's in range:
464 neither ENTRY nor EXIT, but maybe one-past-the-end, to compute
465 the visited array length. */
466 blknum num2idx (blknum n) {
467 gcc_checking_assert (n >= NUM_FIXED_BLOCKS && n <= nblocks);
468 return (n - NUM_FIXED_BLOCKS);
469 }
470 /* Return the block vindex for BB, which must not be ENTRY or
471 EXIT. */
472 blknum bb2idx (basic_block bb) {
473 gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
474 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
475 gcc_checking_assert (blknum (bb->index) < nblocks);
476 return num2idx (bb->index);
477 }
478
479 /* Compute the type to be used for the VISITED array. */
480 tree vtype ()
481 {
482 blknum n = num2idx (nblocks);
483 return build_array_type_nelts (vword_type,
484 (n + vword_bits - 1) / vword_bits);
485 }
486
487 /* Compute and return the index into VISITED for block BB. If BITP
488 is non-NULL, also compute and store the bit mask corresponding to
489 block BB in *BITP, so that (visited[index] & mask) tells whether
490 BB was visited. */
491 tree vwordidx (basic_block bb, tree *bitp = NULL)
492 {
493 blknum idx = bb2idx (bb);
494 if (bitp)
495 {
496 unsigned bit = idx % vword_bits;
497 /* We don't need to adjust shifts to follow native bit
498 endianness here, all of our uses of the CFG and visited
499 bitmaps, whether at compile or runtime, are shifted bits on
500 full words. This adjustment here would require a
501 corresponding adjustment at runtime, which would be nothing
502 but undesirable overhead for us. */
503 if (0 /* && BITS_BIG_ENDIAN */)
504 bit = vword_bits - bit - 1;
505 wide_int wbit = wi::set_bit_in_zero (bit, vword_bits);
506 *bitp = wide_int_to_tree (vword_type, wbit);
507 }
508 return build_int_cst (vword_ptr, idx / vword_bits);
509 }
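/* For illustration, assuming vword_bits == 32: a block with index 40
   has vindex 38 (40 - NUM_FIXED_BLOCKS), so it lives in VISITED word
   38 / 32 == 1, under bit 38 % 32 == 6, i.e. mask 0x40.  */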
510
511 /* Return an expr that accesses the visited element that holds
512 information about BB. If BITP is non-NULL, set it to the mask to
513 tell which bit in that expr refers to BB. */
514 tree vword (basic_block bb, tree *bitp = NULL)
515 {
516 return build2 (MEM_REF, vword_type,
517 build1 (ADDR_EXPR, vword_ptr, visited),
518 int_const_binop (MULT_EXPR, vwordidx (bb, bitp),
519 fold_convert (vword_ptr,
520 TYPE_SIZE_UNIT
521 (vword_type))));
522 }
523
524 /* Return an expr that evaluates to true iff BB was marked as
525 VISITED. Add any gimple stmts to SEQP. */
526 tree vindex (basic_block bb, gimple_seq *seqp)
527 {
528 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
529 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
530 return boolean_true_node;
531
532 tree bit, setme = vword (bb, &bit);
533 tree temp = create_tmp_var (vword_type, ".cfrtemp");
534
535 gassign *vload = gimple_build_assign (temp, setme);
536 gimple_seq_add_stmt (seqp, vload);
537
538 gassign *vmask = gimple_build_assign (temp, BIT_AND_EXPR, temp, bit);
539 gimple_seq_add_stmt (seqp, vmask);
540
541 return build2 (NE_EXPR, boolean_type_node,
542 temp, build_int_cst (vword_type, 0));
543 }
544
545 /* Set the bit corresponding to BB in VISITED. Add to SEQ any
546 required gimple stmts, and return SEQ, possibly modified. */
547 gimple_seq vset (basic_block bb, gimple_seq seq = NULL)
548 {
549 tree bit, setme = vword (bb, &bit);
550 tree temp = create_tmp_var (vword_type, ".cfrtemp");
551
552 gassign *vload = gimple_build_assign (temp, setme);
553 gimple_seq_add_stmt (&seq, vload);
554
555 gassign *vbitset = gimple_build_assign (temp, BIT_IOR_EXPR, temp, bit);
556 gimple_seq_add_stmt (&seq, vbitset);
557
558 gassign *vstore = gimple_build_assign (unshare_expr (setme), temp);
559 gimple_seq_add_stmt (&seq, vstore);
560
561 /* Prevent stores into visited from being deferred, forcing
562 subsequent bitsets to reload the word rather than reusing
563 values already in register. The purpose is threefold: make the
564 bitset get to memory in this block, so that control flow
565 attacks in functions called in this block don't easily bypass
566 the bitset; prevent the bitset word from being retained in a
567 register across blocks, which could, in an attack scenario,
568 make a later block set more than one bit; and prevent hoisting
569 or sinking loads or stores of bitset words out of loops or even
570 throughout functions, which could significantly weaken the
571 verification. This is equivalent to making the bitsetting
572 volatile within the function body, but without changing its
573 type; making the bitset volatile would make inline checking far
574 less optimizable for no reason. */
575 vec<tree, va_gc> *inputs = NULL;
576 vec<tree, va_gc> *outputs = NULL;
577 vec_safe_push (outputs,
578 build_tree_list
579 (build_tree_list
580 (NULL_TREE, build_string (2, "=m")),
581 visited));
582 vec_safe_push (inputs,
583 build_tree_list
584 (build_tree_list
585 (NULL_TREE, build_string (1, "m")),
586 visited));
587 gasm *stabilize = gimple_build_asm_vec ("", inputs, outputs,
588 NULL, NULL);
589 gimple_seq_add_stmt (&seq, stabilize);
590
591 return seq;
592 }
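/* The sequence built above is roughly equivalent to
     visited[w] |= mask;
     __asm__ ("" : "=m" (visited) : "m" (visited));
   where W and MASK are the word index and bit mask for BB.  */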
593
594 public:
595 /* Prepare to add control flow redundancy testing to CFUN. */
596 rt_bb_visited (int checkpoints)
597 : nblocks (n_basic_blocks_for_fn (cfun)),
598 vword_type (NULL), ckseq (NULL), rtcfg (NULL)
599 {
600 /* If we've already added a declaration for the builtin checker,
601 extract vword_type and vword_bits from its declaration. */
602 if (tree checkfn = builtin_decl_explicit (BUILT_IN___HARDCFR_CHECK))
603 {
604 tree check_arg_list = TYPE_ARG_TYPES (TREE_TYPE (checkfn));
605 tree vword_const_ptr_type = TREE_VALUE (TREE_CHAIN (check_arg_list));
606 vword_type = TYPE_MAIN_VARIANT (TREE_TYPE (vword_const_ptr_type));
607 vword_bits = tree_to_shwi (TYPE_SIZE (vword_type));
608 }
609 /* Otherwise, select vword_bits, vword_type et al, and use it to
610 declare the builtin checker. */
611 else
612 {
613 /* This setting needs to be kept in sync with libgcc/hardcfr.c.
614 We aim for at least 28 bits, which enables us to refer to as
615 many as 28 << 28 blocks in a function's CFG. That's way over
616 4G blocks. */
617 machine_mode VWORDmode;
618 if (BITS_PER_UNIT >= 28)
619 {
620 VWORDmode = QImode;
621 vword_bits = BITS_PER_UNIT;
622 }
623 else if (BITS_PER_UNIT >= 14)
624 {
625 VWORDmode = HImode;
626 vword_bits = 2 * BITS_PER_UNIT;
627 }
628 else
629 {
630 VWORDmode = SImode;
631 vword_bits = 4 * BITS_PER_UNIT;
632 }
633
634 vword_type = lang_hooks.types.type_for_mode (VWORDmode, 1);
635 gcc_checking_assert (vword_bits == tree_to_shwi (TYPE_SIZE
636 (vword_type)));
637
638 vword_type = build_variant_type_copy (vword_type);
639 TYPE_ALIAS_SET (vword_type) = new_alias_set ();
640
641 tree vword_const = build_qualified_type (vword_type, TYPE_QUAL_CONST);
642 tree vword_const_ptr = build_pointer_type (vword_const);
643 tree type = build_function_type_list (void_type_node, sizetype,
644 vword_const_ptr, vword_const_ptr,
645 NULL_TREE);
646 tree decl = add_builtin_function_ext_scope
647 ("__builtin___hardcfr_check",
648 type, BUILT_IN___HARDCFR_CHECK, BUILT_IN_NORMAL,
649 "__hardcfr_check", NULL_TREE);
650 TREE_NOTHROW (decl) = true;
651 set_builtin_decl (BUILT_IN___HARDCFR_CHECK, decl, true);
652 }
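/* The declaration built above corresponds, roughly, to
     void __hardcfr_check (size_t blocks,
                           const vword *visited, const vword *cfg);
   with the parameter names here being illustrative only; the actual
   implementation lives in libgcc/hardcfr.c.  */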
653
654 /* The checker uses a const-qualified pointer type, which we can't
655 reuse here, so build a separate unqualified pointer type. */
656 vword_ptr = build_pointer_type (vword_type);
657
658 tree visited_type = vtype ();
659 visited = create_tmp_var (visited_type, ".cfrvisited");
660
661 if (nblocks - NUM_FIXED_BLOCKS > blknum (param_hardcfr_max_inline_blocks)
662 || checkpoints > 1)
663 {
664 /* Make sure vword_bits is wide enough for the representation
665 of nblocks in rtcfg. Compare with vword_bits << vword_bits,
666 but avoiding overflows, shifting nblocks right instead. If
667 vword_bits is wider than HOST_WIDE_INT, assume it fits, so
668 as to avoid undefined shifts. */
669 gcc_assert (HOST_BITS_PER_WIDE_INT <= vword_bits
670 || (((unsigned HOST_WIDE_INT)(num2idx (nblocks))
671 >> vword_bits) < vword_bits));
672
673 /* Build a terminator for the constructor list. */
674 rtcfg = build_tree_list (NULL_TREE, NULL_TREE);
675 return;
676 }
677
678 ckfail = create_tmp_var (boolean_type_node, ".cfrfail");
679 ckpart = create_tmp_var (boolean_type_node, ".cfrpart");
680 ckinv = create_tmp_var (boolean_type_node, ".cfrinv");
681 ckblk = create_tmp_var (boolean_type_node, ".cfrblk");
682
683 gassign *ckfail_init = gimple_build_assign (ckfail, boolean_false_node);
684 gimple_seq_add_stmt (&ckseq, ckfail_init);
685 }
686
687 /* Insert SEQ before a resx or a call in INSBB. */
688 void insert_exit_check_in_block (gimple_seq seq, basic_block insbb)
689 {
690 gimple_stmt_iterator gsi = gsi_last_bb (insbb);
691
692 while (!gsi_end_p (gsi))
693 if (is_a <gresx *> (gsi_stmt (gsi))
694 || is_a <gcall *> (gsi_stmt (gsi)))
695 break;
696 else
697 gsi_prev (&gsi);
698
699 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
700 }
701
702 /* Insert SEQ on E. */
703 void insert_exit_check_on_edge (gimple_seq seq, edge e)
704 {
705 gsi_insert_seq_on_edge_immediate (e, seq);
706 }
707
708 /* Add checking code to CHK_EDGES and CHKCALL_BLOCKS, and
709 initialization code on the entry edge. Before this point, the
710 CFG has been undisturbed, and all the needed data has been
711 collected and safely stowed. */
712 void check (chk_edges_t &chk_edges,
713 int count_chkcall, auto_sbitmap const &chkcall_blocks)
714 {
715 /* If we're using out-of-line checking, create and statically
716 initialize the CFG checking representation, generate the
717 checker call for the checking sequence, and insert it in all
718 exit edges, if there's more than one. If there's only one, we
719 use the same logic as the inline case to insert the check
720 sequence. */
721 if (rtcfg)
722 {
723 /* Unreverse the list, and drop the tail node turned into head. */
724 rtcfg = TREE_CHAIN (nreverse (rtcfg));
725
726 /* Turn the indices stored in TREE_PURPOSE into separate
727 nodes. It was useful to keep them together to enable
728 combination of masks and for clear separation of
729 terminators while constructing it, but now we have to turn
730 it into a sequence of words. */
731 for (tree node = rtcfg; node; node = TREE_CHAIN (node))
732 {
733 tree wordidx = TREE_PURPOSE (node);
734 if (!wordidx)
735 continue;
736
737 TREE_PURPOSE (node) = NULL_TREE;
738 TREE_CHAIN (node) = tree_cons (NULL_TREE,
739 fold_convert (vword_type, wordidx),
740 TREE_CHAIN (node));
741 }
742
743 /* Build the static initializer for the array with the CFG
744 representation for out-of-line checking. */
745 tree init = build_constructor_from_list (NULL_TREE, rtcfg);
746 TREE_TYPE (init) = build_array_type_nelts (vword_type,
747 CONSTRUCTOR_NELTS (init));
748 char buf[32];
749 ASM_GENERATE_INTERNAL_LABEL (buf, "Lhardcfg",
750 current_function_funcdef_no);
751 rtcfg = build_decl (UNKNOWN_LOCATION, VAR_DECL,
752 get_identifier (buf),
753 TREE_TYPE (init));
754 TREE_READONLY (rtcfg) = 1;
755 TREE_STATIC (rtcfg) = 1;
756 TREE_ADDRESSABLE (rtcfg) = 1;
757 TREE_USED (rtcfg) = 1;
758 DECL_ARTIFICIAL (rtcfg) = 1;
759 DECL_IGNORED_P (rtcfg) = 1;
760 DECL_INITIAL (rtcfg) = init;
761 make_decl_rtl (rtcfg);
762 varpool_node::finalize_decl (rtcfg);
763
764 /* Add the checker call to ckseq. */
765 gcall *call_chk = gimple_build_call (builtin_decl_explicit
766 (BUILT_IN___HARDCFR_CHECK), 3,
767 build_int_cst (sizetype,
768 num2idx (nblocks)),
769 build1 (ADDR_EXPR, vword_ptr,
770 visited),
771 build1 (ADDR_EXPR, vword_ptr,
772 rtcfg));
773 gimple_seq_add_stmt (&ckseq, call_chk);
774
775 gimple *clobber = gimple_build_assign (visited,
776 build_clobber
777 (TREE_TYPE (visited)));
778 gimple_seq_add_stmt (&ckseq, clobber);
779
780 /* If we have multiple exit edges, insert (copies of)
781 ckseq in all of them. */
782 for (int i = chk_edges.length (); i--; )
783 {
784 gimple_seq seq = ckseq;
785 /* Copy the sequence, unless we're dealing with the
786 last edge (we're counting down to zero). */
787 if (i || count_chkcall)
788 seq = gimple_seq_copy (seq);
789
790 edge e = chk_edges[i];
791
792 if (dump_file)
793 {
794 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
795 fprintf (dump_file,
796 "Inserting out-of-line check in"
797 " block %i's edge to exit.\n",
798 e->src->index);
799 else
800 fprintf (dump_file,
801 "Inserting out-of-line check in"
802 " block %i's edge to postcheck block %i.\n",
803 e->src->index, e->dest->index);
804 }
805
806 insert_exit_check_on_edge (seq, e);
807
808 gcc_checking_assert (!bitmap_bit_p (chkcall_blocks, e->src->index));
809 }
810
811 sbitmap_iterator it;
812 unsigned i;
813 EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
814 {
815 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
816
817 gimple_seq seq = ckseq;
818 gcc_checking_assert (count_chkcall > 0);
819 if (--count_chkcall)
820 seq = gimple_seq_copy (seq);
821
822 if (dump_file)
823 fprintf (dump_file,
824 "Inserting out-of-line check before stmt in block %i.\n",
825 bb->index);
826
827 insert_exit_check_in_block (seq, bb);
828 }
829
830 gcc_checking_assert (count_chkcall == 0);
831 }
832 else
833 {
834 /* Inline checking requires a single exit edge. */
835 gimple *last = gimple_build_assign (visited,
836 build_clobber
837 (TREE_TYPE (visited)));
838 gimple_seq_add_stmt (&ckseq, last);
839
840 if (!count_chkcall)
841 {
842 edge e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
843
844 if (dump_file)
845 {
846 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
847 fprintf (dump_file,
848 "Inserting inline check in"
849 " block %i's edge to exit.\n",
850 e->src->index);
851 else
852 fprintf (dump_file,
853 "Inserting inline check in"
854 " block %i's edge to postcheck block %i.\n",
855 e->src->index, e->dest->index);
856 }
857
858 insert_exit_check_on_edge (ckseq, e);
859 }
860 else
861 {
862 gcc_checking_assert (count_chkcall == 1);
863
864 sbitmap_iterator it;
865 unsigned i;
866 EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
867 {
868 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
869
870 gimple_seq seq = ckseq;
871 gcc_checking_assert (count_chkcall > 0);
872 if (--count_chkcall)
873 seq = gimple_seq_copy (seq);
874
875 if (dump_file)
876 fprintf (dump_file,
877 "Inserting inline check before stmt in block %i.\n",
878 bb->index);
879
880 insert_exit_check_in_block (seq, bb);
881 }
882
883 gcc_checking_assert (count_chkcall == 0);
884 }
885
886 /* The inserted ckseq computes CKFAIL at LAST. Now we have to
887 conditionally trap on it. */
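/* That is, generate roughly: if (ckfail) __builtin_trap ();  */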
888 basic_block insbb = gimple_bb (last);
889
890 /* Create a block with the unconditional trap. */
891 basic_block trp = create_empty_bb (insbb);
892 gimple_stmt_iterator gsit = gsi_after_labels (trp);
893
894 gcall *trap = gimple_build_call (builtin_decl_explicit
895 (BUILT_IN_TRAP), 0);
896 gsi_insert_before (&gsit, trap, GSI_SAME_STMT);
897
898 if (BB_PARTITION (insbb))
899 BB_SET_PARTITION (trp, BB_COLD_PARTITION);
900
901 if (current_loops)
902 add_bb_to_loop (trp, current_loops->tree_root);
903
904 /* Insert a conditional branch to the trap block. If the
905 conditional wouldn't be the last stmt, split the block. */
906 gimple_stmt_iterator gsi = gsi_for_stmt (last);
907 if (!gsi_one_before_end_p (gsi))
908 split_block (gsi_bb (gsi), gsi_stmt (gsi));
909
910 gcond *cond = gimple_build_cond (NE_EXPR, ckfail,
911 fold_convert (TREE_TYPE (ckfail),
912 boolean_false_node),
913 NULL, NULL);
914 gsi_insert_after (&gsi, cond, GSI_SAME_STMT);
915
916 /* Adjust the edges. */
917 single_succ_edge (gsi_bb (gsi))->flags &= ~EDGE_FALLTHRU;
918 single_succ_edge (gsi_bb (gsi))->flags |= EDGE_FALSE_VALUE;
919 single_succ_edge (gsi_bb (gsi))->probability
920 = profile_probability::always ();
921 edge e = make_edge (gsi_bb (gsi), trp, EDGE_TRUE_VALUE);
922 e->probability = profile_probability::never ();
923 gcc_checking_assert (e->dest == trp);
924 gcc_checking_assert (!e->dest->count.initialized_p ());
925 e->dest->count = e->count ();
926
927 /* Set the trap's dominator after splitting. */
928 if (dom_info_available_p (CDI_DOMINATORS))
929 set_immediate_dominator (CDI_DOMINATORS, trp, gimple_bb (last));
930 }
931
932 /* Insert initializers for visited at the entry. Do this after
933 other insertions, to avoid messing with block numbers. */
934 gimple_seq iseq = NULL;
935
936 gcall *vinit = gimple_build_call (builtin_decl_explicit
937 (BUILT_IN_MEMSET), 3,
938 build1 (ADDR_EXPR,
939 build_pointer_type
940 (TREE_TYPE (visited)),
941 visited),
942 integer_zero_node,
943 TYPE_SIZE_UNIT (TREE_TYPE (visited)));
944 gimple_seq_add_stmt (&iseq, vinit);
945
946 gsi_insert_seq_on_edge_immediate (single_succ_edge
947 (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
948 iseq);
949 }
950
951 /* Push onto RTCFG a (mask, index) pair to test for IBB when BB is
952 visited. XSELF is to be the ENTRY or EXIT block (depending on
953 whether we're looking at preds or succs), to be remapped to BB
954 because we can't represent them, and there's no point in testing
955 them anyway. Return true if no further blocks need to be visited
956 in the list, because we've already encountered a
957 self-reference. */
958 bool
959 push_rtcfg_pair (basic_block ibb, basic_block bb,
960 basic_block xself)
961 {
962 /* We don't have a bit to test for the entry and exit
963 blocks, but it is always visited, so we test for the
964 block itself, which gets us the right result and
965 enables the self-test optimization below. */
966 if (ibb == xself)
967 ibb = bb;
968
969 tree mask, idx = vwordidx (ibb, &mask);
970 /* Combine masks with the same idx, but not if we're going
971 to optimize for self-test. */
972 if (ibb != bb && TREE_PURPOSE (rtcfg)
973 && tree_int_cst_equal (idx, TREE_PURPOSE (rtcfg)))
974 TREE_VALUE (rtcfg) = int_const_binop (BIT_IOR_EXPR, mask,
975 TREE_VALUE (rtcfg));
976 else
977 rtcfg = tree_cons (idx, mask, rtcfg);
978
979 /* For self-tests (i.e., tests that the block itself was
980 also visited), testing anything else is pointless,
981 because it's a tautology, so just drop other edges. */
982 if (ibb == bb)
983 {
984 while (TREE_PURPOSE (TREE_CHAIN (rtcfg)))
985 TREE_CHAIN (rtcfg) = TREE_CHAIN (TREE_CHAIN (rtcfg));
986 return true;
987 }
988
989 return false;
990 }
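/* The resulting runtime CFG encoding is, roughly, a flat array of
   VWORDs holding, for each block in index order: entries describing
   its predecessors, a zero terminator, entries describing its
   successors, and another zero terminator, where each entry pairs a
   bit mask with the index of the VISITED word it applies to (see the
   expansion in check ()).  */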
991
992 /* Add to CKSEQ stmts to clear CKPART if OBB is visited. */
993 void
994 build_block_check (basic_block obb)
995 {
996 tree vobb = fold_convert (TREE_TYPE (ckblk),
997 vindex (obb, &ckseq));
998 gassign *blkrunp = gimple_build_assign (ckblk, vobb);
999 gimple_seq_add_stmt (&ckseq, blkrunp);
1000
1001 gassign *blknotrunp = gimple_build_assign (ckinv,
1002 EQ_EXPR,
1003 ckblk,
1004 fold_convert
1005 (TREE_TYPE (ckblk),
1006 boolean_false_node));
1007 gimple_seq_add_stmt (&ckseq, blknotrunp);
1008
1009 gassign *andblk = gimple_build_assign (ckpart,
1010 BIT_AND_EXPR,
1011 ckpart, ckinv);
1012 gimple_seq_add_stmt (&ckseq, andblk);
1013 }
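/* Combined with visit () below, the inline check for a block B with
   predecessors P1..Pn thus amounts roughly to
     ckpart = visited (B);
     ckpart &= !visited (P1); ... ckpart &= !visited (Pn);
     ckfail |= ckpart;
   and likewise for successors, so CKFAIL ends up set iff some
   instrumented block was visited but none of its predecessors (or
   none of its successors) were.  */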
1014
1015 /* Add to BB code to set its bit in VISITED, and add to RTCFG or
1016 CKSEQ the data or code needed to check BB's predecessors and
1017 successors. If CHECKPOINT, assume the block is a checkpoint,
1018 whether or not it has an edge to EXIT. If POSTCHECK, assume the
1019 block post-dominates checkpoints and therefore no bitmap setting
1020 or checks are to be performed in or for it. Do NOT change the
1021 CFG. */
1022 void visit (basic_block bb, bool checkpoint, bool postcheck)
1023 {
1024 /* Set the bit in VISITED when entering the block. */
1025 gimple_stmt_iterator gsi = gsi_after_labels (bb);
1026 if (!postcheck)
1027 gsi_insert_seq_before (&gsi, vset (bb), GSI_SAME_STMT);
1028
1029 if (rtcfg)
1030 {
1031 if (!postcheck)
1032 {
1033 /* Build a list of (index, mask) terminated by (NULL, 0).
1034 Consolidate masks with the same index when they're
1035 adjacent. First, predecessors. Count backwards, because
1036 we're going to reverse the list. The order shouldn't
1037 matter, but let's not make it surprising. */
1038 for (int i = EDGE_COUNT (bb->preds); i--; )
1039 if (push_rtcfg_pair (EDGE_PRED (bb, i)->src, bb,
1040 ENTRY_BLOCK_PTR_FOR_FN (cfun)))
1041 break;
1042 }
1043 rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
1044
1045 if (!postcheck)
1046 {
1047 /* Then, successors. */
1048 if (!checkpoint
1049 || !push_rtcfg_pair (EXIT_BLOCK_PTR_FOR_FN (cfun),
1050 bb, EXIT_BLOCK_PTR_FOR_FN (cfun)))
1051 for (int i = EDGE_COUNT (bb->succs); i--; )
1052 if (push_rtcfg_pair (EDGE_SUCC (bb, i)->dest, bb,
1053 EXIT_BLOCK_PTR_FOR_FN (cfun)))
1054 break;
1055 }
1056 rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
1057 }
1058 else if (!postcheck)
1059 {
1060 /* Schedule test to fail if the block was reached but somehow none
1061 of its predecessors were. */
1062 tree bit = fold_convert (TREE_TYPE (ckpart), vindex (bb, &ckseq));
1063 gassign *blkrunp = gimple_build_assign (ckpart, bit);
1064 gimple_seq_add_stmt (&ckseq, blkrunp);
1065
1066 for (int i = 0, e = EDGE_COUNT (bb->preds); i < e; i++)
1067 build_block_check (EDGE_PRED (bb, i)->src);
1068 gimple *orfailp = gimple_build_assign (ckfail, BIT_IOR_EXPR,
1069 ckfail, ckpart);
1070 gimple_seq_add_stmt (&ckseq, orfailp);
1071
1072 /* Likewise for successors. */
1073 gassign *blkruns = gimple_build_assign (ckpart, unshare_expr (bit));
1074 gimple_seq_add_stmt (&ckseq, blkruns);
1075
1076 if (checkpoint)
1077 build_block_check (EXIT_BLOCK_PTR_FOR_FN (cfun));
1078 for (int i = 0, e = EDGE_COUNT (bb->succs); i < e; i++)
1079 build_block_check (EDGE_SUCC (bb, i)->dest);
1080
1081 gimple *orfails = gimple_build_assign (ckfail, BIT_IOR_EXPR,
1082 ckfail, ckpart);
1083 gimple_seq_add_stmt (&ckseq, orfails);
1084 }
1085 }
1086 };
1087
1088 /* Avoid checking before noreturn calls that are known (expected,
1089 really) to finish by throwing an exception, rather than by ending
1090 the program or looping forever. Such functions have to be
1091 annotated, with an attribute (expected_throw) or flag (ECF_XTHROW),
1092 so that exception-raising functions, such as C++'s __cxa_throw,
1093 __cxa_rethrow, and Ada's gnat_rcheck_*, gnat_reraise*,
1094 ada.exception.raise_exception*, and the language-independent
1095 unwinders could be detected here and handled differently from other
1096 noreturn functions. */
1097 static bool
1098 always_throwing_noreturn_call_p (gimple *stmt)
1099 {
1100 if (!is_a <gcall *> (stmt))
1101 return is_a <gresx *> (stmt);
1102
1103 gcall *call = as_a <gcall *> (stmt);
1104 return (gimple_call_noreturn_p (call)
1105 && gimple_call_expected_throw_p (call));
1106 }
1107
1108 /* Control flow redundancy hardening: record the execution path, and
1109 verify at exit that an expected path was taken. */
1110
1111 unsigned int
1112 pass_harden_control_flow_redundancy::execute (function *fun)
1113 {
1114 bool const check_at_escaping_exceptions
1115 = (flag_exceptions
1116 && flag_harden_control_flow_redundancy_check_exceptions);
1117 bool const check_before_noreturn_calls
1118 = flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NEVER;
1119 bool const check_before_nothrow_noreturn_calls
1120 = (check_before_noreturn_calls
1121 && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_NOTHROW);
1122 bool const check_before_throwing_noreturn_calls
1123 = (flag_exceptions
1124 && check_before_noreturn_calls
1125 && flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NOTHROW);
1126 bool const check_before_always_throwing_noreturn_calls
1127 = (flag_exceptions
1128 && check_before_noreturn_calls
1129 && flag_harden_control_flow_redundancy_check_noreturn >= HCFRNR_ALWAYS);
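/* The noreturn-call checking settings are cumulative: never, only
   nothrow noreturn calls, also noreturn calls that may throw, and
   always (even calls expected to throw); the comparisons above encode
   that ordering.  */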
1130 basic_block bb;
1131 basic_block bb_eh_cleanup = NULL;
1132
1133 if (flag_harden_control_flow_redundancy_skip_leaf)
1134 {
1135 bool found_calls_p = false;
1136
1137 FOR_EACH_BB_FN (bb, fun)
1138 {
1139 for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
1140 !gsi_end_p (gsi); gsi_prev (&gsi))
1141 if (is_a <gcall *> (gsi_stmt (gsi)))
1142 {
1143 found_calls_p = true;
1144 break;
1145 }
1146 if (found_calls_p)
1147 break;
1148 }
1149
1150 if (!found_calls_p)
1151 {
1152 if (dump_file)
1153 fprintf (dump_file,
1154 "Disabling CFR for leaf function, as requested\n");
1155
1156 return 0;
1157 }
1158 }
1159
1160 if (check_at_escaping_exceptions)
1161 {
1162 int lp_eh_cleanup = -1;
1163
1164 /* Record the preexisting blocks, to avoid visiting newly-created
1165 blocks. */
1166 auto_sbitmap to_visit (last_basic_block_for_fn (fun));
1167 bitmap_clear (to_visit);
1168
1169 FOR_EACH_BB_FN (bb, fun)
1170 bitmap_set_bit (to_visit, bb->index);
1171
1172 /* Scan the blocks for stmts with escaping exceptions, that
1173 wouldn't be denoted in the CFG, and associate them with an
1174 empty cleanup handler around the whole function. Walk backwards,
1175 so that stmts yet to be visited stay in the block even if we split it. */
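/* Conceptually, this wraps the function body in something like
     try { body } cleanup { <CFR check>; resx; }
   with the check itself inserted later, once the cleanup block is
   recorded in chkcall_blocks.  */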
1176 sbitmap_iterator it;
1177 unsigned i;
1178 EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
1179 {
1180 bb = BASIC_BLOCK_FOR_FN (fun, i);
1181
1182 for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
1183 !gsi_end_p (gsi); gsi_prev (&gsi))
1184 {
1185 gimple *stmt = gsi_stmt (gsi);
1186 if (!stmt_could_throw_p (fun, stmt))
1187 continue;
1188
1189 /* If it must not throw, or if it already has a handler,
1190 we need not worry about it. */
1191 if (lookup_stmt_eh_lp (stmt) != 0)
1192 continue;
1193
1194 /* Don't split blocks at, nor add EH edges to, tail
1195 calls; we will add verification before the call
1196 anyway. */
1197 if (is_a <gcall *> (stmt)
1198 && (gimple_call_must_tail_p (as_a <gcall *> (stmt))
1199 || gimple_call_tail_p (as_a <gcall *> (stmt))
1200 || returning_call_p (as_a <gcall *> (stmt))))
1201 continue;
1202
1203 if (!gsi_one_before_end_p (gsi))
1204 split_block (bb, stmt);
1205 /* A resx or noreturn call needs not be associated with
1206 the cleanup handler if we're going to add checking
1207 before it. We only test cases that didn't require
1208 block splitting because noreturn calls would always
1209 be at the end of blocks, and we test for zero
1210 successors because if there is an edge, it's not
1211 noreturn, as any EH edges would have already been
1212 caught by the lookup_stmt_eh_lp test above. */
1213 else if (check_before_noreturn_calls
1214 && EDGE_COUNT (bb->succs) == 0
1215 && (is_a <gresx *> (stmt)
1216 ? check_before_always_throwing_noreturn_calls
1217 : (!is_a <gcall *> (stmt)
1218 || !gimple_call_noreturn_p (stmt))
1219 ? (gcc_unreachable (), false)
1220 : (!flag_exceptions
1221 || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
1222 ? check_before_nothrow_noreturn_calls
1223 : always_throwing_noreturn_call_p (stmt)
1224 ? check_before_always_throwing_noreturn_calls
1225 : check_before_throwing_noreturn_calls))
1226 {
1227 if (dump_file)
1228 {
1229 fprintf (dump_file,
1230 "Bypassing cleanup for noreturn stmt"
1231 " in block %i:\n",
1232 bb->index);
1233 print_gimple_stmt (dump_file, stmt, 0);
1234 }
1235 continue;
1236 }
1237
1238 if (!bb_eh_cleanup)
1239 {
1240 bb_eh_cleanup = create_empty_bb (bb);
1241 if (dom_info_available_p (CDI_DOMINATORS))
1242 set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
1243 if (current_loops)
1244 add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);
1245
1246 /* Make the new block an EH cleanup for the call. */
1247 eh_region new_r = gen_eh_region_cleanup (NULL);
1248 eh_landing_pad lp = gen_eh_landing_pad (new_r);
1249 tree label = gimple_block_label (bb_eh_cleanup);
1250 lp->post_landing_pad = label;
1251 EH_LANDING_PAD_NR (label) = lp_eh_cleanup = lp->index;
1252
1253 /* Just propagate the exception.
1254 We will later insert the verifier call. */
1255 gimple_stmt_iterator ehgsi;
1256 ehgsi = gsi_after_labels (bb_eh_cleanup);
1257 gresx *resx = gimple_build_resx (new_r->index);
1258 gsi_insert_before (&ehgsi, resx, GSI_SAME_STMT);
1259
1260 if (dump_file)
1261 fprintf (dump_file,
1262 "Created cleanup block %i:\n",
1263 bb_eh_cleanup->index);
1264 }
1265 else if (dom_info_available_p (CDI_DOMINATORS))
1266 {
1267 basic_block immdom;
1268 immdom = get_immediate_dominator (CDI_DOMINATORS,
1269 bb_eh_cleanup);
1270 if (!dominated_by_p (CDI_DOMINATORS, bb, immdom))
1271 {
1272 immdom = nearest_common_dominator (CDI_DOMINATORS,
1273 immdom, bb);
1274 set_immediate_dominator (CDI_DOMINATORS,
1275 bb_eh_cleanup, immdom);
1276 }
1277 }
1278
1279 if (dump_file)
1280 {
1281 fprintf (dump_file,
1282 "Associated cleanup block with stmt in block %i:\n",
1283 bb->index);
1284 print_gimple_stmt (dump_file, stmt, 0);
1285 }
1286
1287 add_stmt_to_eh_lp (stmt, lp_eh_cleanup);
1288 /* Finally, wire the EH cleanup block into the CFG. */
1289 edge neeh = make_eh_edges (stmt);
1290 neeh->probability = profile_probability::never ();
1291 gcc_checking_assert (neeh->dest == bb_eh_cleanup);
1292 if (neeh->dest->count.initialized_p ())
1293 neeh->dest->count += neeh->count ();
1294 else
1295 neeh->dest->count = neeh->count ();
1296 }
1297 }
1298
1299 if (bb_eh_cleanup)
1300 {
1301 /* A cfg_cleanup after bb_eh_cleanup makes for a more compact
1302 rtcfg, and it avoids bb numbering differences when we split
1303 blocks because of trailing debug insns only. */
1304 cleanup_tree_cfg ();
1305 gcc_checking_assert (EDGE_COUNT (bb_eh_cleanup->succs) == 0);
1306 }
1307 }
1308
1309 /* These record blocks with calls that are to be preceded by
1310 checkpoints, such as noreturn calls (if so chosen), must-tail
1311 calls, potential early-marked tail calls, and returning calls (if
1312 so chosen). */
1313 int count_chkcall = 0;
1314 auto_sbitmap chkcall_blocks (last_basic_block_for_fn (fun));
1315 bitmap_clear (chkcall_blocks);
1316
1317 /* We wish to add verification at blocks without successors, such as
1318 noreturn calls (raising or not) and the reraise at the cleanup
1319 block, but not other reraises: they will go through the cleanup
1320 block. */
1321 if (check_before_noreturn_calls)
1322 FOR_EACH_BB_FN (bb, fun)
1323 {
1324 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1325 if (gsi_end_p (gsi))
1326 continue;
1327 gimple *stmt = gsi_stmt (gsi);
1328
1329 if (EDGE_COUNT (bb->succs) == 0)
1330 {
1331 /* A stmt at the end of a block without any successors is
1332 either a resx or a noreturn call without a local
1333 handler. Check that it's one of the desired
1334 checkpoints. */
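/* Reading the chained conditionals below: a resx is a checkpoint only
   under the always-throwing setting or when it is the cleanup block's
   reraise; a nothrow noreturn call is one under the nothrow setting;
   a noreturn call expected to throw, under the always-throwing
   setting; other noreturn calls, under the throwing setting; a stmt
   that can make an abnormal goto is not a checkpoint; anything else
   without successors is unexpected here.  */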
1335 if (flag_exceptions && is_a <gresx *> (stmt)
1336 ? (check_before_always_throwing_noreturn_calls
1337 || bb == bb_eh_cleanup)
1338 : (!is_a <gcall *> (stmt)
1339 || !gimple_call_noreturn_p (stmt))
1340 ? (stmt_can_make_abnormal_goto (stmt)
1341 /* ??? Check before indirect nonlocal goto, or
1342 calls thereof? */
1343 ? false
1344 /* Catch cases in which successors would be
1345 expected. */
1346 : (gcc_unreachable (), false))
1347 : (!flag_exceptions
1348 || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
1349 ? check_before_nothrow_noreturn_calls
1350 : always_throwing_noreturn_call_p (stmt)
1351 ? check_before_always_throwing_noreturn_calls
1352 : check_before_throwing_noreturn_calls)
1353 {
1354 if (dump_file)
1355 {
1356 fprintf (dump_file,
1357 "Scheduling check before stmt"
1358 " in succ-less block %i:\n",
1359 bb->index);
1360 print_gimple_stmt (dump_file, stmt, 0);
1361 }
1362
1363 if (bitmap_set_bit (chkcall_blocks, bb->index))
1364 count_chkcall++;
1365 else
1366 gcc_unreachable ();
1367 }
1368 continue;
1369 }
1370
1371 /* If there are no exceptions, it would seem like any noreturn
1372 call must have zero successor edges, but __builtin_return
1373 gets successor edges. We don't want to handle it here, it
1374 will be dealt with in sibcall_search_preds. Otherwise,
1375 check for blocks without non-EH successors, but skip those
1376 with resx stmts and edges (i.e., those other than that in
1377 bb_eh_cleanup), since those will go through bb_eh_cleanup,
1378 that will have been counted as noreturn above because it
1379 has no successors. */
1380 gcc_checking_assert (bb != bb_eh_cleanup
1381 || !check_at_escaping_exceptions);
1382 if (flag_exceptions && is_a <gresx *> (stmt)
1383 ? check_before_always_throwing_noreturn_calls
1384 : (!is_a <gcall *> (stmt)
1385 || !gimple_call_noreturn_p (stmt))
1386 ? false
1387 : (!flag_exceptions
1388 || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
1389 ? false /* rather than check_before_nothrow_noreturn_calls */
1390 : always_throwing_noreturn_call_p (stmt)
1391 ? check_before_always_throwing_noreturn_calls
1392 : check_before_throwing_noreturn_calls)
1393 {
1394 gcc_checking_assert (single_succ_p (bb)
1395 && (single_succ_edge (bb)->flags & EDGE_EH));
1396
1397 if (dump_file)
1398 {
1399 fprintf (dump_file,
1400 "Scheduling check before stmt"
1401 " in EH-succ block %i:\n",
1402 bb->index);
1403 print_gimple_stmt (dump_file, stmt, 0);
1404 }
1405
1406 if (bitmap_set_bit (chkcall_blocks, bb->index))
1407 count_chkcall++;
1408 else
1409 gcc_unreachable ();
1410 }
1411 }
1412 else if (bb_eh_cleanup)
1413 {
1414 if (bitmap_set_bit (chkcall_blocks, bb_eh_cleanup->index))
1415 count_chkcall++;
1416 else
1417 gcc_unreachable ();
1418 }
1419
1420 gcc_checking_assert (!bb_eh_cleanup
1421 || bitmap_bit_p (chkcall_blocks, bb_eh_cleanup->index));
1422
1423 /* If we have neither edges to exit nor noreturn calls (including the
1424 cleanup reraise), then we may skip instrumentation: that would
1425 amount to a function that ends with an infinite loop. */
1426 if (!count_chkcall
1427 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1428 {
1429 if (dump_file)
1430 fprintf (dump_file,
1431 "Disabling CFR, no exit paths to check\n");
1432
1433 return 0;
1434 }
1435
1436 /* Search for must-tail calls, early-marked potential tail calls,
1437 and, if requested, returning calls. As we introduce early checks
1438 before these calls, mark post-check blocks and schedule checks on the edges into them. */
1439 int count_postchk = 0;
1440 auto_sbitmap postchk_blocks (last_basic_block_for_fn (fun));
1441 bitmap_clear (postchk_blocks);
1442 chk_edges_t chk_edges;
1443 hardcfr_sibcall_search_preds (EXIT_BLOCK_PTR_FOR_FN (fun), chk_edges,
1444 count_chkcall, chkcall_blocks,
1445 count_postchk, postchk_blocks,
1446 NULL);
1447
1448 rt_bb_visited vstd (chk_edges.length () + count_chkcall);
1449
1450 auto_sbitmap combined_blocks (last_basic_block_for_fn (fun));
1451 bitmap_copy (combined_blocks, chkcall_blocks);
1452 int i;
1453 edge *e;
1454 FOR_EACH_VEC_ELT (chk_edges, i, e)
1455 if (!bitmap_set_bit (combined_blocks, (*e)->src->index))
1456 /* There may be multiple chk_edges with the same src block;
1457 guard against overlaps with chkcall_blocks only. */
1458 gcc_assert (!bitmap_bit_p (chkcall_blocks, (*e)->src->index));
1459
1460 /* Visit blocks in index order, because building rtcfg depends on
1461 that. Blocks must be compact, which the cleanup_cfg requirement
1462 ensures. This would also enable FOR_EACH_BB_FN to be used to
1463 iterate in index order, but bb_eh_cleanup block splits and
1464 insertions change that. */
1465 gcc_checking_assert (n_basic_blocks_for_fn (fun)
1466 == last_basic_block_for_fn (fun));
1467 for (int i = NUM_FIXED_BLOCKS; i < n_basic_blocks_for_fn (fun); i++)
1468 {
1469 bb = BASIC_BLOCK_FOR_FN (fun, i);
1470 gcc_checking_assert (bb->index == i);
1471 vstd.visit (bb, bitmap_bit_p (combined_blocks, i),
1472 bitmap_bit_p (postchk_blocks, i));
1473 }
1474
1475 vstd.check (chk_edges, count_chkcall, chkcall_blocks);
1476
1477 return
1478 TODO_update_ssa
1479 | TODO_cleanup_cfg
1480 | TODO_verify_il;
1481 }
1482
1483 /* Instantiate a hardcfr pass. */
1484
1485 gimple_opt_pass *
1486 make_pass_harden_control_flow_redundancy (gcc::context *ctxt)
1487 {
1488 return new pass_harden_control_flow_redundancy (ctxt);
1489 }