]> git.ipfire.org Git - people/ms/gcc.git/blob - gcc/gimple-harden-conditionals.cc
c++: namespace-scoped friend in local class [PR69410]
[people/ms/gcc.git] / gcc / gimple-harden-conditionals.cc
1 /* Harden conditionals.
2 Copyright (C) 2021-2023 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <oliva@adacore.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "basic-block.h"
36 #include "cfghooks.h"
37 #include "cfgloop.h"
38 #include "tree-eh.h"
39 #include "sbitmap.h"
40 #include "diagnostic.h"
41 #include "intl.h"
42
namespace {

/* These passes introduce redundant, but reversed conditionals at
   compares, such as those used in conditional branches, and those
   that compute boolean results.  This doesn't make much sense for
   abstract CPUs, but this kind of hardening may avoid undesirable
   execution paths on actual CPUs under such attacks as of power
   deprivation.  */

/* Define a pass to harden conditionals other than branches.  */

const pass_data pass_data_harden_compares = {
  GIMPLE_PASS,
  "hardcmp",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};

/* Pass object for -fharden-compares; the actual transformation is in
   pass_harden_compares::execute, defined later in this file.  */

class pass_harden_compares : public gimple_opt_pass
{
public:
  pass_harden_compares (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_compares, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_compares (m_ctxt);
  }
  /* Only run when -fharden-compares is enabled.  */
  bool gate (function *) final override
  {
    return flag_harden_compares;
  }
  unsigned int execute (function *) final override;
};

/* Define a pass to harden conditionals in branches.  This pass must
   run after the above, otherwise it will re-harden the checks
   introduced by the above.  */

const pass_data pass_data_harden_conditional_branches = {
  GIMPLE_PASS,
  "hardcbr",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  0,	    // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};

/* Pass object for -fharden-conditional-branches; the actual
   transformation is in pass_harden_conditional_branches::execute,
   defined later in this file.  */

class pass_harden_conditional_branches : public gimple_opt_pass
{
public:
  pass_harden_conditional_branches (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_conditional_branches (m_ctxt);
  }
  /* Only run when -fharden-conditional-branches is enabled.  */
  bool gate (function *) final override
  {
    return flag_harden_conditional_branches;
  }
  unsigned int execute (function *) final override;
};

}
121
/* If VAL is an SSA name, return an SSA name holding the same value,
   but without the compiler's knowing that it holds the same value, so
   that uses thereof can't be optimized the way VAL might.  Insert
   stmts that initialize it before *GSIP, with LOC.

   Otherwise, VAL must be an invariant, returned unchanged.  */

static inline tree
detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
{
  /* Constants and other non-SSA operands need no detaching: they are
     invariants, so reusing them verbatim gives the optimizers no
     extra knowledge to exploit.  */
  if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
    {
      gcc_checking_assert (is_gimple_min_invariant (val));
      return val;
    }

  /* Create a SSA "copy" of VAL.  It would be nice to have it named
     after the corresponding variable, but sharing the same decl is
     problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and
     copying just the identifier hits -fcompare-debug failures.  */
  tree ret = make_ssa_name (TREE_TYPE (val));

  /* Some modes won't fit in general regs, so we fall back to memory
     for them.  ??? It would be ideal to try to identify an alternate,
     wider or more suitable register class, and use the corresponding
     constraint, but there's no logic to go from register class to
     constraint, even if there is a corresponding constraint, and even
     if we could enumerate constraints, we can't get to their string
     either.  So this will do for now.  */
  bool need_memory = true;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
  if (mode != BLKmode)
    for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
	  && targetm.hard_regno_mode_ok (i, mode))
	{
	  need_memory = false;
	  break;
	}

  tree asminput = val;
  tree asmoutput = ret;
  const char *constraint_out = need_memory ? "=m" : "=g";
  const char *constraint_in = need_memory ? "m" : "0";

  if (need_memory)
    {
      /* Round-trip through an addressable temporary: copy VAL in
	 here, run the asm on the temporary, and copy the result back
	 out to RET below.  */
      tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
      mark_addressable (temp);

      gassign *copyin = gimple_build_assign (temp, asminput);
      gimple_set_location (copyin, loc);
      gsi_insert_before (gsip, copyin, GSI_SAME_STMT);

      asminput = asmoutput = temp;
    }

  /* Output an asm statement with matching input and output.  It does
     nothing, but after it the compiler no longer knows the output
     still holds the same value as the input.  */
  vec<tree, va_gc> *inputs = NULL;
  vec<tree, va_gc> *outputs = NULL;
  vec_safe_push (outputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_out),
					    constraint_out)),
		  asmoutput));
  vec_safe_push (inputs,
		 build_tree_list
		 (build_tree_list
		  (NULL_TREE, build_string (strlen (constraint_in),
					    constraint_in)),
		  asminput));
  gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
				       NULL, NULL);
  gimple_set_location (detach, loc);
  gsi_insert_before (gsip, detach, GSI_SAME_STMT);

  if (need_memory)
    {
      /* Copy the asm's (memory) output into RET, and make that copy
	 RET's defining stmt.  */
      gassign *copyout = gimple_build_assign (ret, asmoutput);
      gimple_set_location (copyout, loc);
      gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
      SSA_NAME_DEF_STMT (ret) = copyout;

      /* Clobber the temporary after the copy-out, so the rest of the
	 compilation knows its value is no longer needed.  */
      gassign *clobber = gimple_build_assign (asmoutput,
					      build_clobber
					      (TREE_TYPE (asmoutput)));
      gimple_set_location (clobber, loc);
      gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
    }
  else
    /* In the register case the asm itself defines RET.  */
    SSA_NAME_DEF_STMT (ret) = detach;

  return ret;
}
219
/* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
   location LOC.  *GSIP must be at the end of a basic block.  The succ
   edge out of the block becomes the true or false edge opposite to
   that in FLAGS.  Create a new block with a single trap stmt, in the
   cold partition if the function is partitioned, and a new edge to
   it as the other edge for the cond.  */

static inline void
insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
		       int flags, enum tree_code cop, tree lhs, tree rhs)
{
  basic_block chk = gsi_bb (*gsip);

  gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
  gimple_set_location (cond, loc);
  gsi_insert_before (gsip, cond, GSI_SAME_STMT);

  /* Build the trap block: a fresh empty block holding only a call to
     __builtin_trap, marked control-altering since it doesn't return.  */
  basic_block trp = create_empty_bb (chk);

  gimple_stmt_iterator gsit = gsi_after_labels (trp);
  gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_call_set_ctrl_altering (trap, true);
  gimple_set_location (trap, loc);
  gsi_insert_before (&gsit, trap, GSI_SAME_STMT);

  if (dump_file)
    fprintf (dump_file,
	     "Adding reversed compare to block %i, and trap to block %i\n",
	     chk->index, trp->index);

  /* Keep the trap out of the hot partition when the function is
     partitioned.  */
  if (BB_PARTITION (chk))
    BB_SET_PARTITION (trp, BB_COLD_PARTITION);

  int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  gcc_assert (true_false_flag);
  int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  /* Remove the fallthru bit, and set the truth value for the
     preexisting edge and for the newly-created one.  In hardcbr,
     FLAGS is taken from the edge of the original cond expr that we're
     dealing with, so the reversed compare is expected to yield the
     negated result, and the same result calls for a trap.  In
     hardcmp, we're comparing the boolean results of the original and
     of the reversed compare, so we're passed FLAGS to trap on
     equality.  */
  single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
  single_succ_edge (chk)->flags |= neg_true_false_flag;
  single_succ_edge (chk)->probability = profile_probability::always ();
  edge e = make_edge (chk, trp, true_false_flag);
  e->goto_locus = loc;
  /* The trap edge is never expected to be taken.  */
  e->probability = profile_probability::never ();

  /* Keep dominators and the loop structure up to date: the trap block
     is dominated by the check block, and is placed in the root of the
     loop tree.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, trp, chk);
  if (current_loops)
    add_bb_to_loop (trp, current_loops->tree_root);
}
277
278 /* Split edge E, and insert_check_and_trap (see above) in the
279 newly-created block, using detached copies of LHS's and RHS's
280 values (see detach_value above) for the COP compare. */
281
282 static inline void
283 insert_edge_check_and_trap (location_t loc, edge e,
284 enum tree_code cop, tree lhs, tree rhs)
285 {
286 int flags = e->flags;
287 basic_block src = e->src;
288 basic_block dest = e->dest;
289 location_t eloc = e->goto_locus;
290
291 basic_block chk = split_edge (e);
292 e = NULL;
293
294 single_pred_edge (chk)->goto_locus = loc;
295 single_succ_edge (chk)->goto_locus = eloc;
296
297 if (dump_file)
298 fprintf (dump_file,
299 "Splitting edge %i->%i into block %i\n",
300 src->index, dest->index, chk->index);
301
302 gimple_stmt_iterator gsik = gsi_after_labels (chk);
303
304 bool same_p = (lhs == rhs);
305 lhs = detach_value (loc, &gsik, lhs);
306 rhs = same_p ? lhs : detach_value (loc, &gsik, rhs);
307
308 insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
309 }
310
/* Harden cond stmts at the end of FUN's blocks.  */

unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      /* Only blocks that end in a cond stmt are of interest.  */
      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
	continue;

      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
	continue;

      /* Turn:

	 if (x op y) goto l1; else goto l2;

	 into:

	 if (x op y) goto l1'; else goto l2';
	 l1': if (x' cop y') goto l1'trap; else goto l1;
	 l1'trap: __builtin_trap ();
	 l2': if (x' cop y') goto l2; else goto l2'trap;
	 l2'trap: __builtin_trap ();

	 where cop is a complementary boolean operation to op; l1', l1'trap,
	 l2' and l2'trap are newly-created labels; and x' and y' hold the same
	 value as x and y, but in a way that does not enable the compiler to
	 optimize the redundant compare away.
      */

      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      /* Compute the complementary compare, taking NaNs into account;
	 invert_tree_comparison returns ERROR_MARK when no
	 complementary code exists.  */
      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
	/* ??? Can we do better?  */
	continue;

      /* Insert the reversed check-and-trap on both outgoing edges of
	 the cond.  */
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}
375
376 /* Instantiate a hardcbr pass. */
377
378 gimple_opt_pass *
379 make_pass_harden_conditional_branches (gcc::context *ctxt)
380 {
381 return new pass_harden_conditional_branches (ctxt);
382 }
383
384 /* Return the fallthru edge of a block whose other edge is an EH
385 edge. If EHP is not NULL, store the EH edge in it. */
386 static inline edge
387 non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
388 {
389 gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);
390
391 edge ret = find_fallthru_edge (bb->succs);
392
393 int eh_idx = EDGE_SUCC (bb, 0) == ret;
394 edge eh = EDGE_SUCC (bb, eh_idx);
395
396 gcc_checking_assert (!(ret->flags & EDGE_EH)
397 && (eh->flags & EDGE_EH));
398
399 if (ehp)
400 *ehp = eh;
401
402 return ret;
403 }
404
/* Harden boolean-yielding compares in FUN.  */

unsigned int
pass_harden_compares::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      /* Scan the block's stmts from last to first, looking for
	 boolean-yielding compare assignments.  */
      for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
	   !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
	  if (!asgn)
	    continue;

	  /* Turn:

	     z = x op y;

	     into:

	     z = x op y;
	     z' = x' cop y';
	     if (z == z') __builtin_trap ();

	     where cop is a complementary boolean operation to op; and x'
	     and y' hold the same value as x and y, but in a way that does
	     not enable the compiler to optimize the redundant compare
	     away.
	  */

	  enum tree_code op = gimple_assign_rhs_code (asgn);

	  enum tree_code cop;

	  /* Only handle comparison codes with an invertible
	     counterpart; everything else is left alone.  */
	  switch (op)
	    {
	    case EQ_EXPR:
	    case NE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	    case LTGT_EXPR:
	    case UNEQ_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case ORDERED_EXPR:
	    case UNORDERED_EXPR:
	      cop = invert_tree_comparison (op,
					    HONOR_NANS
					    (gimple_assign_rhs1 (asgn)));

	      if (cop == ERROR_MARK)
		/* ??? Can we do better?  */
		continue;

	      break;

	      /* ??? Maybe handle these too?  */
	    case TRUTH_NOT_EXPR:
	      /* ??? The code below assumes binary ops, it would have to
		 be adjusted for TRUTH_NOT_EXPR, since it's unary.  */
	    case TRUTH_ANDIF_EXPR:
	    case TRUTH_ORIF_EXPR:
	    case TRUTH_AND_EXPR:
	    case TRUTH_OR_EXPR:
	    case TRUTH_XOR_EXPR:
	    default:
	      continue;
	    }

	  /* These are the operands for the verification.  */
	  tree lhs = gimple_assign_lhs (asgn);
	  tree op1 = gimple_assign_rhs1 (asgn);
	  tree op2 = gimple_assign_rhs2 (asgn);
	  location_t loc = gimple_location (asgn);

	  /* Vector booleans can't be used in conditional branches.  ???
	     Can we do better?  How to reduce compare and
	     reversed-compare result vectors to a single boolean?  */
	  if (VECTOR_TYPE_P (TREE_TYPE (op1)))
	    continue;

	  /* useless_type_conversion_p enables conversions from 1-bit
	     integer types to boolean to be discarded.  */
	  gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
			       || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
				   && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));

	  /* RHS will hold the reversed compare's result (z' above).  */
	  tree rhs = copy_ssa_name (lhs);

	  gimple_stmt_iterator gsi_split = gsi;
	  /* Don't separate the original assignment from debug stmts
	     that might be associated with it, and arrange to split the
	     block after debug stmts, so as to make sure the split block
	     won't be debug stmts only.  */
	  gsi_next_nondebug (&gsi_split);

	  /* A compare that may throw must end its block; insert the
	     check in a block split off from the non-EH edge.  */
	  bool throwing_compare_p = stmt_ends_bb_p (asgn);
	  if (throwing_compare_p)
	    {
	      basic_block nbb = split_edge (non_eh_succ_edge
					    (gimple_bb (asgn)));
	      gsi_split = gsi_start_bb (nbb);

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting non-EH edge from block %i into %i"
			 " after a throwing compare\n",
			 gimple_bb (asgn)->index, nbb->index);
	    }

	  /* Detach each operand once; reuse the first detached copy
	     when both operands are the same value.  */
	  bool same_p = (op1 == op2);
	  op1 = detach_value (loc, &gsi_split, op1);
	  op2 = same_p ? op1 : detach_value (loc, &gsi_split, op2);

	  /* Emit the reversed compare: z' = x' cop y'.  */
	  gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
	  gimple_set_location (asgnck, loc);
	  gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);

	  /* We wish to insert a cond_expr after the compare, so arrange
	     for it to be at the end of a block if it isn't, and for it
	     to have a single successor in case there's more than
	     one, as in PR104975.  */
	  if (!gsi_end_p (gsi_split)
	      || !single_succ_p (gsi_bb (gsi_split)))
	    {
	      if (!gsi_end_p (gsi_split))
		gsi_prev (&gsi_split);
	      else
		gsi_split = gsi_last_bb (gsi_bb (gsi_split));
	      basic_block obb = gsi_bb (gsi_split);
	      basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
	      gsi_next (&gsi_split);
	      gcc_checking_assert (gsi_end_p (gsi_split));

	      single_succ_edge (bb)->goto_locus = loc;

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting block %i into %i"
			 " before the conditional trap branch\n",
			 obb->index, nbb->index);
	    }

	  /* If the check assignment must end a basic block, we can't
	     insert the conditional branch in the same block, so split
	     the block again, and prepare to insert the conditional
	     branch in the new block.

	     Also assign an EH region to the compare.  Even though it's
	     unlikely that the hardening compare will throw after the
	     original compare didn't, the compiler won't even know that
	     it's the same compare operands, so add the EH edge anyway.  */
	  if (throwing_compare_p)
	    {
	      /* Give the check compare the same EH landing pad as the
		 original, and materialize its EH edge.  */
	      add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
	      make_eh_edges (asgnck);

	      edge ckeh;
	      basic_block nbb = split_edge (non_eh_succ_edge
					    (gimple_bb (asgnck), &ckeh));
	      gsi_split = gsi_start_bb (nbb);

	      if (dump_file)
		fprintf (dump_file,
			 "Splitting non-EH edge from block %i into %i after"
			 " the newly-inserted reversed throwing compare\n",
			 gimple_bb (asgnck)->index, nbb->index);

	      /* Both EH edges reach the same handler block (asserted
		 below); give the handler's PHIs args for the new EH
		 edge, copied from the original compare's EH edge.  */
	      if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
		{
		  edge aseh;
		  non_eh_succ_edge (gimple_bb (asgn), &aseh);

		  gcc_checking_assert (aseh->dest == ckeh->dest);

		  for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
		       !gsi_end_p (psi); gsi_next (&psi))
		    {
		      gphi *phi = psi.phi ();
		      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh,
				   gimple_phi_arg_location_from_edge (phi, aseh));
		    }

		  if (dump_file)
		    fprintf (dump_file,
			     "Copying PHI args in EH block %i from %i to %i\n",
			     aseh->dest->index, aseh->src->index,
			     ckeh->src->index);
		}
	    }

	  gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));

	  /* Trap when the two compares agree (z == z' taken on its
	     TRUE edge): since cop complements op, agreement means the
	     hardware misbehaved.  */
	  insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
				 EQ_EXPR, lhs, rhs);
	}
    }

  return 0;
}
622
623 /* Instantiate a hardcmp pass. */
624
625 gimple_opt_pass *
626 make_pass_harden_compares (gcc::context *ctxt)
627 {
628 return new pass_harden_compares (ctxt);
629 }