/* Optimize and expand sanitizer functions.
   Copyright (C) 2014-2020 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pass.h"
#include "tree-ssa-operands.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "ubsan.h"
#include "tree-hash-traits.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "gimplify.h"
#include "gimple-walk.h"
#include "cfghooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "varasm.h"

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk.  */
  bool visited_p;
};

/* If T has a single definition of form T = T2, return T2.  */

static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple *g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
	return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}

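/* For instance (an illustrative sketch): if _2 is defined only by

     _2 = &a[i_1];

   then maybe_get_single_definition (_2) returns &a[i_1].  The ASAN_CHECK
   bookkeeping below uses this to track checks against the underlying
   address expression in addition to the SSA name itself.  */
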
/* Tree triplet for vptr_check_map.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};

/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet>
{
  typedef sanopt_tree_triplet value_type;
  typedef sanopt_tree_triplet compare_type;

  static hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
	   && operand_equal_p (ref1.t2, ref2.t2, 0)
	   && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  static void
  mark_deleted (sanopt_tree_triplet &ref)
  {
    ref.t1 = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_triplet &ref)
  {
    ref.t1 = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == NULL;
  }
};

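/* These traits make sanopt_tree_triplet usable as a hash_map key, e.g.
   (a sketch of the usage in sanopt_ctx below):

     hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > map;
     sanopt_tree_triplet key = { t1, t2, t3 };
     auto_vec<gimple *> &v = map.get_or_insert (key);

   Empty and deleted slots are encoded in the T1 field alone, which is
   why mark_empty and mark_deleted only touch T1.  */
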
/* Tree couple for ptr_check_map.  */
struct sanopt_tree_couple
{
  tree ptr;
  bool pos_p;
};

/* Traits class for tree couple hash maps below.  */

struct sanopt_tree_couple_hash : typed_noop_remove <sanopt_tree_couple>
{
  typedef sanopt_tree_couple value_type;
  typedef sanopt_tree_couple compare_type;

  static hashval_t
  hash (const sanopt_tree_couple &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.ptr, hstate);
    hstate.add_int (ref.pos_p);
    return hstate.end ();
  }

  static bool
  equal (const sanopt_tree_couple &ref1, const sanopt_tree_couple &ref2)
  {
    return operand_equal_p (ref1.ptr, ref2.ptr, 0)
	   && ref1.pos_p == ref2.pos_p;
  }

  static void
  mark_deleted (sanopt_tree_couple &ref)
  {
    ref.ptr = reinterpret_cast<tree> (1);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (sanopt_tree_couple &ref)
  {
    ref.ptr = NULL;
  }

  static bool
  is_deleted (const sanopt_tree_couple &ref)
  {
    return ref.ptr == reinterpret_cast<tree> (1);
  }

  static bool
  is_empty (const sanopt_tree_couple &ref)
  {
    return ref.ptr == NULL;
  }
};

/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

class sanopt_ctx
{
public:
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple *> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;

  /* This map maps a couple (tree and boolean) to a vector of UBSAN_PTR
     call statements that check for pointer overflow.  */
  hash_map<sanopt_tree_couple_hash, auto_vec<gimple *> > ptr_check_map;

  /* Number of IFN_ASAN_CHECK statements.  */
  int asan_num_accesses;

  /* True when the current function contains an ASAN_MARK.  */
  bool contains_asan_mark;
};

/* Return true if there might be any call to free/munmap operation
   on any path in between DOM (which should be imm(BB)) and BB.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
	   && pred_info->imm_dom_path_with_freeing_call_p)
	  || (pred_info->has_freeing_call_computed_p
	      && pred_info->has_freeing_call_p))
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if (pred_info->has_freeing_call_computed_p)
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gasm *asm_stmt;

	  if ((is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
	      || ((asm_stmt = dyn_cast <gasm *> (stmt))
		  && (gimple_asm_clobbers_memory_p (asm_stmt)
		      || gimple_asm_volatile_p (asm_stmt))))
	    {
	      pred_info->has_freeing_call_p = true;
	      break;
	    }
	}

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
	continue;

      basic_block src;
      for (src = e->src; src != dom; )
	{
	  sanopt_info *pred_info = (sanopt_info *) src->aux;
	  if (pred_info->being_visited_p)
	    break;
	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
	  if (imm_dom_path_with_freeing_call (src, imm))
	    {
	      info->imm_dom_path_with_freeing_call_computed_p = true;
	      info->imm_dom_path_with_freeing_call_p = true;
	      info->being_visited_p = false;
	      return true;
	    }
	  src = imm;
	}
    }

  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}

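/* E.g. (an illustrative sketch) for the diamond

	  DOM
	 /   \
       BB1   BB2 (calls free)
	 \   /
	  BB

   imm_dom_path_with_freeing_call (BB, DOM) returns true: BB2 lies on a
   DOM->BB path and contains a freeing call, so a check recorded in DOM
   does not guarantee the pointed-to memory is still live in BB.  */
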
/* Get the first dominating check from the list of stored checks.
   Non-dominating checks are silently dropped.  */

static gimple *
maybe_get_dominating_check (auto_vec<gimple *> &v)
{
  for (; !v.is_empty (); v.pop ())
    {
      gimple *g = v.last ();
      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
      if (!si->visited_p)
	/* At this point we shouldn't have any statements
	   that aren't dominating the current BB.  */
	return g;
    }
  return NULL;
}

/* Optimize away redundant UBSAN_NULL calls.  */

static bool
maybe_optimize_ubsan_null_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
     can drop this one.  But only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If this is a NULL pointer check where we had segv anyway, we can
     remove it.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
	  || kind == UBSAN_STORE_OF
	  || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
	     || flag_sanitize_undefined_trap_on_error
	     || gimple_location (g) == gimple_location (stmt);
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
	     || flag_sanitize_undefined_trap_on_error
	     || gimple_location (g) == gimple_location (stmt);

  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}

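/* As an illustrative example (not from a particular testcase), with
   -fsanitize=null the fragment

     int a = p->f;	// UBSAN_NULL (p_1, UBSAN_MEMBER_ACCESS, 0)
     int b = p->f;	// dominated by the check above

   needs only the first UBSAN_NULL: the dominating check has zero
   alignment and a dereferencing kind, so REMOVE is set for the second
   one.  */
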
/* Return true when pointer PTR for a given CUR_OFFSET is already sanitized
   in a given sanitization context CTX.  */

static bool
has_dominating_ubsan_ptr_check (sanopt_ctx *ctx, tree ptr,
				offset_int &cur_offset)
{
  bool pos_p = !wi::neg_p (cur_offset);
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = pos_p;

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    return false;

  /* We already have recorded a UBSAN_PTR check for this pointer.  Perhaps
     we can drop this one.  But only if this check doesn't specify a larger
     offset.  */
  tree offset = gimple_call_arg (g, 1);
  gcc_assert (TREE_CODE (offset) == INTEGER_CST);
  offset_int ooffset = wi::sext (wi::to_offset (offset), POINTER_SIZE);

  if (pos_p)
    {
      if (wi::les_p (cur_offset, ooffset))
	return true;
    }
  else if (wi::les_p (ooffset, cur_offset))
    return true;

  return false;
}

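/* E.g. (illustrative): a dominating UBSAN_PTR (p_1, 8) covers a later
   UBSAN_PTR (p_1, 4), because if p_1 + 8 does not overflow, neither
   does p_1 + 4.  Negative offsets live in a separate POS_P == false
   bucket, since the two directions do not subsume each other.  */
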
/* Record the UBSAN_PTR check STMT in context CTX: register that pointer
   PTR with offset OFFSET is handled by that GIMPLE statement.  */

static void
record_ubsan_ptr_check_stmt (sanopt_ctx *ctx, gimple *stmt, tree ptr,
			     const offset_int &offset)
{
  sanopt_tree_couple couple;
  couple.ptr = ptr;
  couple.pos_p = !wi::neg_p (offset);

  auto_vec<gimple *> &v = ctx->ptr_check_map.get_or_insert (couple);
  v.safe_push (stmt);
}

/* Optimize away redundant UBSAN_PTR calls.  */

static bool
maybe_optimize_ubsan_ptr_ifn (sanopt_ctx *ctx, gimple *stmt)
{
  poly_int64 bitsize, pbitpos;
  machine_mode mode;
  int volatilep = 0, reversep, unsignedp = 0;
  tree offset;

  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree off = gimple_call_arg (stmt, 1);

  if (TREE_CODE (off) != INTEGER_CST)
    return false;

  if (integer_zerop (off))
    return true;

  offset_int cur_offset = wi::sext (wi::to_offset (off), POINTER_SIZE);
  if (has_dominating_ubsan_ptr_check (ctx, ptr, cur_offset))
    return true;

  tree base = ptr;
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base = TREE_OPERAND (base, 0);

      HOST_WIDE_INT bitpos;
      base = get_inner_reference (base, &bitsize, &pbitpos, &offset, &mode,
				  &unsignedp, &reversep, &volatilep);
      if ((offset == NULL_TREE || TREE_CODE (offset) == INTEGER_CST)
	  && DECL_P (base)
	  && !DECL_REGISTER (base)
	  && pbitpos.is_constant (&bitpos))
	{
	  offset_int expr_offset;
	  if (offset)
	    expr_offset = wi::to_offset (offset) + bitpos / BITS_PER_UNIT;
	  else
	    expr_offset = bitpos / BITS_PER_UNIT;
	  expr_offset = wi::sext (expr_offset, POINTER_SIZE);
	  offset_int total_offset = expr_offset + cur_offset;
	  if (total_offset != wi::sext (total_offset, POINTER_SIZE))
	    {
	      record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);
	      return false;
	    }

	  /* If BASE is a fixed size automatic variable or
	     global variable defined in the current TU, we don't have
	     to instrument anything if the offset is within the bounds
	     of the variable.  */
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && DECL_SIZE_UNIT (base)
	      && TREE_CODE (DECL_SIZE_UNIT (base)) == INTEGER_CST
	      && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
	    {
	      offset_int base_size = wi::to_offset (DECL_SIZE_UNIT (base));
	      if (!wi::neg_p (expr_offset)
		  && !wi::neg_p (total_offset)
		  && wi::les_p (total_offset, base_size))
		return true;
	    }

	  /* Following expression: UBSAN_PTR (&MEM_REF[ptr + x], y) can be
	     handled as follows:

	     1) sign (x) == sign (y), then check for dominating check of (x + y)
	     2) sign (x) != sign (y), then first check if we have a dominating
		check for ptr + x.  If so, then we have 2 situations:
		a) sign (x) == sign (x + y), here we are done, example:
		   UBSAN_PTR (&MEM_REF[ptr + 100], -50)
		b) check for dominating check of ptr + x + y.  */

	  bool sign_cur_offset = !wi::neg_p (cur_offset);
	  bool sign_expr_offset = !wi::neg_p (expr_offset);

	  tree base_addr
	    = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (base)), base);

	  bool add = false;
	  if (sign_cur_offset == sign_expr_offset)
	    {
	      if (has_dominating_ubsan_ptr_check (ctx, base_addr, total_offset))
		return true;
	      else
		add = true;
	    }
	  else
	    {
	      if (!has_dominating_ubsan_ptr_check (ctx, base_addr, expr_offset))
		; /* Don't record base_addr + expr_offset, it's not a guarding
		     check.  */
	      else
		{
		  bool sign_total_offset = !wi::neg_p (total_offset);
		  if (sign_expr_offset == sign_total_offset)
		    return true;
		  else
		    {
		      if (has_dominating_ubsan_ptr_check (ctx, base_addr,
							  total_offset))
			return true;
		      else
			add = true;
		    }
		}
	    }

	  /* Record a new dominating check for base_addr + total_offset.  */
	  if (add && !operand_equal_p (base, base_addr, 0))
	    record_ubsan_ptr_check_stmt (ctx, stmt, base_addr, total_offset);
	}
    }

  /* For this PTR we don't have any UBSAN_PTR stmts recorded, so there's
     nothing to optimize yet.  */
  record_ubsan_ptr_check_stmt (ctx, stmt, ptr, cur_offset);

  return false;
}

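/* An illustrative sketch of the DECL-based short cut above: for

     char buf[64];
     ... UBSAN_PTR (&buf[8], 16) ...

   BASE is BUF, EXPR_OFFSET is 8 and TOTAL_OFFSET is 24, which lies
   within DECL_SIZE_UNIT (buf) == 64, so the check is deleted outright
   without recording anything.  */
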
/* Optimize away redundant UBSAN_VPTR calls.  The second argument
   is the value loaded from the virtual table, so rely on FRE to find out
   when we can actually optimize.  */

static bool
maybe_optimize_ubsan_vptr_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 5);
  sanopt_tree_triplet triplet;
  triplet.t1 = gimple_call_arg (stmt, 0);
  triplet.t2 = gimple_call_arg (stmt, 1);
  triplet.t3 = gimple_call_arg (stmt, 3);

  auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
  gimple *g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  return true;
}

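/* Illustratively, two virtual calls through the same pointer, e.g.

     b->foo (); b->foo ();

   yield UBSAN_VPTR calls with an identical (pointer, vptr, type)
   triplet once FRE has proved that the two loaded vptr values are the
   same, so the dominated call is removed.  */
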
/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  */

static bool
can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple *g;
  gimple *to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      if (gimple_uid (g) < si->freeing_call_events)
	{
	  /* If there is a potentially freeing call after g in gbb, we should
	     remove it from the vector; it can't be used in the
	     optimization.  */
	  cleanup = true;
	  continue;
	}

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've checked only smaller length than we want to check now,
	 we can't remove the current stmt.  If g is in the same basic block,
	 we want to remove it though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
	{
	  if (gbb == bb)
	    {
	      to_pop = g;
	      cleanup = true;
	    }
	  continue;
	}

      while (last_bb != gbb)
	{
	  /* Paths from last_bb to bb have been checked before.
	     gbb is necessarily a dominator of last_bb, but not necessarily
	     immediate dominator.  */
	  if (((sanopt_info *) last_bb->aux)->freeing_call_events)
	    break;

	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
	  gcc_assert (imm);
	  if (imm_dom_path_with_freeing_call (last_bb, imm))
	    break;

	  last_bb = imm;
	}
      if (last_bb == gbb)
	remove = true;
      break;
    }

  if (cleanup)
    {
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
	if (v[i] != to_pop
	    && (gimple_uid (v[i])
		== ((sanopt_info *)
		    gimple_bb (v[i])->aux)->freeing_call_events))
	  {
	    if (i != j)
	      v[j] = v[i];
	    j++;
	  }
      v.truncate (j);
    }

  return remove;
}

/* Optimize away redundant ASAN_CHECK calls.  */

static bool
maybe_optimize_asan_check_ifn (class sanopt_ctx *ctx, gimple *stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple *> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple *g = maybe_get_dominating_check (*ptr_checks);
  gimple *g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
	 nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
    }

  return remove;
}

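/* An illustrative sketch: for

     a[i] = 1;	// ASAN_CHECK (<flags>, &a[i], 4, 4)
     a[i] = 2;	// same address and length

   the second ASAN_CHECK is dropped, provided no possibly freeing call
   or memory-clobbering asm (tracked via FREEING_CALL_EVENTS above) can
   occur between the two checks.  */
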
/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, class sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
	{
	  /* Handle asm volatile or asm with "memory" clobber
	     the same as a potentially freeing call.  */
	  gasm *asm_stmt = dyn_cast <gasm *> (stmt);
	  if (asm_stmt
	      && asan_check_optimize
	      && (gimple_asm_clobbers_memory_p (asm_stmt)
		  || gimple_asm_volatile_p (asm_stmt)))
	    info->freeing_call_events++;
	  gsi_next (&gsi);
	  continue;
	}

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
	info->freeing_call_events++;

      /* If __asan_before_dynamic_init ("module"); is followed by
	 __asan_after_dynamic_init (); without intervening memory loads/stores,
	 there is nothing to guard, so optimize both away.  */
      if (asan_check_optimize
	  && gimple_call_builtin_p (stmt, BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT))
	{
	  use_operand_p use;
	  gimple *use_stmt;
	  if (single_imm_use (gimple_vdef (stmt), &use, &use_stmt))
	    {
	      if (is_gimple_call (use_stmt)
		  && gimple_call_builtin_p (use_stmt,
					    BUILT_IN_ASAN_AFTER_DYNAMIC_INIT))
		{
		  unlink_stmt_vdef (use_stmt);
		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
		  gsi_remove (&gsi2, true);
		  remove = true;
		}
	    }
	}

      if (gimple_call_internal_p (stmt))
	switch (gimple_call_internal_fn (stmt))
	  {
	  case IFN_UBSAN_NULL:
	    remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_VPTR:
	    remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_PTR:
	    remove = maybe_optimize_ubsan_ptr_ifn (ctx, stmt);
	    break;
	  case IFN_ASAN_CHECK:
	    if (asan_check_optimize)
	      remove = maybe_optimize_asan_check_ifn (ctx, stmt);
	    if (!remove)
	      ctx->asan_num_accesses++;
	    break;
	  case IFN_ASAN_MARK:
	    ctx->contains_asan_mark = true;
	    break;
	  default:
	    break;
	  }

      if (remove)
	{
	  /* Drop this check.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimizing out: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Leaving: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  gsi_next (&gsi);
	}
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}

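/* With -fdump-tree-sanopt-details the walk above logs one line per
   statement it decides on, of the form (a sketch, not verbatim output)

     Optimizing out: UBSAN_NULL (...);
     Leaving: ASAN_CHECK (...);

   which helps when tracking down why a check was or wasn't removed.  */
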
/* Try to remove redundant sanitizer checks in function FUN.  */

static int
sanopt_optimize (function *fun, bool *contains_asan_mark)
{
  class sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;
  ctx.contains_asan_mark = false;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  *contains_asan_mark = ctx.contains_asan_mark;
  return ctx.asan_num_accesses;
}

/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

/* Sanitize all ASAN_MARK unpoison calls that are not reachable from a BB
   that contains an ASAN_MARK poison.  All these ASAN_MARK unpoison calls
   can be removed, as all variables are unpoisoned in the function
   prologue.  */

static void
sanitize_asan_mark_unpoison (void)
{
  /* 1) Find all BBs that contain an ASAN_MARK poison call.  */
  auto_sbitmap with_poison (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_poison);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (with_poison, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (asan_mark_p (stmt, ASAN_MARK_POISON))
	    {
	      bitmap_set_bit (with_poison, bb->index);
	      break;
	    }
	}
    }

  auto_sbitmap poisoned (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (poisoned);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_poison);

  /* 2) Propagate the information to all reachable blocks.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!bitmap_bit_p (poisoned, e->dest->index))
	  {
	    bitmap_set_bit (poisoned, e->dest->index);
	    bitmap_set_bit (worklist, e->dest->index);
	  }
    }

  /* 3) Iterate all BBs not included in POISONED BBs and remove unpoison
	ASAN_MARK preceding an ASAN_MARK poison (which can still happen).  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (poisoned, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	    {
	      if (asan_mark_p (stmt, ASAN_MARK_POISON))
		break;
	      else
		{
		  if (dump_file)
		    fprintf (dump_file, "Removing ASAN_MARK unpoison\n");
		  unlink_stmt_vdef (stmt);
		  release_defs (stmt);
		  gsi_remove (&gsi, true);
		  continue;
		}
	    }

	  gsi_next (&gsi);
	}
    }
}

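/* An illustrative sketch: in

     void f (void) { { char buf[64]; use (buf); } }

   the ASAN_MARK (UNPOISON, &buf, 64) at the start of the inner scope
   cannot be reached from any ASAN_MARK poison, so it is removed here:
   the function prologue has already unpoisoned the whole frame.  */
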
/* Return true when STMT is either an ASAN_CHECK call or a call of a function
   that can contain an ASAN_CHECK.  */

static bool
maybe_contains_asan_check (gimple *stmt)
{
  if (is_gimple_call (stmt))
    {
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	return false;
      else
	return !(gimple_call_flags (stmt) & ECF_CONST);
    }
  else if (is_a<gasm *> (stmt))
    return true;

  return false;
}

/* Sanitize all ASAN_MARK poison calls that are not followed by an ASAN_CHECK
   call.  These calls can be removed.  */

static void
sanitize_asan_mark_poison (void)
{
  /* 1) Find all BBs that possibly contain an ASAN_CHECK.  */
  auto_sbitmap with_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (with_check);
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (maybe_contains_asan_check (stmt))
	    {
	      bitmap_set_bit (with_check, bb->index);
	      break;
	    }
	}
    }

  auto_sbitmap can_reach_check (last_basic_block_for_fn (cfun) + 1);
  bitmap_clear (can_reach_check);
  auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
  bitmap_copy (worklist, with_check);

  /* 2) Propagate the information backwards to all predecessor blocks.  */
  while (!bitmap_empty_p (worklist))
    {
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gcc_assert (bb);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (can_reach_check, e->src->index))
	  {
	    bitmap_set_bit (can_reach_check, e->src->index);
	    bitmap_set_bit (worklist, e->src->index);
	  }
    }

  /* 3) Iterate all BBs not included in CAN_REACH_CHECK BBs and remove poison
	ASAN_MARK not followed by a call to function having an ASAN_CHECK.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      if (bitmap_bit_p (can_reach_check, bb->index))
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (maybe_contains_asan_check (stmt))
	    break;
	  else if (asan_mark_p (stmt, ASAN_MARK_POISON))
	    {
	      if (dump_file)
		fprintf (dump_file, "Removing ASAN_MARK poison\n");
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      gimple_stmt_iterator gsi2 = gsi;
	      gsi_prev (&gsi);
	      gsi_remove (&gsi2, true);
	      continue;
	    }

	  gsi_prev (&gsi);
	}
    }
}

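/* Symmetrically, an illustrative sketch: an ASAN_MARK (POISON, &buf, 64)
   emitted at the end of a scope is dead once no ASAN_CHECK (and no call
   or asm that might contain one) can execute after it, e.g. just before
   the function returns, and is removed here.  */
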
/* Rewrite all usages of tree OP which is a PARM_DECL with a VAR_DECL
   that is its DECL_VALUE_EXPR.  */

static tree
rewrite_usage_of_param (tree *op, int *walk_subtrees, void *)
{
  if (TREE_CODE (*op) == PARM_DECL && DECL_HAS_VALUE_EXPR_P (*op))
    {
      *op = DECL_VALUE_EXPR (*op);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* For a given function FUN, rewrite all addressable parameters so that
   a new automatic variable is introduced.  Right after function entry
   a parameter is assigned to the variable.  */
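
/* An illustrative sketch of the transformation: given

     void f (int a) { int *p = &a; ... }

   a new TREE_ADDRESSABLE variable with the same DECL_NAME is created,
   an assignment of the parameter to it is inserted right after function
   entry, all uses of the parameter are redirected to the new variable
   through DECL_VALUE_EXPR, and the PARM_DECL itself stops being
   TREE_ADDRESSABLE, so only the new local needs an ASan-protected
   stack slot.  */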

static void
sanitize_rewrite_addressable_params (function *fun)
{
  gimple *g;
  gimple_seq stmts = NULL;
  bool has_any_addressable_param = false;
  auto_vec<tree> clear_value_expr_list;

  for (tree arg = DECL_ARGUMENTS (current_function_decl);
       arg; arg = DECL_CHAIN (arg))
    {
      tree type = TREE_TYPE (arg);
      if (TREE_ADDRESSABLE (arg)
	  && !TREE_ADDRESSABLE (type)
	  && !TREE_THIS_VOLATILE (arg)
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	{
	  TREE_ADDRESSABLE (arg) = 0;
	  DECL_NOT_GIMPLE_REG_P (arg) = 0;
	  /* The parameter is no longer addressable.  */
	  has_any_addressable_param = true;

	  /* Create a new automatic variable.  */
	  tree var = build_decl (DECL_SOURCE_LOCATION (arg),
				 VAR_DECL, DECL_NAME (arg), type);
	  TREE_ADDRESSABLE (var) = 1;
	  DECL_IGNORED_P (var) = 1;

	  gimple_add_tmp_var (var);

	  /* We skip parameters that have a DECL_VALUE_EXPR.  */
	  if (DECL_HAS_VALUE_EXPR_P (arg))
	    continue;

	  if (dump_file)
	    {
	      fprintf (dump_file,
		       "Rewriting parameter whose address is taken: ");
	      print_generic_expr (dump_file, arg, dump_flags);
	      fputc ('\n', dump_file);
	    }

	  SET_DECL_PT_UID (var, DECL_PT_UID (arg));

	  /* Assign value of parameter to newly created variable.  */
	  if ((TREE_CODE (type) == COMPLEX_TYPE
	       || TREE_CODE (type) == VECTOR_TYPE))
	    {
	      /* We need to create a SSA name that will be used for the
		 assignment.  */
	      tree tmp = get_or_create_ssa_default_def (cfun, arg);
	      g = gimple_build_assign (var, tmp);
	      gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
	      gimple_seq_add_stmt (&stmts, g);
	    }
	  else
	    {
	      g = gimple_build_assign (var, arg);
	      gimple_set_location (g, DECL_SOURCE_LOCATION (arg));
	      gimple_seq_add_stmt (&stmts, g);
	    }

	  if (target_for_debug_bind (arg))
	    {
	      g = gimple_build_debug_bind (arg, var, NULL);
	      gimple_seq_add_stmt (&stmts, g);
	      clear_value_expr_list.safe_push (arg);
	    }

	  DECL_HAS_VALUE_EXPR_P (arg) = 1;
	  SET_DECL_VALUE_EXPR (arg, var);
	}
    }

  if (!has_any_addressable_param)
    return;

  /* Replace all usages of PARM_DECLs with the newly
     created variable VAR.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_stmt_iterator it = gsi_for_stmt (stmt);
	  walk_gimple_stmt (&it, NULL, rewrite_usage_of_param, NULL);
	}
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = dyn_cast<gphi *> (gsi_stmt (gsi));
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      hash_set<tree> visited_nodes;
	      walk_tree (gimple_phi_arg_def_ptr (phi, i),
			 rewrite_usage_of_param, NULL, &visited_nodes);
	    }
	}
    }

  /* Unset value expr for parameters for which we created debug bind
     expressions.  */
  unsigned i;
  tree arg;
  FOR_EACH_VEC_ELT (clear_value_expr_list, i, arg)
    {
      DECL_HAS_VALUE_EXPR_P (arg) = 0;
      SET_DECL_VALUE_EXPR (arg, NULL_TREE);
    }

  /* Insert default assignments at the beginning of a function.  */
  basic_block entry_bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
  entry_bb = split_edge (single_succ_edge (entry_bb));

  gimple_stmt_iterator gsi = gsi_start_bb (entry_bb);
  gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
}

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;
  bool contains_asan_mark = false;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT
	     | SANITIZE_ADDRESS | SANITIZE_VPTR | SANITIZE_POINTER_OVERFLOW)))
    asan_num_accesses = sanopt_optimize (fun, &contains_asan_mark);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    if (gimple_call_internal_p (stmt, IFN_ASAN_CHECK))
	      ++asan_num_accesses;
	    else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	      contains_asan_mark = true;
	  }
    }

  if (contains_asan_mark)
    {
      sanitize_asan_mark_unpoison ();
      sanitize_asan_mark_poison ();
    }

  if (asan_sanitize_stack_p ())
    sanitize_rewrite_addressable_params (fun);

  bool use_calls = param_asan_instrumentation_with_call_threshold < INT_MAX
    && asan_num_accesses >= param_asan_instrumentation_with_call_threshold;

  hash_map<tree, tree> shadow_vars_mapping;
  bool need_commit_edge_insert = false;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool no_next = false;

	  if (!is_gimple_call (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (gimple_call_internal_p (stmt))
	    {
	      enum internal_fn ifn = gimple_call_internal_fn (stmt);
	      switch (ifn)
		{
		case IFN_UBSAN_NULL:
		  no_next = ubsan_expand_null_ifn (&gsi);
		  break;
		case IFN_UBSAN_BOUNDS:
		  no_next = ubsan_expand_bounds_ifn (&gsi);
		  break;
		case IFN_UBSAN_OBJECT_SIZE:
		  no_next = ubsan_expand_objsize_ifn (&gsi);
		  break;
		case IFN_UBSAN_PTR:
		  no_next = ubsan_expand_ptr_ifn (&gsi);
		  break;
		case IFN_UBSAN_VPTR:
		  no_next = ubsan_expand_vptr_ifn (&gsi);
		  break;
		case IFN_ASAN_CHECK:
		  no_next = asan_expand_check_ifn (&gsi, use_calls);
		  break;
		case IFN_ASAN_MARK:
		  no_next = asan_expand_mark_ifn (&gsi);
		  break;
		case IFN_ASAN_POISON:
		  no_next = asan_expand_poison_ifn (&gsi,
						    &need_commit_edge_insert,
						    shadow_vars_mapping);
		  break;
		default:
		  break;
		}
	    }
	  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_UNREACHABLE:
		  if (sanitize_flags_p (SANITIZE_UNREACHABLE))
		    no_next = ubsan_instrument_unreachable (&gsi);
		  break;
		default:
		  break;
		}
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Expanded: ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  if (!no_next)
	    gsi_next (&gsi);
	}
    }

  if (need_commit_edge_insert)
    gsi_commit_edge_inserts ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}