/* Optimize and expand sanitizer functions.
   Copyright (C) 2014-2015 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "tm_p.h"
#include "langhooks.h"
#include "ubsan.h"
#include "params.h"
#include "tree-ssa-operands.h"
#include "tree-hash-traits.h"


/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk.  */
  bool visited_p;
};

/* If T has a single definition of form T = T2, return T2.  */

static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
        return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}

/* Tree triplet for vptr_check_map.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};

/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_hash : typed_noop_remove <sanopt_tree_triplet>
{
  typedef sanopt_tree_triplet value_type;
  typedef sanopt_tree_triplet compare_type;

  static inline hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  static inline bool
  equal (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
           && operand_equal_p (ref1.t2, ref2.t2, 0)
           && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  static inline void
  mark_deleted (sanopt_tree_triplet &ref)
  {
    ref.t1 = reinterpret_cast<tree> (1);
  }

  static inline void
  mark_empty (sanopt_tree_triplet &ref)
  {
    ref.t1 = NULL;
  }

  static inline bool
  is_deleted (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == (void *) 1;
  }

  static inline bool
  is_empty (const sanopt_tree_triplet &ref)
  {
    return ref.t1 == NULL;
  }
};

/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

struct sanopt_ctx
{
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree_operand_hash, auto_vec<gimple> > asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet_hash, auto_vec<gimple> > vptr_check_map;

  /* Number of IFN_ASAN_CHECK statements.  */
  int asan_num_accesses;
};


/* Return true if there might be any call to a free/munmap-like operation
   on any path in between DOM (which should be imm(BB)) and BB.  */

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

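  /* First, see whether any predecessor other than DOM is already known
     to contain a freeing call, or to have one on some path from its own
     immediate dominator.  */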
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
        continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
           && pred_info->imm_dom_path_with_freeing_call_p)
          || (pred_info->has_freeing_call_computed_p
              && pred_info->has_freeing_call_p))
        {
          info->imm_dom_path_with_freeing_call_computed_p = true;
          info->imm_dom_path_with_freeing_call_p = true;
          info->being_visited_p = false;
          return true;
        }
    }

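  /* Next, scan the statements of any predecessor whose HAS_FREEING_CALL_P
     has not been computed yet.  */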
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
        continue;

      if (pred_info->has_freeing_call_computed_p)
        continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
            {
              pred_info->has_freeing_call_p = true;
              break;
            }
        }

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
        {
          info->imm_dom_path_with_freeing_call_computed_p = true;
          info->imm_dom_path_with_freeing_call_p = true;
          info->being_visited_p = false;
          return true;
        }
    }

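  /* Finally, walk up the dominator tree from each predecessor towards DOM,
     recursing into imm_dom_path_with_freeing_call; the BEING_VISITED_P flag
     cuts off blocks already on the current recursion path.  */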
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
        continue;

      basic_block src;
      for (src = e->src; src != dom; )
        {
          sanopt_info *pred_info = (sanopt_info *) src->aux;
          if (pred_info->being_visited_p)
            break;
          basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
          if (imm_dom_path_with_freeing_call (src, imm))
            {
              info->imm_dom_path_with_freeing_call_computed_p = true;
              info->imm_dom_path_with_freeing_call_p = true;
              info->being_visited_p = false;
              return true;
            }
          src = imm;
        }
    }

  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}

/* Get the first dominating check from the list of stored checks.
   Non-dominating checks are silently dropped.  */

static gimple
maybe_get_dominating_check (auto_vec<gimple> &v)
{
  for (; !v.is_empty (); v.pop ())
    {
      gimple g = v.last ();
      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
      if (!si->visited_p)
        /* At this point we shouldn't have any statements
           that aren't dominating the current BB.  */
        return g;
    }
  return NULL;
}

/* Optimize away redundant UBSAN_NULL calls.  */

static bool
maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
         nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We already have recorded a UBSAN_NULL check for this pointer.  Perhaps we
     can drop this one.  But only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If this is a NULL pointer check where we would have had a SEGV
     anyway, we can remove it.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
          || kind == UBSAN_STORE_OF
          || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
             || flag_sanitize_undefined_trap_on_error
             || gimple_location (g) == gimple_location (stmt);
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
             || flag_sanitize_undefined_trap_on_error
             || gimple_location (g) == gimple_location (stmt);

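  /* If we are keeping the current stmt and the previously recorded check G
     is in the same BB with the same alignment, drop G; the current stmt is
     pushed below and subsumes it for the rest of the walk.  */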
  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}

/* Optimize away redundant UBSAN_VPTR calls.  The second argument
   is the value loaded from the virtual table, so rely on FRE to find out
   when we can actually optimize.  */

static bool
maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 5);
  sanopt_tree_triplet triplet;
  triplet.t1 = gimple_call_arg (stmt, 0);
  triplet.t2 = gimple_call_arg (stmt, 1);
  triplet.t3 = gimple_call_arg (stmt, 3);

  auto_vec<gimple> &v = ctx->vptr_check_map.get_or_insert (triplet);
  gimple g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
         nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  return true;
}

/* Returns TRUE if ASan check of length LEN in block BB can be removed
   if preceded by checks in V.  */

static bool
can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple g;
  gimple to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      if (gimple_uid (g) < si->freeing_call_events)
        {
          /* If there is a potentially freeing call after g in gbb, remove
             g from the vector; it can't be used in the optimization.  */
          cleanup = true;
          continue;
        }

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've only checked a smaller length than we want to check now,
         we can't remove the current stmt.  If g is in the same basic block,
         we want to remove it, though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
        {
          if (gbb == bb)
            {
              to_pop = g;
              cleanup = true;
            }
          continue;
        }

      while (last_bb != gbb)
        {
          /* Paths from last_bb to bb have been checked before.
             gbb is necessarily a dominator of last_bb, but not necessarily
             the immediate dominator.  */
          if (((sanopt_info *) last_bb->aux)->freeing_call_events)
            break;

          basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
          gcc_assert (imm);
          if (imm_dom_path_with_freeing_call (last_bb, imm))
            break;

          last_bb = imm;
        }
      if (last_bb == gbb)
        remove = true;
      break;
    }

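  /* Compact the vector: drop to_pop as well as any check that was followed
     by a possibly freeing call in its own BB.  */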
  if (cleanup)
    {
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
        if (v[i] != to_pop
            && (gimple_uid (v[i])
                == ((sanopt_info *)
                    gimple_bb (v[i])->aux)->freeing_call_events))
          {
            if (i != j)
              v[j] = v[i];
            j++;
          }
      v.truncate (j);
    }

  return remove;
}

/* Optimize away redundant ASAN_CHECK calls.  */

static bool
maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

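  /* Record in the stmt's uid how many possibly freeing call events this BB
     had seen when the check was encountered; can_remove_asan_check uses it
     to detect an intervening freeing call.  */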
  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* Original pointer might have been invalidated.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple g = maybe_get_dominating_check (*ptr_checks);
  gimple g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
         nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
        base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
        base_checks->safe_push (stmt);
    }

  return remove;
}

/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */

static void
sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
        {
          /* Handle asm volatile or asm with "memory" clobber
             the same as a potentially freeing call.  */
          gasm *asm_stmt = dyn_cast <gasm *> (stmt);
          if (asm_stmt
              && asan_check_optimize
              && (gimple_asm_clobbers_memory_p (asm_stmt)
                  || gimple_asm_volatile_p (asm_stmt)))
            info->freeing_call_events++;
          gsi_next (&gsi);
          continue;
        }

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
        info->freeing_call_events++;

      if (gimple_call_internal_p (stmt))
        switch (gimple_call_internal_fn (stmt))
          {
          case IFN_UBSAN_NULL:
            remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
            break;
          case IFN_UBSAN_VPTR:
            remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
            break;
          case IFN_ASAN_CHECK:
            if (asan_check_optimize)
              remove = maybe_optimize_asan_check_ifn (ctx, stmt);
            if (!remove)
              ctx->asan_num_accesses++;
            break;
          default:
            break;
          }

      if (remove)
        {
          /* Drop this check.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Optimizing out\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
        }
      else
        gsi_next (&gsi);
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}

/* Try to remove redundant sanitizer checks in function FUN.  */

static int
sanopt_optimize (function *fun)
{
  struct sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  return ctx.asan_num_accesses;
}

/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
          & (SANITIZE_NULL | SANITIZE_ALIGNMENT
             | SANITIZE_ADDRESS | SANITIZE_VPTR)))
    asan_num_accesses = sanopt_optimize (fun);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple stmt = gsi_stmt (gsi);
            if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
                && gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
              ++asan_num_accesses;
          }
    }

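  /* Once the number of instrumented accesses is large, expand ASAN_CHECKs
     into out-of-line library calls rather than inline checks to keep the
     code size down.  */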
  bool use_calls = ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
    && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;

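  /* Expand the remaining sanitizer internal functions and instrument
     calls to __builtin_unreachable.  */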
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);
          bool no_next = false;

          if (!is_gimple_call (stmt))
            {
              gsi_next (&gsi);
              continue;
            }

          if (gimple_call_internal_p (stmt))
            {
              enum internal_fn ifn = gimple_call_internal_fn (stmt);
              switch (ifn)
                {
                case IFN_UBSAN_NULL:
                  no_next = ubsan_expand_null_ifn (&gsi);
                  break;
                case IFN_UBSAN_BOUNDS:
                  no_next = ubsan_expand_bounds_ifn (&gsi);
                  break;
                case IFN_UBSAN_OBJECT_SIZE:
                  no_next = ubsan_expand_objsize_ifn (&gsi);
                  break;
                case IFN_UBSAN_VPTR:
                  no_next = ubsan_expand_vptr_ifn (&gsi);
                  break;
                case IFN_ASAN_CHECK:
                  no_next = asan_expand_check_ifn (&gsi, use_calls);
                  break;
                default:
                  break;
                }
            }
          else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
            {
              tree callee = gimple_call_fndecl (stmt);
              switch (DECL_FUNCTION_CODE (callee))
                {
                case BUILT_IN_UNREACHABLE:
                  if (flag_sanitize & SANITIZE_UNREACHABLE
                      && !lookup_attribute ("no_sanitize_undefined",
                                            DECL_ATTRIBUTES (fun->decl)))
                    no_next = ubsan_instrument_unreachable (&gsi);
                  break;
                default:
                  break;
                }
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Expanded\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          if (!no_next)
            gsi_next (&gsi);
        }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}