/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.

   Dead-code elimination is the removal of statements that have no
   impact on the program's output: "dead statements" have no effect
   on the output, while "necessary statements" may have.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessity, e.g. marking as necessary the statements
      giving values to operands of necessary statements; and
   3. Removing dead statements (illustrated in the sketch below).  */
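
/* As an illustrative sketch (hypothetical source, not output of the
   pass), consider:

     int f (int a, int b)
     {
       int unused = a * b;   <- dead: the result is never used
       return a + b;         <- necessary
     }

   Phase 1 marks the return statement as necessary, phase 2 marks the
   statements computing its operands, and phase 3 removes the
   multiplication, which was never marked.  */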

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;
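
/* As a small illustration (hypothetical CFG), in

     B1:  if (p) goto B2; else goto B3;
     B2:  x = 1;
     B3:  y = x;

   B2 executes only when the edge B1->B2 is taken, so B2 is control
   dependent on that edge, while B3 is reached on either path and is
   control dependent on neither.  */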

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* When non-NULL, holds a map from basic block index to the postorder.  */
static int *bb_postorder;


/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}

/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}

/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (cfun->can_throw_non_call_exceptions
      && !cfun->can_delete_dead_exceptions
      && stmt_could_throw_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG cleanup can then remove the
     block and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
              return;

            default:;
            }

        if (callee != NULL_TREE
            && flag_allocation_dce
            && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
          return;

        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        /* IFN_GOACC_LOOP calls are necessary in that they are used to
           represent parameters (i.e. step, bound) of a lowered OpenACC
           partitioned loop.  But such a partitioned loop might not survive
           aggressive loop removal because it has a loop exit and is
           assumed to be finite.  Therefore, we need to explicitly mark
           these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
        if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (gimple_vdef (stmt) && keep_all_vdefs_p ())
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}


/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise their necessity is decided later from the control
   dependence information.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      class loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file, "Marking back edge of irreducible "
                             "loop %i->%i\n", e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */
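
/* Illustrative sketch (hypothetical GIMPLE, with x a non-aliased
   local): walking backwards from the load of x below, the walker
   visits "x = b_2" first, marks it necessary and reports it as a
   kill, so the walk stops and the earlier store "x = a_1" is never
   marked and thus remains removable:

     x = a_1;    <- never visited, dead
     x = b_2;    <- kill: marked necessary, walk stops
     ... = x;  */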

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function, where we could
         inspect the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (is_gimple_call (def_stmt))
    {
      tree callee = gimple_call_fndecl (def_stmt);
      if (callee != NULL_TREE
          && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
            return false;

          default:;
          }

      if (callee != NULL_TREE
          && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
              || DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (callee)))
        return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true for PHI nodes with one argument or with identical
   arguments; such PHI nodes can be removed.  */
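
/* For example (illustrative), PHI <x_1(bb2), x_1(bb3)> is degenerate:
   every incoming edge provides x_1, so the PHI result can simply be
   replaced by x_1.  */
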
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}

/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */
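
/* For instance (standard Itanium manglings, shown for illustration),
   _Znwm ("operator new (size_t)") pairs with _ZdlPv ("operator delete
   (void*)"), and the array forms _Znam and _ZdaPv pair likewise.  */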

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  if (new_len < 5 || delete_len < 6)
    return false;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;
  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;
  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
         _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
        return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
        return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
        return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
           || (new_len == 33
               && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
         _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
         _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
        return true;
      if (delete_len == 21
          && delete_name[5] == new_name[3]
          && !memcmp (delete_name + 6, "St11align_val_t", 15))
        return true;
      if (delete_len == 34
          && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
        return true;
    }
  return false;
}

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (AGGRESSIVE is false), control dependences
   are ignored.  */
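
/* For instance (illustrative GIMPLE), if the return statement below
   was marked necessary in the first phase, one worklist iteration
   marks the definition of sum_3 necessary, and the next iteration
   marks the definitions of a_1 and b_2:

     sum_3 = a_1 + b_2;
     return sum_3;  */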

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependences of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive when the predecessor block is
             inside a loop.  Consider:

              if (b)
                {
                  int i;
                  for (i = 0; i < 1000; ++i)
                    ;
                  j = 0;
                }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependence of the predecessor block
             (which is within the empty loop) also contains the block
             determining the number of iterations of the loop, which would
             prevent the empty loop from being removed.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass, we identify what the
             control dependences would look like if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for what
             the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as the
                control dependence of B in the original CFG, except for block
                B itself (since C' postdominates B in the modified CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor of
                B that is not postdominated by B.  Testing this condition is
                somewhat expensive, because we would need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependences of B itself is harmless because they
                will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens, since
                there is a path from C to exit that does not go through B and
                thus not through C' either.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function do not mark that necessary through
             processing the argument.  */
          bool is_delete_operator
            = (is_gimple_call (stmt)
               && gimple_call_replaceable_operator_delete_p
                    (as_a <gcall *> (stmt)));
          if (is_delete_operator
              || gimple_call_builtin_p (stmt, BUILT_IN_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gimple *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function, do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
                      || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)))
                {
                  if (is_delete_operator)
                    {
                      if (!valid_new_delete_pair_p (def_stmt, stmt))
                        mark_operand_necessary (gimple_call_arg (stmt, 0));

                      /* Delete operators can have alignment and (or) size
                         as additional arguments.  When such an argument is
                         an SSA_NAME, it must be marked as necessary.  */
                      if (gimple_call_num_args (stmt) >= 2)
                        for (unsigned i = 1; i < gimple_call_num_args (stmt);
                             i++)
                          {
                            tree arg = gimple_call_arg (stmt, i);
                            if (TREE_CODE (arg) == SSA_NAME)
                              mark_operand_necessary (arg);
                          }
                    }

                  continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* No need to search for vdefs if we intrinsically keep them all.  */
          if (keep_all_vdefs_p ())
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except the non-aliased ones, which we
             handle in 1).  By keeping a global visited bitmap for the
             references we walk for 2) we avoid quadratic behavior for
             those.  */

          if (is_gimple_call (stmt))
            {
              tree callee = gimple_call_fndecl (stmt);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              if (callee != NULL_TREE
                  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
                      || DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (callee)))
                continue;

              /* Calls implicitly load from memory, their arguments
                 in addition may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (stmt);
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (stmt, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we overused our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls back to
             super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}


/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
                  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but also need to
     update the CFG.  We can choose any of the edges out of BB as long as
     we are sure not to close infinite loops.  This is done by always
     choosing the edge closer to exit in inverted_post_order_compute
     order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a BB with a live statement
         in it.  To be able to choose one, we compute an inverted post
         order starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
        if (e != e2)
          {
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            to_remove_edges.safe_push (e2);
          }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any uses of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */
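
/* Illustrative sketch (hypothetical GIMPLE, int operands): when only
   the real part of the result is used,

     _1 = ADD_OVERFLOW (a_2, b_3);
     r_4 = REALPART_EXPR <_1>;

   the internal call is replaced by a COMPLEX_EXPR built from plain
   unsigned arithmetic, with a zero overflow part:

     _1 = COMPLEX_EXPR <(int) ((unsigned int) a_2 + (unsigned int) b_3), 0>;
     r_4 = REALPART_EXPR <_1>;  */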

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
                  || (is_gimple_call (stmt)
                      && gimple_call_replaceable_operator_delete_p
                           (as_a <gcall *> (stmt)))))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
            }

          /* If the statement at GSI is not necessary, remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed, SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    {
                      bitmap_clear (debug_seen);
                      continue;
                    }
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of "var = call ();" is dead, simplify the
                 statement into "call ();", saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary; it would confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                           || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                               && !ALLOCA_FUNCTION_CODE_P
                                     (DECL_FUNCTION_CODE (call))))
                          && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE (unless it has three arguments) or
                     ASAN_POISON without an lhs is not needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                        if (gimple_call_num_args (stmt) >= 3
                            && !integer_nonzerop (gimple_call_arg (stmt, 2)))
                          break;
                        /* FALLTHRU */
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb, to_remove_edges);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
          else if (gimple_debug_bind_p (stmt))
            {
              /* We are only keeping the last debug-bind of a
                 non-DEBUG_EXPR_DECL variable in a series of
                 debug-bind stmts.  */
              tree var = gimple_debug_bind_get_var (stmt);
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
                remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          bitmap_clear (debug_seen);
        }

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
         during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
        remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominator tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that loops
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}


/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands.  */
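
/* A typical use (hypothetical caller, for illustration): collect the
   SSA names whose defining statements a transformation may have made
   dead, then let this function chase their operands:

     auto_bitmap maybe_dead;
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (lhs));
     ...
     simple_dce_from_worklist (maybe_dead);  */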

void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}