/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
   Dead-code elimination is the removal of statements which have no
   impact on the program's output: "dead statements" have no impact
   on the output, while "necessary statements" may.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
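
/* As a small illustration (not part of the algorithm description above),
   consider:

     x = a + b;      <- x is never used again
     y = a * b;
     return y;

   Phase 1 marks the return statement as necessary, phase 2 propagates
   necessity to the definition of y (but not of x), and phase 3 removes
   the dead assignment to x.  */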

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "gimple-fold.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static vec<gimple *> worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges so we
   use a bitmap for each block recording its edges.  An array holds the
   bitmap.  The Ith bit in the bitmap is set if that block is dependent
   on the Ith edge.  */
static control_dependences *cd;

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;
/* When non-NULL, holds a map from basic block index into the postorder.  */
static int *bb_postorder;

/* True if we should treat any stmt with a vdef as necessary.  */

static inline bool
keep_all_vdefs_p ()
{
  return optimize_debug;
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */

static inline void
mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    worklist.safe_push (stmt);
  if (add_to_worklist && bb_contains_live_stmts && !is_gimple_debug (stmt))
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}

/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple *stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (bitmap_bit_p (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
                  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  bitmap_set_bit (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
  worklist.safe_push (stmt);
}

/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement could throw, it can be deemed necessary.  */
  if (stmt_unremovable_because_of_non_call_eh_p (cfun, stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      {
        tree callee = gimple_call_fndecl (stmt);
        if (callee != NULL_TREE
            && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_ALIGNED_ALLOC:
            case BUILT_IN_CALLOC:
            CASE_BUILT_IN_ALLOCA:
            case BUILT_IN_STRDUP:
            case BUILT_IN_STRNDUP:
            case BUILT_IN_GOMP_ALLOC:
              return;

            default:;
            }

        if (callee != NULL_TREE
            && flag_allocation_dce
            && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee))
          return;

        /* Most, but not all function calls are required.  Function calls
           that produce no result and have no side effects (i.e. const and
           pure functions) are unnecessary.  */
        if (gimple_has_side_effects (stmt))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        /* IFN_GOACC_LOOP calls are necessary in that they are used to
           represent parameters (i.e. step, bound) of a lowered OpenACC
           partitioned loop.  But this kind of partitioned loop might not
           survive aggressive loop removal because it has a loop exit and
           is assumed to be finite.  Therefore, we need to explicitly mark
           these calls.  (An example is libgomp.oacc-c-c++-common/pr84955.c.)  */
        if (gimple_call_internal_p (stmt, IFN_GOACC_LOOP))
          {
            mark_stmt_necessary (stmt, true);
            return;
          }
        if (!gimple_call_lhs (stmt))
          return;
        break;
      }

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
         easily locate the debug temp bind stmt for a use thereof,
         we could refrain from marking all debug temps here, and
         mark them only if they're used.  */
      if (gimple_debug_nonbind_marker_p (stmt)
          || !gimple_debug_bind_p (stmt)
          || gimple_debug_bind_has_value_p (stmt)
          || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
        mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
        mark_stmt_necessary (stmt, true);
      break;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
        return;
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (stmt_may_clobber_global_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (gimple_vdef (stmt) && keep_all_vdefs_p ())
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}
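
/* For instance, "return x_1;" or a call with side effects is marked
   necessary immediately by the above, while a pure computation such as
   "y_2 = x_1 + 1;" stays unmarked until propagate_necessity proves that
   some necessary statement uses y_2.  */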

/* Mark the last statement of BB as necessary.  */

static void
mark_last_stmt_necessary (basic_block bb)
{
  gimple *stmt = last_stmt (bb);

  bitmap_set_bit (last_stmt_necessary, bb->index);
  bitmap_set_bit (bb_contains_live_stmts, bb->index);

  /* We actually mark the statement only if it is a control statement.  */
  if (stmt && is_ctrl_stmt (stmt))
    mark_stmt_necessary (stmt, true);
}

/* Mark control dependent edges of BB as necessary.  We have to do this only
   once for each basic block so we set the appropriate bit after we're done.

   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */

static void
mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
{
  bitmap_iterator bi;
  unsigned edge_number;
  bool skipped = false;

  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));

  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;

  EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
                            0, edge_number, bi)
    {
      basic_block cd_bb = cd->get_edge_src (edge_number);

      if (ignore_self && cd_bb == bb)
        {
          skipped = true;
          continue;
        }

      if (!bitmap_bit_p (last_stmt_necessary, cd_bb->index))
        mark_last_stmt_necessary (cd_bb);
    }

  if (!skipped)
    bitmap_set_bit (visited_control_parents, bb->index);
}

/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary; otherwise control dependence analysis is used to prove some
   of them dead.  */

static void
find_obviously_necessary_stmts (bool aggressive)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple *phi, *stmt;
  int flags;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          gimple_set_plf (phi, STMT_NECESSARY, false);
        }

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          stmt = gsi_stmt (gsi);
          gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, aggressive);
        }
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  flags = flags_from_decl_or_type (current_function_decl);
  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (aggressive)
    {
      class loop *loop;
      if (mark_irreducible_loops ())
        FOR_EACH_BB_FN (bb, cfun)
          {
            edge_iterator ei;
            FOR_EACH_EDGE (e, ei, bb->succs)
              if ((e->flags & EDGE_DFS_BACK)
                  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Marking back edge of irreducible loop %i->%i\n",
                             e->src->index, e->dest->index);
                  mark_control_dependent_edges_necessary (e->dest, false);
                }
          }

      FOR_EACH_LOOP (loop, 0)
        if (!finite_loop_p (loop))
          {
            if (dump_file)
              fprintf (dump_file, "cannot prove finiteness of loop %i\n",
                       loop->num);
            mark_control_dependent_edges_necessary (loop->latch, false);
          }
    }
}
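
/* As an example of the loop handling above: for a loop like
   "while (n_1 != 0) n_1 = foo (n_1);" finiteness cannot be proven, so
   the control dependences of its latch are marked necessary and
   aggressive DCE will not delete the possibly infinite loop even if
   its results are otherwise unused.  */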

/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  if (TREE_CODE (ref) == MEM_REF
      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
  return !(DECL_P (ref)
           && !may_be_aliased (ref));
}
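
/* E.g. a local "int i" whose address is never taken cannot be aliased,
   so a load from it only needs the reaching definitions of i itself;
   a load through "*p_1" or from a global may be aliased and has to be
   treated conservatively.  */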

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (cfun, def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      poly_int64 size, offset, max_size;
      bool reverse;
      ao_ref_base (ref);
      base
        = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base == ref->base)
        {
          /* For a must-alias check we need to be able to constrain
             the accesses properly.  */
          if (known_eq (size, max_size)
              && known_subrange_p (ref->offset, ref->max_size, offset, size))
            return true;
          /* Or they need to be exactly the same.  */
          else if (ref->ref
                   /* Make sure there is no induction variable involved
                      in the references (gcc.c-torture/execute/pr42142.c).
                      The simplest way is to check if the kill dominates
                      the use.  */
                   /* But when both are in the same block we cannot
                      easily tell whether we came from a backedge
                      unless we decide to compute stmt UIDs
                      (see PR58246).  */
                   && (basic_block) data != gimple_bb (def_stmt)
                   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
                                      gimple_bb (def_stmt))
                   && operand_equal_p (ref->ref, lhs, 0))
            return true;
        }
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());

  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
                              mark_aliased_reaching_defs_necessary_1,
                              gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
                                    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && bitmap_bit_p (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
                  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
        return false;
    }

  /* We want to skip statements that do not constitute stores but have
     a virtual definition.  */
  if (gcall *call = dyn_cast <gcall *> (def_stmt))
    {
      tree callee = gimple_call_fndecl (call);
      if (callee != NULL_TREE
          && fndecl_built_in_p (callee, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_MALLOC:
          case BUILT_IN_ALIGNED_ALLOC:
          case BUILT_IN_CALLOC:
          CASE_BUILT_IN_ALLOCA:
          case BUILT_IN_FREE:
          case BUILT_IN_GOMP_ALLOC:
          case BUILT_IN_GOMP_FREE:
            return false;

          default:;
          }

      if (callee != NULL_TREE
          && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
              || DECL_IS_OPERATOR_DELETE_P (callee))
          && gimple_call_from_new_or_delete (call))
        return false;
    }

  if (! gimple_clobber_p (def_stmt))
    mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple *stmt)
{
  /* Should have been caught before calling this function.  */
  gcc_checking_assert (!keep_all_vdefs_p ());
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
                      mark_all_reaching_defs_necessary_1, NULL, &visited);
}

/* Return true if PHI has a single argument or identical arguments
   and thus can be removed.  */
static bool
degenerate_phi_p (gimple *phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
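
/* E.g. "x_3 = PHI <a_1, a_1>" is degenerate: all arguments are a_1, so
   the PHI merely copies a_1 and is removable, whereas
   "x_3 = PHI <a_1, b_2>" is not.  */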

/* Return true if NEW_CALL and DELETE_CALL are a valid pair of new
   and delete operators.  */

static bool
valid_new_delete_pair_p (gimple *new_call, gimple *delete_call)
{
  tree new_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (new_call));
  tree delete_asm = DECL_ASSEMBLER_NAME (gimple_call_fndecl (delete_call));
  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  if (new_len < 5 || delete_len < 6)
    return false;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;
  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;
  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
         _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
        return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
        return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
        return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
           || (new_len == 33
               && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
         _ZdXPvSt11align_val_t, _ZdXPvYSt11align_val_t or
         _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
        return true;
      if (delete_len == 21
          && delete_name[5] == new_name[3]
          && !memcmp (delete_name + 6, "St11align_val_t", 15))
        return true;
      if (delete_len == 34
          && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
        return true;
    }
  return false;
}
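
/* For example, under the Itanium C++ ABI "_Znwm" (operator new taking
   a 64-bit size_t) pairs with "_ZdlPv" (operator delete (void *)) and
   with the sized variant "_ZdlPvm", while the array form "_Znam" pairs
   with "_ZdaPv" and "_ZdaPvm".  */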

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode (!AGGRESSIVE), control dependences are not
   taken into account.  */

static void
propagate_necessity (bool aggressive)
{
  gimple *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (worklist.length () > 0)
    {
      /* Take STMT from worklist.  */
      stmt = worklist.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }

      if (aggressive)
        {
          /* Mark the last statement of the basic blocks on which the block
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }

      if (gimple_code (stmt) == GIMPLE_PHI
          /* We do not process virtual PHI nodes nor do we track their
             necessity.  */
          && !virtual_operand_p (gimple_phi_result (stmt)))
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
             PHI node's arguments are always necessary.  In aggressive mode,
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
          gphi *phi = as_a <gphi *> (stmt);
          size_t k;

          for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
                mark_operand_necessary (arg);
            }

          /* For PHI operands it matters from where the control flow arrives
             at the BB.  Consider the following example:

             a=exp1;
             b=exp2;
             if (test)
                ;
             else
                ;
             c=PHI(a,b)

             We need to mark the control dependences of the empty basic
             blocks, since they contain the computation of the PHI operands.

             Doing so is too restrictive in the case the predecessor block
             is in a loop.  Consider:

             if (b)
               {
                 int i;
                 for (i = 0; i<1000; ++i)
                   ;
                 j = 0;
               }
             return j;

             There is a PHI for j in the BB containing the return statement.
             In this case the control dependences of the predecessor block
             (which is within the empty loop) also contain the block that
             determines the number of iterations of the loop, which would
             prevent removal of the empty loop in this case.

             This scenario can be avoided by splitting critical edges.
             To save the critical edge splitting pass we identify how the
             control dependence would look if the edge were split.

             Consider the modified CFG created from the current CFG by
             splitting edge B->C.  In the postdominance tree of the modified
             CFG, C' is always a child of C.  There are two cases for what
             the children of C' can look like:

             1) C' is a leaf

                In this case the only basic block C' is control dependent
                on is B.

             2) C' has a single child that is B

                In this case the control dependence of C' is the same as
                the control dependence of B in the original CFG, except for
                block B itself (since C' postdominates B in the modified
                CFG).

             Now how do we decide which case happens?  There are two basic
             options:

             a) C postdominates B.  Then C immediately postdominates B, and
                case 2 happens iff there is no other way from B to C except
                the edge B->C.

                There is another way from B to C iff there is a successor
                of B that is not postdominated by B.  Testing this condition
                is somewhat expensive, because we need to iterate over all
                successors of B.  We are safe to assume that this does not
                happen: we will mark B as needed when processing the other
                path from B to C that is control dependent on B, and marking
                the control dependences of B itself is harmless because they
                will be processed anyway after processing the control
                statement in B.

             b) C does not postdominate B.  Then case 1 always happens,
                since there is a path from C to exit that does not go
                through B and thus also not through C'.  */

          if (aggressive && !degenerate_phi_p (stmt))
            {
              for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
                  basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;

                  if (gimple_bb (stmt)
                      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
                    {
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
                  else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                             arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* If this is a call to free which is directly fed by an
             allocation function, do not mark it necessary through
             processing the argument.  */
          bool is_delete_operator
            = (is_gimple_call (stmt)
               && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
               && gimple_call_operator_delete_p (as_a <gcall *> (stmt)));
          if (is_delete_operator
              || gimple_call_builtin_p (stmt, BUILT_IN_FREE)
              || gimple_call_builtin_p (stmt, BUILT_IN_GOMP_FREE))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              gcall *def_stmt;
              tree def_callee;
              /* If the pointer we free is defined by an allocation
                 function, do not add the call to the worklist.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && (def_stmt = dyn_cast <gcall *> (SSA_NAME_DEF_STMT (ptr)))
                  && (def_callee = gimple_call_fndecl (def_stmt))
                  && ((DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC
                           || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_GOMP_ALLOC))
                      || (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (def_callee)
                          && gimple_call_from_new_or_delete (def_stmt))))
                {
                  if (is_delete_operator
                      && !valid_new_delete_pair_p (def_stmt, stmt))
                    mark_operand_necessary (gimple_call_arg (stmt, 0));

                  /* Delete operators can have alignment and/or size as the
                     next arguments.  When they are SSA_NAMEs, they must be
                     marked as necessary.  Similarly for GOMP_free.  */
                  if (gimple_call_num_args (stmt) >= 2)
                    for (unsigned i = 1; i < gimple_call_num_args (stmt); i++)
                      {
                        tree arg = gimple_call_arg (stmt, i);
                        if (TREE_CODE (arg) == SSA_NAME)
                          mark_operand_necessary (arg);
                      }

                  continue;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
            mark_operand_necessary (use);

          use = gimple_vuse (stmt);
          if (!use)
            continue;

          /* No need to search for vdefs if we intrinsically keep them
             all.  */
          if (keep_all_vdefs_p ())
            continue;

          /* If we dropped to simple mode make all immediately
             reachable definitions necessary.  */
          if (chain_ovfl)
            {
              mark_all_reaching_defs_necessary (stmt);
              continue;
            }

          /* For statements that may load from memory (have a VUSE) we
             have to mark all reaching (may-)definitions as necessary.
             We partition this task into two cases:
              1) explicit loads based on decls that are not aliased
              2) implicit loads (like calls) and explicit loads not
                 based on decls that are not aliased (like indirect
                 references or loads from globals)
             For 1) we mark all reaching may-defs as necessary, stopping
             at dominating kills.  For 2) we want to mark all dominating
             references necessary, except the non-aliased ones which we
             handle in 1).  By keeping a global visited bitmap for the
             references we walk for 2) we avoid quadratic behavior for
             those.  */

          if (gcall *call = dyn_cast <gcall *> (stmt))
            {
              tree callee = gimple_call_fndecl (call);
              unsigned i;

              /* Calls to functions that are merely acting as barriers
                 or that only store to memory do not make any previous
                 stores necessary.  */
              if (callee != NULL_TREE
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
                  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALIGNED_ALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
                      || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee))
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
                      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
                continue;

              if (callee != NULL_TREE
                  && (DECL_IS_REPLACEABLE_OPERATOR_NEW_P (callee)
                      || DECL_IS_OPERATOR_DELETE_P (callee))
                  && gimple_call_from_new_or_delete (call))
                continue;

              /* Calls implicitly load from memory; in addition their
                 arguments may explicitly perform memory loads.  */
              mark_all_reaching_defs_necessary (call);
              for (i = 0; i < gimple_call_num_args (call); ++i)
                {
                  tree arg = gimple_call_arg (call, i);
                  if (TREE_CODE (arg) == SSA_NAME
                      || is_gimple_min_invariant (arg))
                    continue;
                  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
                    arg = TREE_OPERAND (arg, 0);
                  if (!ref_may_be_aliased (arg))
                    mark_aliased_reaching_defs_necessary (call, arg);
                }
            }
          else if (gimple_assign_single_p (stmt))
            {
              tree rhs;
              /* If this is a load mark things necessary.  */
              rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              tree rhs = gimple_return_retval (return_stmt);
              /* A return statement may perform a load.  */
              if (rhs
                  && TREE_CODE (rhs) != SSA_NAME
                  && !is_gimple_min_invariant (rhs)
                  && TREE_CODE (rhs) != CONSTRUCTOR)
                {
                  if (!ref_may_be_aliased (rhs))
                    mark_aliased_reaching_defs_necessary (stmt, rhs);
                  else
                    mark_all_reaching_defs_necessary (stmt);
                }
            }
          else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
            {
              unsigned i;
              mark_all_reaching_defs_necessary (stmt);
              /* Inputs may perform loads.  */
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                  if (TREE_CODE (op) != SSA_NAME
                      && !is_gimple_min_invariant (op)
                      && TREE_CODE (op) != CONSTRUCTOR
                      && !ref_may_be_aliased (op))
                    mark_aliased_reaching_defs_necessary (stmt, op);
                }
            }
          else if (gimple_code (stmt) == GIMPLE_TRANSACTION)
            {
              /* The beginning of a transaction is a memory barrier.  */
              /* ??? If we were really cool, we'd only be a barrier
                 for the memories touched within the transaction.  */
              mark_all_reaching_defs_necessary (stmt);
            }
          else
            gcc_unreachable ();

          /* If we over-used our alias oracle budget, drop to simple
             mode.  The cost metric allows quadratic behavior
             (number of uses times number of may-defs queries) up to
             a constant maximal number of queries and after that falls
             back to super-linear complexity.  */
          if (/* Constant but quadratic for small functions.  */
              total_chain > 128 * 128
              /* Linear in the number of may-defs.  */
              && total_chain > 32 * longest_chain
              /* Linear in the number of uses.  */
              && total_chain > nr_walks * 32)
            {
              chain_ovfl = true;
              if (visited)
                bitmap_clear (visited);
            }
        }
    }
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gphi *phi;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi.phi ();

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (virtual_operand_p (gimple_phi_result (phi)))
        {
          /* Virtual PHI nodes with one or identical arguments
             can be removed.  */
          if (degenerate_phi_p (phi))
            {
              tree vdef = gimple_phi_result (phi);
              tree vuse = gimple_phi_arg_def (phi, 0);

              use_operand_p use_p;
              imm_use_iterator iter;
              gimple *use_stmt;
              FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, vuse);
              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
                  && TREE_CODE (vuse) == SSA_NAME)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
            }
          else
            gimple_set_plf (phi, STMT_NECESSARY, true);
        }

      if (!gimple_plf (phi, STMT_NECESSARY))
        {
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
              print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, true);
          stats.removed_phis++;
          continue;
        }

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block
   BB containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb,
                  vec<edge> &to_remove_edges)
{
  gimple *stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we need to
     update the CFG.  We can choose any of the edges out of BB as long as
     we are sure not to close infinite loops.  This is done by always
     choosing the edge closer to exit in inverted_post_order_compute
     order.  */
  if (is_ctrl_stmt (stmt))
    {
      edge_iterator ei;
      edge e = NULL, e2;

      /* See if there is only one non-abnormal edge.  */
      if (single_succ_p (bb))
        e = single_succ_edge (bb);
      /* Otherwise choose one that is closer to a bb with a live statement
         in it.  To be able to choose one, we compute the inverted post
         order starting from all BBs with live statements.  */
      if (!e)
        {
          if (!bb_postorder)
            {
              auto_vec<int, 20> postorder;
              inverted_post_order_compute (&postorder,
                                           &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
              for (unsigned int i = 0; i < postorder.length (); ++i)
                bb_postorder[postorder[i]] = i;
            }
          FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                || bb_postorder [e->dest->index]
                   < bb_postorder [e2->dest->index])
              e = e2;
        }
      gcc_assert (e);
      e->probability = profile_probability::always ();

      /* The edge is no longer associated with a conditional, so it does
         not have TRUE/FALSE flags.
         We are also safe to drop EH/ABNORMAL flags and turn them into
         normal control flow, because we know that all the destinations
         (including those odd edges) are equivalent for program
         execution.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE | EDGE_EH
                    | EDGE_ABNORMAL);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      FOR_EACH_EDGE (e2, ei, bb->succs)
        if (e != e2)
          {
            /* If we made a BB unconditionally exit a loop or removed
               an entry into an irreducible region, then this transform
               alters the set of BBs in the loop.  Schedule a fixup.  */
            if (loop_exit_edge_p (bb->loop_father, e)
                || (e2->dest->flags & BB_IRREDUCIBLE_LOOP))
              loops_state_set (LOOPS_NEED_FIXUP);
            to_remove_edges.safe_push (e2);
          }
    }

  /* If this is a store into a variable that is being optimized away,
     add a debug bind stmt if possible.  */
  if (MAY_HAVE_DEBUG_BIND_STMTS
      && gimple_assign_single_p (stmt)
      && is_gimple_val (gimple_assign_rhs1 (stmt)))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
          && !DECL_IGNORED_P (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs))
          && !is_global_var (lhs)
          && !DECL_HAS_VALUE_EXPR_P (lhs))
        {
          tree rhs = gimple_assign_rhs1 (stmt);
          gdebug *note
            = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
          gsi_insert_after (i, note, GSI_SAME_STMT);
        }
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Helper for maybe_optimize_arith_overflow.  Find in *TP any use of
   DATA (an SSA_NAME) other than through a REALPART_EXPR referencing it.  */

static tree
find_non_realpart_uses (tree *tp, int *walk_subtrees, void *data)
{
  if (TYPE_P (*tp) || TREE_CODE (*tp) == REALPART_EXPR)
    *walk_subtrees = 0;
  if (*tp == (tree) data)
    return *tp;
  return NULL_TREE;
}

/* If the IMAGPART_EXPR of the {ADD,SUB,MUL}_OVERFLOW result is never used,
   but REALPART_EXPR is, optimize the {ADD,SUB,MUL}_OVERFLOW internal calls
   into plain unsigned {PLUS,MINUS,MULT}_EXPR, and if needed reset debug
   uses.  */

static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
                               enum tree_code subcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);

  if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
    return;

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  bool has_debug_uses = false;
  bool has_realpart_uses = false;
  bool has_other_uses = false;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (is_gimple_debug (use_stmt))
        has_debug_uses = true;
      else if (is_gimple_assign (use_stmt)
               && gimple_assign_rhs_code (use_stmt) == REALPART_EXPR
               && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == lhs)
        has_realpart_uses = true;
      else
        {
          has_other_uses = true;
          break;
        }
    }

  if (!has_realpart_uses || has_other_uses)
    return;

  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  tree type = TREE_TYPE (TREE_TYPE (lhs));
  tree utype = type;
  if (!TYPE_UNSIGNED (type))
    utype = build_nonstandard_integer_type (TYPE_PRECISION (type), 1);
  tree result = fold_build2_loc (loc, subcode, utype,
                                 fold_convert_loc (loc, utype, arg0),
                                 fold_convert_loc (loc, utype, arg1));
  result = fold_convert_loc (loc, type, result);

  if (has_debug_uses)
    {
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;
          tree v = gimple_debug_bind_get_value (use_stmt);
          if (walk_tree (&v, find_non_realpart_uses, lhs, NULL))
            {
              gimple_debug_bind_reset_value (use_stmt);
              update_stmt (use_stmt);
            }
        }
    }

  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
    result = drop_tree_overflow (result);
  tree overflow = build_zero_cst (type);
  tree ctype = build_complex_type (type);
  if (TREE_CODE (result) == INTEGER_CST)
    result = build_complex (ctype, result, overflow);
  else
    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                         ctype, result, overflow);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Transforming call: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "because the overflow result is never used into: ");
      print_generic_stmt (dump_file, result, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  if (!update_call_from_tree (gsi, result))
    gimplify_and_update_call_from_tree (gsi, result);
}
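
/* For example, if only the sum of "r_1 = .ADD_OVERFLOW (a_2, b_3)" is
   used, via "s_4 = REALPART_EXPR <r_1>;", and the IMAGPART_EXPR overflow
   flag is dead, the call is replaced by the equivalent of
   "r_1 = COMPLEX_EXPR <(int) ((unsigned) a_2 + (unsigned) b_3), 0>;"
   so the addition no longer carries the overflow computation.  */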

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple *stmt;
  tree call;
  vec<basic_block> h;
  auto_vec<edge> to_remove_edges;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS,
                                single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  while (h.length ())
    {
      bb = h.pop ();

      /* Remove dead statements.  */
      auto_bitmap debug_seen;
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
        {
          stmt = gsi_stmt (gsi);

          psi = gsi;
          gsi_prev (&psi);

          stats.total++;

          /* We can mark a call to free as not necessary if the
             defining statement of its argument is not necessary
             (and thus is getting removed).  */
          if (gimple_plf (stmt, STMT_NECESSARY)
              && (gimple_call_builtin_p (stmt, BUILT_IN_FREE)
                  || (is_gimple_call (stmt)
                      && gimple_call_from_new_or_delete (as_a <gcall *> (stmt))
                      && gimple_call_operator_delete_p (as_a <gcall *> (stmt)))))
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == SSA_NAME)
                {
                  gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
                  if (!gimple_nop_p (def_stmt)
                      && !gimple_plf (def_stmt, STMT_NECESSARY))
                    gimple_set_plf (stmt, STMT_NECESSARY, false);
                }
            }

          /* If GSI is not necessary then remove it.  */
          if (!gimple_plf (stmt, STMT_NECESSARY))
            {
              /* Keep live those clobbers that we can keep live.  */
              if (gimple_clobber_p (stmt))
                {
                  ssa_op_iter iter;
                  use_operand_p use_p;
                  bool dead = false;
                  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
                    {
                      tree name = USE_FROM_PTR (use_p);
                      if (!SSA_NAME_IS_DEFAULT_DEF (name)
                          && !bitmap_bit_p (processed,
                                            SSA_NAME_VERSION (name)))
                        {
                          dead = true;
                          break;
                        }
                    }
                  if (!dead)
                    {
                      bitmap_clear (debug_seen);
                      continue;
                    }
                }
              if (!is_gimple_debug (stmt))
                something_changed = true;
              remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          else if (is_gimple_call (stmt))
            {
              tree name = gimple_call_lhs (stmt);

              notice_special_calls (as_a <gcall *> (stmt));

              /* When the LHS of var = call (); is dead, simplify it into
                 call (); saving one operand.  */
              if (name
                  && TREE_CODE (name) == SSA_NAME
                  && !bitmap_bit_p (processed, SSA_NAME_VERSION (name))
                  /* Avoid doing so for allocation calls which we
                     did not mark as necessary, it will confuse the
                     special logic we apply to malloc/free pair removal.  */
                  && (!(call = gimple_call_fndecl (stmt))
                      || ((DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
                           || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
                               && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
                               && !ALLOCA_FUNCTION_CODE_P
                                     (DECL_FUNCTION_CODE (call))))
                          && !DECL_IS_REPLACEABLE_OPERATOR_NEW_P (call))))
                {
                  something_changed = true;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleting LHS of call: ");
                      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                      fprintf (dump_file, "\n");
                    }

                  gimple_call_set_lhs (stmt, NULL_TREE);
                  maybe_clean_or_replace_eh_stmt (stmt, stmt);
                  update_stmt (stmt);
                  release_ssa_name (name);

                  /* GOMP_SIMD_LANE (unless it has three arguments) or
                     ASAN_POISON without an lhs is not needed.  */
                  if (gimple_call_internal_p (stmt))
                    switch (gimple_call_internal_fn (stmt))
                      {
                      case IFN_GOMP_SIMD_LANE:
                        if (gimple_call_num_args (stmt) >= 3
                            && !integer_nonzerop (gimple_call_arg (stmt, 2)))
                          break;
                        /* FALLTHRU */
                      case IFN_ASAN_POISON:
                        remove_dead_stmt (&gsi, bb, to_remove_edges);
                        break;
                      default:
                        break;
                      }
                }
              else if (gimple_call_internal_p (stmt))
                switch (gimple_call_internal_fn (stmt))
                  {
                  case IFN_ADD_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, PLUS_EXPR);
                    break;
                  case IFN_SUB_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MINUS_EXPR);
                    break;
                  case IFN_MUL_OVERFLOW:
                    maybe_optimize_arith_overflow (&gsi, MULT_EXPR);
                    break;
                  default:
                    break;
                  }
            }
          else if (gimple_debug_bind_p (stmt))
            {
              /* We are only keeping the last debug-bind of a
                 non-DEBUG_EXPR_DECL variable in a series of
                 debug-bind stmts.  */
              tree var = gimple_debug_bind_get_var (stmt);
              if (TREE_CODE (var) != DEBUG_EXPR_DECL
                  && !bitmap_set_bit (debug_seen, DECL_UID (var)))
                remove_dead_stmt (&gsi, bb, to_remove_edges);
              continue;
            }
          bitmap_clear (debug_seen);
        }

      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  h.release ();

  /* Since we don't track liveness of virtual PHI nodes, it is possible
     that we rendered some PHI nodes unreachable while they are still in
     use.  Mark them for renaming.  */
  if (!to_remove_edges.is_empty ())
    {
      basic_block prev_bb;

      /* Remove edges.  We've delayed this to not get bogus debug stmts
         during PHI node removal.  */
      for (unsigned i = 0; i < to_remove_edges.length (); ++i)
        remove_edge (to_remove_edges[i]);
      cfg_altered = true;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator
         order.  */
      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
           bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;

          if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
              || !(bb->flags & BB_REACHABLE))
            {
              for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                   gsi_next (&gsi))
                if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
                  {
                    bool found = false;
                    imm_use_iterator iter;

                    FOR_EACH_IMM_USE_STMT (stmt, iter,
                                           gimple_phi_result (gsi.phi ()))
                      {
                        if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
                          continue;
                        if (gimple_code (stmt) == GIMPLE_PHI
                            || gimple_plf (stmt, STMT_NECESSARY))
                          {
                            found = true;
                            BREAK_FROM_IMM_USE_STMT (iter);
                          }
                      }
                    if (found)
                      mark_virtual_phi_result_for_renaming (gsi.phi ());
                  }

              if (!(bb->flags & BB_REACHABLE))
                {
                  /* Speed up the removal of blocks that don't
                     dominate others.  Walking backwards, this should
                     be the common case.  ??? Do we need to recompute
                     dominators because of cfg_altered?  */
                  if (!first_dom_son (CDI_DOMINATORS, bb))
                    delete_basic_block (bb);
                  else
                    {
                      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

                      while (h.length ())
                        {
                          bb = h.pop ();
                          prev_bb = bb->prev_bb;
                          /* Rearrangements to the CFG may have failed
                             to update the dominators tree, so that
                             formerly-dominated blocks are now
                             otherwise reachable.  */
                          if (!!(bb->flags & BB_REACHABLE))
                            continue;
                          delete_basic_block (bb);
                        }

                      h.release ();
                    }
                }
            }
        }
    }

  if (bb_postorder)
    free (bb_postorder);
  bb_postorder = NULL;

  return something_changed;
}

/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
           stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  bitmap_clear (processed);

  worklist.create (64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      delete cd;
      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  worklist.release ();
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
          the dominance info is not invalidated after DCE1.  This is
          not an issue right now because we only run aggressive DCE
          as the last tree SSA pass, but keep this in mind when you
          start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  bool something_changed = false;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve chances that loops will be
     proven finite in testcases such as loop-15.c and loop-24.c.  */
  bool in_loop_pipeline = scev_initialized_p ();
  if (aggressive && ! in_loop_pipeline)
    {
      scev_initialize ();
      loop_optimizer_init (LOOPS_NORMAL
                           | LOOPS_HAVE_RECORDED_EXITS);
    }

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      calculate_dominance_info (CDI_POST_DOMINATORS);
      cd = new control_dependences ();

      visited_control_parents =
        sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_clear (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (aggressive);

  if (aggressive && ! in_loop_pipeline)
    {
      loop_optimizer_finalize ();
      scev_finalize ();
    }

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (aggressive);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  if (something_changed)
    {
      free_numbers_of_iterations_estimates (cfun);
      if (in_loop_pipeline)
        scev_reset ();
      return TODO_update_ssa | TODO_cleanup_cfg;
    }
  return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

namespace {

const pass_data pass_data_dce =
{
  GIMPLE_PASS, /* type */
  "dce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dce : public gimple_opt_pass
{
public:
  pass_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_dce (); }

}; // class pass_dce

} // anon namespace

gimple_opt_pass *
make_pass_dce (gcc::context *ctxt)
{
  return new pass_dce (ctxt);
}

namespace {

const pass_data pass_data_cd_dce =
{
  GIMPLE_PASS, /* type */
  "cddce", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CD_DCE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cd_dce : public gimple_opt_pass
{
public:
  pass_cd_dce (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cd_dce, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cd_dce (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dce != 0; }
  virtual unsigned int execute (function *) { return tree_ssa_cd_dce (); }

}; // class pass_cd_dce

} // anon namespace

gimple_opt_pass *
make_pass_cd_dce (gcc::context *ctxt)
{
  return new pass_cd_dce (ctxt);
}

/* A cheap DCE interface.  WORKLIST is a list of possibly dead stmts and
   is consumed by this function.  The function has linear complexity in
   the number of dead stmts, with a constant factor proportional to the
   average number of SSA use operands per statement.  */

void
simple_dce_from_worklist (bitmap worklist)
{
  while (! bitmap_empty_p (worklist))
    {
      /* Pop item.  */
      unsigned i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);

      tree def = ssa_name (i);
      /* Removed by somebody else or still in use.  */
      if (! def || ! has_zero_uses (def))
        continue;

      gimple *t = SSA_NAME_DEF_STMT (def);
      if (gimple_has_side_effects (t))
        continue;

      /* Add uses to the worklist.  */
      ssa_op_iter iter;
      use_operand_p use_p;
      FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) == SSA_NAME
              && ! SSA_NAME_IS_DEFAULT_DEF (use))
            bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
        }

      /* Remove stmt.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt:");
          print_gimple_stmt (dump_file, t, 0);
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (t);
      if (gimple_code (t) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          gsi_remove (&gsi, true);
          release_defs (t);
        }
    }
}
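
/* A sketch of typical usage (old_lhs here is hypothetical, standing for
   whatever SSA name a caller's transform has just made unused):

     auto_bitmap maybe_dead;
     bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (old_lhs));
     simple_dce_from_worklist (maybe_dead);

   The function then deletes the definition of old_lhs if it is really
   dead and recursively queues the SSA operands that definition used.  */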