/* SSA Dominator optimizations for trees
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "inchash.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "domwalk.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-threadupdate.h"
#include "langhooks.h"
#include "params.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "tree-ssa-dom.h"
#include "inchash.h"
#include "gimplify.h"
#include "tree-cfgcleanup.h"

/* This file implements optimizations on the dominator tree.  */

/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.  */

enum expr_kind
{
  EXPR_SINGLE,
  EXPR_UNARY,
  EXPR_BINARY,
  EXPR_TERNARY,
  EXPR_CALL,
  EXPR_PHI
};

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op;  tree opnd; } unary;
    struct { enum tree_code op;  tree opnd0, opnd1; } binary;
    struct { enum tree_code op;  tree opnd0, opnd1, opnd2; } ternary;
    struct { gcall *fn_from; bool pure; size_t nargs; tree *args; } call;
    struct { size_t nargs; tree *args; } phi;
  } ops;
};
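
/* For illustration (hypothetical SSA names): an assignment

       x_1 = a_2 + b_3;

   is summarized as a hashable_expr with kind == EXPR_BINARY,
   type == TREE_TYPE (x_1), ops.binary.op == PLUS_EXPR and the two
   operand SSA names in opnd0/opnd1.  */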

/* Structure for recording known values of a conditional expression
   at the exits from its block.  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge).  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  */
  vec<cond_equivalence> cond_equivalences;
};

/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;

static vec<std::pair<expr_hash_elt_t, expr_hash_elt_t> > avail_exprs_stack;
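
/* For example (illustrative only): if block B enters "a_1 + b_2" and
   then "c_3 < 0" into the hash table, the stack contains, top first,

       (elt for c_3 < 0,   NULL)
       (elt for a_1 + b_2, NULL)
       (NULL, NULL)             <- marker pushed on entry to B

   so popping back to the marker on exit removes exactly the expressions
   B made available.  A non-null second element of a pair is an older
   table entry to reinstall when the newer one is popped.  */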

/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The virtual operand associated with the nearest dominating stmt
     loading from or storing to expr.  */
  tree vop;

  /* The hash value for RHS.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table.  */
  struct expr_hash_elt *stamp;
};

/* Hashtable helpers.  */

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);
static void free_expr_hash_elt (void *);

struct expr_elt_hasher
{
  typedef expr_hash_elt *value_type;
  typedef expr_hash_elt *compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void remove (value_type &);
};

inline hashval_t
expr_elt_hasher::hash (const value_type &p)
{
  return p->hash;
}

inline bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = &p1->expr;
  const struct expr_hash_elt *stamp1 = p1->stamp;
  const struct hashable_expr *expr2 = &p2->expr;
  const struct expr_hash_elt *stamp2 = p2->stamp;

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash != p2->hash)
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Delete an expr_hash_elt and reclaim its storage.  */

inline void
expr_elt_hasher::remove (value_type &element)
{
  free_expr_hash_elt (element);
}

/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly, as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static hash_table<expr_elt_hasher> *avail_exprs;
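
/* A sketch of the redundancy elimination this enables (hypothetical
   SSA names):

       x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;

   the second computation of a_2 + b_3 finds the entry recorded for
   x_1, so y_4 can be replaced by a copy of x_1.  */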

/* Unwindable const/copy equivalences.  */
static const_and_copies *const_and_copies;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
static vec<gimple> need_noreturn_fixup;

/* Statistics for dominator optimizations.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

static struct opt_stats_d opt_stats;

/* Local functions.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static void htab_statistics (FILE *,
                             const hash_table<expr_elt_hasher> &);
static void record_cond (cond_equivalence *);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void remove_local_expressions_from_table (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);


/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
                         struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);

      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->vop = gimple_vuse (stmt);
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Given a hashable_expr expression EXPR and an LHS,
   initialize the hash table element pointed to by ELEMENT.  */

static void
initialize_hash_element_from_expr (struct hashable_expr *expr,
                                   tree lhs,
                                   struct expr_hash_elt *element)
{
  element->expr = *expr;
  element->lhs = lhs;
  element->vop = NULL_TREE;
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

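/* Illustrative note: hashing the operand pair of "a_1 + b_2" through
   add_expr_commutative yields the same value as for "b_2 + a_1", so
   both orderings of a commutative expression fall into the same hash
   bucket, matching the symmetric treatment in hashable_expr_equal_p.  */
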
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Print a diagnostic dump of an expression hash table entry.  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  fprintf (stream, "STMT ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (element->expr.ops.unary.op));
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", get_tree_code_name (element->expr.ops.binary.op));
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (element->expr.ops.ternary.op));
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = element->expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = element->expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = element->expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, element->expr.ops.phi.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (element->vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, element->vop, 0);
    }

  fprintf (stream, "\n");
}

/* Delete variable sized pieces of the expr_hash_elt ELEMENT.  */

static void
free_expr_hash_elt_contents (struct expr_hash_elt *element)
{
  if (element->expr.kind == EXPR_CALL)
    free (element->expr.ops.call.args);
  else if (element->expr.kind == EXPR_PHI)
    free (element->expr.ops.phi.args);
}

/* Delete an expr_hash_elt and reclaim its storage.  */

static void
free_expr_hash_elt (void *elt)
{
  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);
  free_expr_hash_elt_contents (element);
  free (element);
}

/* Allocate an EDGE_INFO for edge E and attach it to E.
   Return the new EDGE_INFO structure.  */

static struct edge_info *
allocate_edge_info (edge e)
{
  struct edge_info *edge_info;

  edge_info = XCNEW (struct edge_info);

  e->aux = edge_info;
  return edge_info;
}

/* Free all EDGE_INFO structures associated with edges in the CFG.
   If a particular edge can be threaded, copy the redirection
   target from the EDGE_INFO structure into the edge's AUX field
   as required by code to update the CFG and SSA graph for
   jump threading.  */

static void
free_all_edge_infos (void)
{
  basic_block bb;
  edge_iterator ei;
  edge e;

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          struct edge_info *edge_info = (struct edge_info *) e->aux;

          if (edge_info)
            {
              edge_info->cond_equivalences.release ();
              free (edge_info);
              e->aux = NULL;
            }
        }
    }
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = boolean_true_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                     &edge_info->cond_equivalences);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
                                 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  edge_info->cond_equivalences.safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  edge_info->cond_equivalences.safe_push (c);
}
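
/* As an illustration: for an integer comparison "a_1 < b_2" on the
   true edge, the calls above also record that "a_1 <= b_2" and
   "a_1 != b_2" are true, so dominated uses of those weaker conditions
   can be folded without re-deriving them.  */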

/* We have finished optimizing BB, record any information implied by
   taking a specific outgoing edge from BB.  */

static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
          gswitch *switch_stmt = as_a <gswitch *> (stmt);
          tree index = gimple_switch_index (switch_stmt);

          if (TREE_CODE (index) == SSA_NAME)
            {
              int i;
              int n_labels = gimple_switch_num_labels (switch_stmt);
              tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
              edge e;
              edge_iterator ei;

              for (i = 0; i < n_labels; i++)
                {
                  tree label = gimple_switch_label (switch_stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
                    info[target_bb->index] = error_mark_node;
                  else
                    info[target_bb->index] = label;
                }

              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
                  tree label = info[target_bb->index];

                  if (label != NULL && label != error_mark_node)
                    {
                      tree x = fold_convert_loc (loc, TREE_TYPE (index),
                                                 CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
                      edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
              free (info);
            }
        }

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          edge true_edge;
          edge false_edge;

          tree op0 = gimple_cond_lhs (stmt);
          tree op1 = gimple_cond_rhs (stmt);
          enum tree_code code = gimple_cond_code (stmt);

          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

          /* Special case comparing booleans against a constant as we
             know the value of OP0 on both arms of the branch.  i.e., we
             can record an equivalence for OP0 rather than COND.  */
          if ((code == EQ_EXPR || code == NE_EXPR)
              && TREE_CODE (op0) == SSA_NAME
              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
              && is_gimple_min_invariant (op1))
            {
              if (code == EQ_EXPR)
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);
                }
              else
                {
                  edge_info = allocate_edge_info (true_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_true_node
                                    : boolean_false_node);

                  edge_info = allocate_edge_info (false_edge);
                  edge_info->lhs = op0;
                  edge_info->rhs = (integer_zerop (op1)
                                    ? boolean_false_node
                                    : boolean_true_node);
                }
            }
          else if (is_gimple_min_invariant (op0)
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (op0)
                    && real_zerop (op0));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op1;
                  edge_info->rhs = op0;
                }
            }

          else if (TREE_CODE (op0) == SSA_NAME
                   && (TREE_CODE (op1) == SSA_NAME
                       || is_gimple_min_invariant (op1)))
            {
              tree cond = build2 (code, boolean_type_node, op0, op1);
              tree inverted = invert_truthvalue_loc (loc, cond);
              bool can_infer_simple_equiv
                = !(HONOR_SIGNED_ZEROS (op1)
                    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
              struct edge_info *edge_info;

              edge_info = allocate_edge_info (true_edge);
              record_conditions (edge_info, cond, inverted);

              if (can_infer_simple_equiv && code == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }

              edge_info = allocate_edge_info (false_edge);
              record_conditions (edge_info, inverted, cond);

              if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
                {
                  edge_info->lhs = op0;
                  edge_info->rhs = op1;
                }
            }
        }

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
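
/* Illustration (hypothetical names): for "switch (i_5)" the edge to a
   simple "case 3:" block records the equivalence i_5 == 3, and for a
   boolean test "if (x_2 == 0)" the true edge records x_2 as
   boolean_false_node directly rather than recording the condition.  */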


class dom_opt_dom_walker : public dom_walker
{
public:
  dom_opt_dom_walker (cdi_direction direction)
    : dom_walker (direction), m_dummy_cond (NULL) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

private:
  void thread_across_edge (edge);

  gcond *m_dummy_cond;
};

/* Jump threading, redundancy elimination and const/copy propagation.

   This pass may expose new symbols that need to be renamed into SSA.  For
   every new symbol exposed, its corresponding bit will be set in
   VARS_TO_RENAME.  */

namespace {

const pass_data pass_data_dominator =
{
  GIMPLE_PASS, /* type */
  "dom", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_dominator : public gimple_opt_pass
{
public:
  pass_dominator (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dominator, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dominator (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dom != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dominator

unsigned int
pass_dominator::execute (function *fun)
{
  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = new hash_table<expr_elt_hasher> (1024);
  avail_exprs_stack.create (20);
  const_and_copies = new class const_and_copies (dump_file, dump_flags);
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_noreturn_fixup.create (0);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.

     TODO: We don't need to set LOOPS_HAVE_PREHEADERS generally, but due
     to several overly conservative bail-outs in jump threading, case
     gcc.dg/tree-ssa/pr21417.c can't be threaded if loop preheader is
     missing.  We should improve jump threading in future then
     LOOPS_HAVE_PREHEADERS won't be needed here.  */
  loop_optimizer_init (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  dom_opt_dom_walker (CDI_DOMINATORS).walk (fun->cfg->x_entry_block_ptr);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    FOR_EACH_BB_FN (bb, fun)
      {
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
         needing EH cleanup; the new successor of these blocks, which
         has inherited from the original block, needs the cleanup.
         Don't clear bits in the bitmap, as that can break the bitmap
         iterator.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
          if (bb == NULL)
            continue;
          while (single_succ_p (bb)
                 && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            bb = single_succ (bb);
          if (bb == EXIT_BLOCK_PTR_FOR_FN (fun))
            continue;
          if ((unsigned) bb->index != i)
            bitmap_set_bit (need_eh_cleanup, bb->index);
        }

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_clear (need_eh_cleanup);
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk or before
     jump threading finished.  Do this in reverse order so we don't
     inadvertently remove a stmt we want to fixup by visiting a dominating
     now noreturn call first.  */
  while (!need_noreturn_fixup.is_empty ())
    {
      gimple stmt = need_noreturn_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (fun, "Redundant expressions eliminated",
                            opt_stats.num_re);
  statistics_counter_event (fun, "Constants propagated",
                            opt_stats.num_const_prop);
  statistics_counter_event (fun, "Copies propagated",
                            opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  delete avail_exprs;
  avail_exprs = NULL;

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);
  need_noreturn_fixup.release ();
  avail_exprs_stack.release ();
  delete const_and_copies;

  /* Free the value-handle array.  */
  threadedge_finalize_values ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_dominator (gcc::context *ctxt)
{
  return new pass_dominator (ctxt);
}

/* Given a conditional statement CONDSTMT, convert the
   condition to a canonical form.  */

static void
canonicalize_comparison (gcond *condstmt)
{
  tree op0;
  tree op1;
  enum tree_code code;

  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);

  op0 = gimple_cond_lhs (condstmt);
  op1 = gimple_cond_rhs (condstmt);

  code = gimple_cond_code (condstmt);

  /* If it would be profitable to swap the operands, then do so to
     canonicalize the statement, enabling better optimization.

     By placing canonicalization of such expressions here we
     transparently keep statements in canonical form, even
     when the statement is modified.  */
  if (tree_swap_operands_p (op0, op1, false))
    {
      /* For relationals we need to swap the operands
         and change the code.  */
      if (code == LT_EXPR
          || code == GT_EXPR
          || code == LE_EXPR
          || code == GE_EXPR)
        {
          code = swap_tree_comparison (code);

          gimple_cond_set_code (condstmt, code);
          gimple_cond_set_lhs (condstmt, op1);
          gimple_cond_set_rhs (condstmt, op0);

          update_stmt (condstmt);
        }
    }
}
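
/* For instance (illustrative): "if (5 > a_1)" is canonicalized to
   "if (a_1 < 5)", so later hash table lookups see one spelling of the
   comparison instead of two.  */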

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Remove all the expressions in LOCALS from TABLE, stopping when there are
   LIMIT entries left in LOCALs.  */

static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (avail_exprs_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim
        = avail_exprs_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          print_expr_hash_elt (dump_file, victim.first);
        }

      slot = avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          free_expr_hash_elt (*slot);
          *slot = victim.second;
        }
      else
        avail_exprs->clear_slot (slot);
    }
}

/* A trivial wrapper so that we can present the generic jump
   threading code with a simple API for simplifying statements.  */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
                                  gimple within_stmt ATTRIBUTE_UNUSED)
{
  return lookup_avail_expr (stmt, false);
}

/* Record into the equivalence tables any equivalences implied by
   traversing edge E (which are cached in E->aux).

   Callers are responsible for managing the unwinding markers.  */
static void
record_temporary_equivalences (edge e)
{
  int i;
  struct edge_info *edge_info = (struct edge_info *) e->aux;

  /* If we have info associated with this edge, record it into
     our equivalence tables.  */
  if (edge_info)
    {
      cond_equivalence *eq;
      tree lhs = edge_info->lhs;
      tree rhs = edge_info->rhs;

      /* If we have a simple NAME = VALUE equivalence, record it.  */
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        const_and_copies->record_const_or_copy (lhs, rhs);

      /* If we have 0 = COND or 1 = COND equivalences, record them
         into our expression hash tables.  */
      for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
        record_cond (eq);
    }
}

/* Wrapper for common code to attempt to thread an edge.  For example,
   it handles lazily building the dummy condition and the bookkeeping
   when jump threading is successful.  */

void
dom_opt_dom_walker::thread_across_edge (edge e)
{
  if (! m_dummy_cond)
    m_dummy_cond =
        gimple_build_cond (NE_EXPR,
                           integer_zero_node, integer_zero_node,
                           NULL, NULL);

  /* Push a marker on both stacks so we can unwind the tables back to their
     current state.  */
  avail_exprs_stack.safe_push
    (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
  const_and_copies->push_marker ();

  /* Traversing E may result in equivalences we can utilize.  */
  record_temporary_equivalences (e);

  /* With all the edge equivalences in the tables, go ahead and attempt
     to thread through E->dest.  */
  ::thread_across_edge (m_dummy_cond, e, false,
                        const_and_copies,
                        simplify_stmt_for_jump_threading);

  /* And restore the various tables to their state before
     we threaded this edge.

     XXX The code in tree-ssa-threadedge.c will restore the state of
     the const_and_copies table.  We just have to restore the expression
     table.  */
  remove_local_expressions_from_table ();
}
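
/* Sketch of the effect (illustrative): if traversing E makes
   "x_1 == 0" known and E->dest ends in "if (x_1 == 0)", the branch is
   decided at compile time, so E can be threaded directly to the taken
   successor, duplicating E->dest if it has other predecessors.  */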

/* PHI nodes can create equivalences too.

   Ignoring any alternatives which are the same as the result, if
   all the alternatives are equal, then the PHI node creates an
   equivalence.  */

static void
record_equivalences_from_phis (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree t = gimple_phi_arg_def (phi, i);

          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
             can simply compare pointers.  */
          if (lhs == t)
            continue;

          /* If we have not processed an alternative yet, then set
             RHS to this alternative.  */
          if (rhs == NULL)
            rhs = t;
          /* If we have processed an alternative (stored in RHS), then
             see if it is equal to this one.  If it isn't, then stop
             the search.  */
          else if (! operand_equal_for_phi_arg_p (rhs, t))
            break;
        }

      /* If we had no interesting alternatives, then all the RHS alternatives
         must have been the same as LHS.  */
      if (!rhs)
        rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
         breaking out of the loop, then we have a PHI which may create
         a useful equivalence.  We do not need to record unwind data for
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
      if (i == gimple_phi_num_args (phi)
          && may_propagate_copy (lhs, rhs))
        set_ssa_name_value (lhs, rhs);
    }
}

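/* For example (hypothetical SSA names): "x_3 = PHI <y_2, y_2, x_3>"
   makes x_3 equivalent to y_2; every alternative that is not x_3
   itself is y_2, so uses of x_3 may be treated as uses of y_2.  */
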
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.  */
static edge
single_incoming_edge_ignoring_loop_edges (basic_block bb)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
         the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
        continue;

      /* If we have already seen a non-loop edge, then we must have
         multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
        return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
         it.  */
      retval = e;
    }

  return retval;
}

/* Record any equivalences created by the incoming edge to BB.  If BB
   has more than one incoming edge, then no equivalence is created.  */

static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
          cond_equivalence *eq;

          if (lhs)
            record_equality (lhs, rhs);

          /* If LHS is an SSA_NAME and RHS is a constant integer and LHS was
             set via a widening type conversion, then we may be able to record
             additional equivalences.  */
          if (lhs
              && TREE_CODE (lhs) == SSA_NAME
              && is_gimple_constant (rhs)
              && TREE_CODE (rhs) == INTEGER_CST)
            {
              gimple defstmt = SSA_NAME_DEF_STMT (lhs);

              if (defstmt
                  && is_gimple_assign (defstmt)
                  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (defstmt)))
                {
                  tree old_rhs = gimple_assign_rhs1 (defstmt);

                  /* If the conversion widens the original value and
                     the constant is in the range of the type of OLD_RHS,
                     then convert the constant and record the equivalence.

                     Note that int_fits_type_p does not check the precision
                     if the upper and lower bounds are OK.  */
                  if (INTEGRAL_TYPE_P (TREE_TYPE (old_rhs))
                      && (TYPE_PRECISION (TREE_TYPE (lhs))
                          > TYPE_PRECISION (TREE_TYPE (old_rhs)))
                      && int_fits_type_p (rhs, TREE_TYPE (old_rhs)))
                    {
                      tree newval = fold_convert (TREE_TYPE (old_rhs), rhs);
                      record_equality (old_rhs, newval);
                    }
                }
            }

          for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
            record_cond (eq);
        }
    }
}
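
/* Illustration of the widening case (hypothetical types): given
   "l_2 = (long) s_1" with s_1 a short, an edge where "l_2 == 42" holds
   lets us also record "s_1 == 42", since 42 fits in the type of s_1.  */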

/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements:                   %6ld\n\n",
           opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
           opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, "    avail_exprs: ");
  htab_statistics (file, *avail_exprs);
}


/* Dump SSA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}

/* Dump statistics for the hash table HTAB.  */

static void
htab_statistics (FILE *file, const hash_table<expr_elt_hasher> &htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) htab.size (),
           (long) htab.elements (),
           htab.collisions ());
}

20e38fcf
JL
1690/* Enter condition equivalence into the expression hash table.
1691 This indicates that a conditional expression has a known
1692 boolean value. */
efea75f9 1693
20e38fcf
JL
1694static void
1695record_cond (cond_equivalence *p)
1696{
1697 struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
1698 expr_hash_elt **slot;
726a989a 1699
1700 initialize_hash_element_from_expr (&p->cond, p->value, element);
1701
1702 slot = avail_exprs->find_slot_with_hash (element, element->hash, INSERT);
1703 if (*slot == NULL)
1704 {
1705 *slot = element;
1706
1707 if (dump_file && (dump_flags & TDF_DETAILS))
1708 {
1709 fprintf (dump_file, "1>>> ");
1710 print_expr_hash_elt (dump_file, element);
1711 }
1712
1713 avail_exprs_stack.safe_push
1714 (std::pair<expr_hash_elt_t, expr_hash_elt_t> (element, NULL));
1715 }
1716 else
1717 free_expr_hash_elt (element);
1718}
1719
1720/* Return the loop depth of the basic block of the defining statement of X.
1721 This number should not be treated as absolutely correct because the loop
1722 information may not be completely up-to-date when dom runs. However, it
1723 will be relatively correct, and as more passes are taught to keep loop info
1724 up to date, the result will become more and more accurate. */
1725
1726static int
1727loop_depth_of_name (tree x)
1728{
1729 gimple defstmt;
1730 basic_block defbb;
1731
1732 /* If it's not an SSA_NAME, we have no clue where the definition is. */
1733 if (TREE_CODE (x) != SSA_NAME)
1734 return 0;
1735
1736 /* Otherwise return the loop depth of the defining statement's bb.
1737 Note that there may not actually be a bb for this statement, if the
1738 ssa_name is live on entry. */
1739 defstmt = SSA_NAME_DEF_STMT (x);
1740 defbb = gimple_bb (defstmt);
1741 if (!defbb)
1742 return 0;
1743
1744 return bb_loop_depth (defbb);
1745}
1746
 1747/* Record an equality between X and Y, assumed to be the two operands
 1748 of an EQ_EXPR. This constrains the cases in which we may treat this as assignment. */
1749
1750static void
b5fefcf6 1751record_equality (tree x, tree y)
1752{
1753 tree prev_x = NULL, prev_y = NULL;
1754
1755 if (TREE_CODE (x) == SSA_NAME)
3aecd08b 1756 prev_x = SSA_NAME_VALUE (x);
6de9cd9a 1757 if (TREE_CODE (y) == SSA_NAME)
3aecd08b 1758 prev_y = SSA_NAME_VALUE (y);
6de9cd9a 1759
1760 /* If one of the previous values is invariant, or invariant in more loops
1761 (by depth), then use that.
1762 Otherwise it doesn't matter which value we choose, just so
1763 long as we canonicalize on one value. */
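  /* For example (an illustrative sketch): when recording x_1 == y_2
     where y_2 currently has the known invariant value 5, we canonicalize
     so that the SSA_NAME x_1 maps to the invariant 5, rather than
     mapping one SSA_NAME to another arbitrarily.  */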
ad6003f2 1764 if (is_gimple_min_invariant (y))
6de9cd9a 1765 ;
1766 else if (is_gimple_min_invariant (x)
1767 /* ??? When threading over backedges the following is important
1768 for correctness. See PR61757. */
1769 || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
6de9cd9a 1770 prev_x = x, x = y, y = prev_x, prev_x = prev_y;
ad6003f2 1771 else if (prev_x && is_gimple_min_invariant (prev_x))
6de9cd9a 1772 x = y, y = prev_x, prev_x = prev_y;
c9145754 1773 else if (prev_y)
1774 y = prev_y;
1775
1776 /* After the swapping, we must have one SSA_NAME. */
1777 if (TREE_CODE (x) != SSA_NAME)
1778 return;
1779
1780 /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
1781 variable compared against zero. If we're honoring signed zeros,
1782 then we cannot record this value unless we know that the value is
1ea7e6ad 1783 nonzero. */
3d3dbadd 1784 if (HONOR_SIGNED_ZEROS (x)
1785 && (TREE_CODE (y) != REAL_CST
1786 || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
1787 return;
1788
f6c72af4 1789 const_and_copies->record_const_or_copy (x, y, prev_x);
1790}
1791
1792/* Returns true when STMT is a simple iv increment. It detects the
1793 following situation:
b8698a0f 1794
1795 i_1 = phi (..., i_2)
1796 i_2 = i_1 +/- ... */
1797
601f64e2 1798bool
726a989a 1799simple_iv_increment_p (gimple stmt)
f67e783f 1800{
601f64e2 1801 enum tree_code code;
1802 tree lhs, preinc;
1803 gimple phi;
1804 size_t i;
f67e783f 1805
726a989a 1806 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1807 return false;
1808
726a989a 1809 lhs = gimple_assign_lhs (stmt);
1810 if (TREE_CODE (lhs) != SSA_NAME)
1811 return false;
1812
1813 code = gimple_assign_rhs_code (stmt);
1814 if (code != PLUS_EXPR
1815 && code != MINUS_EXPR
1816 && code != POINTER_PLUS_EXPR)
1817 return false;
1818
726a989a 1819 preinc = gimple_assign_rhs1 (stmt);
1820 if (TREE_CODE (preinc) != SSA_NAME)
1821 return false;
1822
1823 phi = SSA_NAME_DEF_STMT (preinc);
726a989a 1824 if (gimple_code (phi) != GIMPLE_PHI)
1825 return false;
1826
1827 for (i = 0; i < gimple_phi_num_args (phi); i++)
1828 if (gimple_phi_arg_def (phi, i) == lhs)
1829 return true;
1830
1831 return false;
1832}
1833
ff2ad0f7 1834/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
b8698a0f 1835 known value for that SSA_NAME (or NULL if no value is known).
ff2ad0f7 1836
1837 Propagate values from CONST_AND_COPIES into the PHI nodes of the
1838 successors of BB. */
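/* An illustrative sketch (hypothetical names): if the edge BB -> DEST
   carries the equivalence x_1 == 0, then a PHI in DEST such as

     y_2 = PHI <x_1 (BB), z_3 (OTHER)>

   may have its x_1 argument replaced by 0, even though DEST itself is
   not dominated by BB.  */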
1839
1840static void
b16caf72 1841cprop_into_successor_phis (basic_block bb)
1842{
1843 edge e;
628f6a4e 1844 edge_iterator ei;
ff2ad0f7 1845
628f6a4e 1846 FOR_EACH_EDGE (e, ei, bb->succs)
ff2ad0f7 1847 {
0492baf2 1848 int indx;
538dd0b7 1849 gphi_iterator gsi;
1850
1851 /* If this is an abnormal edge, then we do not want to copy propagate
1852 into the PHI alternative associated with this edge. */
1853 if (e->flags & EDGE_ABNORMAL)
1854 continue;
1855
1856 gsi = gsi_start_phis (e->dest);
1857 if (gsi_end_p (gsi))
1858 continue;
1859
1860 /* We may have an equivalence associated with this edge. While
 1861 we cannot propagate it into non-dominated blocks, we can
 1862 propagate it into PHIs in non-dominated blocks. */
1863
1864 /* Push the unwind marker so we can reset the const and copies
1865 table back to its original state after processing this edge. */
f6c72af4 1866 const_and_copies->push_marker ();
1867
1868 /* Extract and record any simple NAME = VALUE equivalences.
1869
1870 Don't bother with [01] = COND equivalences, they're not useful
1871 here. */
1872 struct edge_info *edge_info = (struct edge_info *) e->aux;
1873 if (edge_info)
1874 {
1875 tree lhs = edge_info->lhs;
1876 tree rhs = edge_info->rhs;
1877
1878 if (lhs && TREE_CODE (lhs) == SSA_NAME)
f6c72af4 1879 const_and_copies->record_const_or_copy (lhs, rhs);
1880 }
1881
0492baf2 1882 indx = e->dest_idx;
726a989a 1883 for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
ff2ad0f7 1884 {
c22940cd 1885 tree new_val;
ff2ad0f7 1886 use_operand_p orig_p;
c22940cd 1887 tree orig_val;
538dd0b7 1888 gphi *phi = gsi.phi ();
ff2ad0f7 1889
1890 /* The alternative may be associated with a constant, so verify
1891 it is an SSA_NAME before doing anything with it. */
1892 orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
1893 orig_val = get_use_from_ptr (orig_p);
c22940cd 1894 if (TREE_CODE (orig_val) != SSA_NAME)
1895 continue;
1896
1897 /* If we have *ORIG_P in our constant/copy table, then replace
1898 ORIG_P with its value in our constant/copy table. */
1899 new_val = SSA_NAME_VALUE (orig_val);
1900 if (new_val
1901 && new_val != orig_val
1902 && (TREE_CODE (new_val) == SSA_NAME
1903 || is_gimple_min_invariant (new_val))
1904 && may_propagate_copy (orig_val, new_val))
1905 propagate_value (orig_p, new_val);
ff2ad0f7 1906 }
8d34e421 1907
f6c72af4 1908 const_and_copies->pop_to_marker ();
1909 }
1910}
1911
1912void
1913dom_opt_dom_walker::before_dom_children (basic_block bb)
6de9cd9a 1914{
1915 gimple_stmt_iterator gsi;
1916
1917 if (dump_file && (dump_flags & TDF_DETAILS))
1918 fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);
1919
1920 /* Push a marker on the stacks of local information so that we know how
1921 far to unwind when we finalize this block. */
1922 avail_exprs_stack.safe_push
1923 (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
f6c72af4 1924 const_and_copies->push_marker ();
1925
1926 record_equivalences_from_incoming_edge (bb);
1927
1928 /* PHI nodes can create equivalences too. */
1929 record_equivalences_from_phis (bb);
1930
1931 /* Create equivalences from redundant PHIs. PHIs are only truly
1932 redundant when they exist in the same block, so push another
1933 marker and unwind right afterwards. */
1934 avail_exprs_stack.safe_push
1935 (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
1936 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1937 eliminate_redundant_computations (&gsi);
1938 remove_local_expressions_from_table ();
1939
1940 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1941 optimize_stmt (bb, gsi);
1942
1943 /* Now prepare to process dominated blocks. */
efea75f9 1944 record_edge_info (bb);
b16caf72 1945 cprop_into_successor_phis (bb);
1946}
1947
 1948/* We have finished processing the dominator children of BB; perform
1949 any finalization actions in preparation for leaving this node in
1950 the dominator tree. */
1951
1952void
1953dom_opt_dom_walker::after_dom_children (basic_block bb)
1954{
1955 gimple last;
1956
1957 /* If we have an outgoing edge to a block with multiple incoming and
1958 outgoing edges, then we may be able to thread the edge, i.e., we
1959 may be able to statically determine which of the outgoing edges
1960 will be traversed when the incoming edge from BB is traversed. */
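  /* A hypothetical sketch: if BB records the equivalence a_1 == 0 and
     its successor merely re-tests "if (a_1 == 0)", the outcome of that
     test is statically known, so the edge can be threaded directly to
     the appropriate destination.  */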
1961 if (single_succ_p (bb)
1962 && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
1963 && potentially_threadable_block (single_succ (bb)))
1964 {
4d9192b5 1965 thread_across_edge (single_succ_edge (bb));
1966 }
1967 else if ((last = last_stmt (bb))
1968 && gimple_code (last) == GIMPLE_COND
1969 && EDGE_COUNT (bb->succs) == 2
1970 && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
1971 && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
1972 {
1973 edge true_edge, false_edge;
1974
1975 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1976
1977 /* Only try to thread the edge if it reaches a target block with
1978 more than one predecessor and more than one successor. */
1979 if (potentially_threadable_block (true_edge->dest))
83ae86f5 1980 thread_across_edge (true_edge);
1981
1982 /* Similarly for the ELSE arm. */
1983 if (potentially_threadable_block (false_edge->dest))
83ae86f5 1984 thread_across_edge (false_edge);
ccf5c864 1985
1986 }
1987
83ae86f5 1988 /* These remove expressions local to BB from the tables. */
ccf5c864 1989 remove_local_expressions_from_table ();
f6c72af4 1990 const_and_copies->pop_to_marker ();
1991}
1992
1993/* Search for redundant computations in STMT. If any are found, then
1994 replace them with the variable holding the result of the computation.
1995
1996 If safe, record this expression into the available expression hash
1997 table. */
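/* For instance (illustrative only, hypothetical SSA names):

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   the second computation is found in the hash table and the statement
   is rewritten to y_4 = x_1.  */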
1998
87c93592 1999static void
726a989a 2000eliminate_redundant_computations (gimple_stmt_iterator* gsi)
6de9cd9a 2001{
726a989a 2002 tree expr_type;
6de9cd9a 2003 tree cached_lhs;
13a3e5b6 2004 tree def;
726a989a 2005 bool insert = true;
726a989a 2006 bool assigns_var_p = false;
6de9cd9a 2007
2008 gimple stmt = gsi_stmt (*gsi);
2009
2010 if (gimple_code (stmt) == GIMPLE_PHI)
2011 def = gimple_phi_result (stmt);
2012 else
2013 def = gimple_get_lhs (stmt);
2014
 2015 /* Certain expressions on the RHS can be optimized away, but cannot
471854f8 2016 themselves be entered into the hash tables. */
ff88c5aa 2017 if (! def
2018 || TREE_CODE (def) != SSA_NAME
2019 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
5006671f 2020 || gimple_vdef (stmt)
2021 /* Do not record equivalences for increments of ivs. This would create
2022 overlapping live ranges for a very questionable gain. */
2023 || simple_iv_increment_p (stmt))
2024 insert = false;
2025
2026 /* Check if the expression has been computed before. */
48732f23 2027 cached_lhs = lookup_avail_expr (stmt, insert);
6de9cd9a 2028
2029 opt_stats.num_exprs_considered++;
2030
2031 /* Get the type of the expression we are trying to optimize. */
2032 if (is_gimple_assign (stmt))
019b02f1 2033 {
2034 expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
2035 assigns_var_p = true;
019b02f1 2036 }
2037 else if (gimple_code (stmt) == GIMPLE_COND)
2038 expr_type = boolean_type_node;
2039 else if (is_gimple_call (stmt))
019b02f1 2040 {
2041 gcc_assert (gimple_call_lhs (stmt));
2042 expr_type = TREE_TYPE (gimple_call_lhs (stmt));
2043 assigns_var_p = true;
019b02f1 2044 }
2045 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
2046 expr_type = TREE_TYPE (gimple_switch_index (swtch_stmt));
2047 else if (gimple_code (stmt) == GIMPLE_PHI)
2048 /* We can't propagate into a phi, so the logic below doesn't apply.
2049 Instead record an equivalence between the cached LHS and the
2050 PHI result of this statement, provided they are in the same block.
2051 This should be sufficient to kill the redundant phi. */
2052 {
2053 if (def && cached_lhs)
f6c72af4 2054 const_and_copies->record_const_or_copy (def, cached_lhs);
2055 return;
2056 }
2057 else
2058 gcc_unreachable ();
2059
2060 if (!cached_lhs)
87c93592 2061 return;
2062
2063 /* It is safe to ignore types here since we have already done
2064 type checking in the hashing and equality routines. In fact
2065 type checking here merely gets in the way of constant
2066 propagation. Also, make sure that it is safe to propagate
726a989a
RB
2067 CACHED_LHS into the expression in STMT. */
2068 if ((TREE_CODE (cached_lhs) != SSA_NAME
2069 && (assigns_var_p
2070 || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
2071 || may_propagate_copy_into_stmt (stmt, cached_lhs))
2072 {
2073 gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
2074 || is_gimple_min_invariant (cached_lhs));
726a989a 2075
2076 if (dump_file && (dump_flags & TDF_DETAILS))
2077 {
2078 fprintf (dump_file, " Replaced redundant expr '");
726a989a 2079 print_gimple_expr (dump_file, stmt, 0, dump_flags);
2080 fprintf (dump_file, "' with '");
2081 print_generic_expr (dump_file, cached_lhs, dump_flags);
726a989a 2082 fprintf (dump_file, "'\n");
2083 }
2084
2085 opt_stats.num_re++;
b8698a0f 2086
2087 if (assigns_var_p
2088 && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
2089 cached_lhs = fold_convert (expr_type, cached_lhs);
6de9cd9a 2090
2091 propagate_tree_value_into_stmt (gsi, cached_lhs);
2092
2093 /* Since it is always necessary to mark the result as modified,
2094 perhaps we should move this into propagate_tree_value_into_stmt
2095 itself. */
2096 gimple_set_modified (gsi_stmt (*gsi), true);
2097 }
2098}
2099
726a989a 2100/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
2101 the available expressions table or the const_and_copies table.
2102 Detect and record those equivalences. */
2103/* We handle only very simple copy equivalences here. The heavy
 2104 lifting is done by eliminate_redundant_computations. */
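/* A sketch of the simple case handled here: after "x_1 = y_2;" we
   merely set SSA_NAME_VALUE (x_1) to y_2 so that dominated uses of
   x_1 can be rewritten to use y_2.  */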
2105
2106static void
726a989a 2107record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
6de9cd9a 2108{
2109 tree lhs;
2110 enum tree_code lhs_code;
6de9cd9a 2111
2112 gcc_assert (is_gimple_assign (stmt));
2113
2114 lhs = gimple_assign_lhs (stmt);
2115 lhs_code = TREE_CODE (lhs);
6de9cd9a 2116
726a989a 2117 if (lhs_code == SSA_NAME
7e673273 2118 && gimple_assign_single_p (stmt))
2119 {
2120 tree rhs = gimple_assign_rhs1 (stmt);
b8698a0f 2121
2122 /* If the RHS of the assignment is a constant or another variable that
2123 may be propagated, register it in the CONST_AND_COPIES table. We
2124 do not need to record unwind data for this, since this is a true
1ea7e6ad 2125 assignment and not an equivalence inferred from a comparison. All
6de9cd9a
DN
2126 uses of this ssa name are dominated by this assignment, so unwinding
2127 just costs time and space. */
2128 if (may_optimize_p
2129 && (TREE_CODE (rhs) == SSA_NAME
2130 || is_gimple_min_invariant (rhs)))
2131 {
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 {
2134 fprintf (dump_file, "==== ASGN ");
2135 print_generic_expr (dump_file, lhs, 0);
2136 fprintf (dump_file, " = ");
2137 print_generic_expr (dump_file, rhs, 0);
2138 fprintf (dump_file, "\n");
2139 }
2140
448ee662 2141 set_ssa_name_value (lhs, rhs);
726a989a 2142 }
2143 }
2144
2145 /* Make sure we can propagate &x + CST. */
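  /* Illustrative sketch: "p_1 = &x p+ 4" is recorded with a value of
     roughly "&MEM[&x + 4]", so later dereferences of p_1 can be
     resolved.  (The exact dump form of the MEM_REF may differ.)  */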
2146 if (lhs_code == SSA_NAME
2147 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2148 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR
2149 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
2150 {
2151 tree op0 = gimple_assign_rhs1 (stmt);
2152 tree op1 = gimple_assign_rhs2 (stmt);
2153 tree new_rhs
2154 = build_fold_addr_expr (fold_build2 (MEM_REF,
2155 TREE_TYPE (TREE_TYPE (op0)),
2156 unshare_expr (op0),
2157 fold_convert (ptr_type_node,
2158 op1)));
2159 if (dump_file && (dump_flags & TDF_DETAILS))
2160 {
2161 fprintf (dump_file, "==== ASGN ");
2162 print_generic_expr (dump_file, lhs, 0);
2163 fprintf (dump_file, " = ");
2164 print_generic_expr (dump_file, new_rhs, 0);
2165 fprintf (dump_file, "\n");
2166 }
2167
2168 set_ssa_name_value (lhs, new_rhs);
2169 }
2170
2171 /* A memory store, even an aliased store, creates a useful
2172 equivalence. By exchanging the LHS and RHS, creating suitable
2173 vops and recording the result in the available expression table,
2174 we may be able to expose more redundant loads. */
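  /* Illustrative sketch: after the store "*p_1 = x_2;" we enter the
     artificial statement "x_2 = *p_1" into the table, so that a later
     load "y_3 = *p_1" can be rewritten to "y_3 = x_2".  */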
2175 if (!gimple_has_volatile_ops (stmt)
2176 && gimple_references_memory_p (stmt)
2177 && gimple_assign_single_p (stmt)
2178 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2179 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2180 && !is_gimple_reg (lhs))
2181 {
726a989a 2182 tree rhs = gimple_assign_rhs1 (stmt);
538dd0b7 2183 gassign *new_stmt;
6de9cd9a 2184
cf3135aa 2185 /* Build a new statement with the RHS and LHS exchanged. */
2186 if (TREE_CODE (rhs) == SSA_NAME)
2187 {
2188 /* NOTE tuples. The call to gimple_build_assign below replaced
2189 a call to build_gimple_modify_stmt, which did not set the
2190 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
2191 may cause an SSA validation failure, as the LHS may be a
2192 default-initialized name and should have no definition. I'm
2193 a bit dubious of this, as the artificial statement that we
2194 generate here may in fact be ill-formed, but it is simply
2195 used as an internal device in this pass, and never becomes
2196 part of the CFG. */
2197 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2198 new_stmt = gimple_build_assign (rhs, lhs);
2199 SSA_NAME_DEF_STMT (rhs) = defstmt;
2200 }
2201 else
2202 new_stmt = gimple_build_assign (rhs, lhs);
2203
5006671f 2204 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
6de9cd9a 2205
2206 /* Finally enter the statement into the available expression
2207 table. */
2208 lookup_avail_expr (new_stmt, true);
2209 }
2210}
2211
2212/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2213 CONST_AND_COPIES. */
2214
87c93592 2215static void
726a989a 2216cprop_operand (gimple stmt, use_operand_p op_p)
ff2ad0f7 2217{
2218 tree val;
2219 tree op = USE_FROM_PTR (op_p);
2220
2221 /* If the operand has a known constant value or it is known to be a
2222 copy of some other variable, use the value or copy stored in
2223 CONST_AND_COPIES. */
3aecd08b 2224 val = SSA_NAME_VALUE (op);
c9145754 2225 if (val && val != op)
ff2ad0f7 2226 {
aa24864c 2227 /* Do not replace hard register operands in asm statements. */
726a989a 2228 if (gimple_code (stmt) == GIMPLE_ASM
aa24864c 2229 && !may_propagate_copy_into_asm (op))
87c93592 2230 return;
aa24864c 2231
2232 /* Certain operands are not allowed to be copy propagated due
2233 to their interaction with exception handling and some GCC
2234 extensions. */
66e8b99c 2235 if (!may_propagate_copy (op, val))
87c93592 2236 return;
66e8b99c 2237
2238 /* Do not propagate copies into BIVs.
2239 See PR23821 and PR62217 for how this can disturb IV and
2240 number of iteration analysis. */
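      /* Illustrative sketch: if OP is a BIV such as
	   i_1 = PHI <0 (preheader), i_7 (latch)>
	 then rewriting a use of i_1 to some equivalent name would
	 obscure the induction variable, so we decline unless VAL is an
	 integer constant.  */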
2241 if (TREE_CODE (val) != INTEGER_CST)
2242 {
2243 gimple def = SSA_NAME_DEF_STMT (op);
2244 if (gimple_code (def) == GIMPLE_PHI
2245 && gimple_bb (def)->loop_father->header == gimple_bb (def))
2246 return;
2247 }
e9d85fa6 2248
2249 /* Dump details. */
2250 if (dump_file && (dump_flags & TDF_DETAILS))
2251 {
2252 fprintf (dump_file, " Replaced '");
2253 print_generic_expr (dump_file, op, dump_flags);
2254 fprintf (dump_file, "' with %s '",
2255 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2256 print_generic_expr (dump_file, val, dump_flags);
2257 fprintf (dump_file, "'\n");
2258 }
2259
2260 if (TREE_CODE (val) != SSA_NAME)
2261 opt_stats.num_const_prop++;
2262 else
2263 opt_stats.num_copy_prop++;
2264
2265 propagate_value (op_p, val);
2266
2267 /* And note that we modified this statement. This is now
2268 safe, even if we changed virtual operands since we will
2269 rescan the statement and rewrite its operands again. */
726a989a 2270 gimple_set_modified (stmt, true);
ff2ad0f7 2271 }
2272}
2273
2274/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
b8698a0f 2275 known value for that SSA_NAME (or NULL if no value is known).
2276
2277 Propagate values from CONST_AND_COPIES into the uses, vuses and
cfaab3a9 2278 vdef_ops of STMT. */
ff2ad0f7 2279
87c93592 2280static void
726a989a 2281cprop_into_stmt (gimple stmt)
ff2ad0f7 2282{
2283 use_operand_p op_p;
2284 ssa_op_iter iter;
ff2ad0f7 2285
2286 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
2287 cprop_operand (stmt, op_p);
2288}
2289
206048bd 2290/* Optimize the statement pointed to by iterator SI.
b8698a0f 2291
2292 We try to perform some simplistic global redundancy elimination and
2293 constant propagation:
2294
2295 1- To detect global redundancy, we keep track of expressions that have
2296 been computed in this block and its dominators. If we find that the
2297 same expression is computed more than once, we eliminate repeated
2298 computations by using the target of the first one.
2299
2300 2- Constant values and copy assignments. This is used to do very
2301 simplistic constant and copy propagation. When a constant or copy
2302 assignment is found, we map the value on the RHS of the assignment to
2303 the variable in the LHS in the CONST_AND_COPIES table. */
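/* An illustrative sketch of both effects (hypothetical names, not a
   real dump):

     a_1 = 5;
     b_2 = a_1 + c_3;   becomes   b_2 = 5 + c_3   (const propagation)
     d_4 = 5 + c_3;     becomes   d_4 = b_2       (redundancy elimination)  */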
2304
2305static void
ccf5c864 2306optimize_stmt (basic_block bb, gimple_stmt_iterator si)
6de9cd9a 2307{
726a989a 2308 gimple stmt, old_stmt;
6de9cd9a 2309 bool may_optimize_p;
c5cac099 2310 bool modified_p = false;
2a5671ee 2311 bool was_noreturn;
6de9cd9a 2312
726a989a 2313 old_stmt = stmt = gsi_stmt (si);
2a5671ee 2314 was_noreturn = is_gimple_call (stmt) && gimple_call_noreturn_p (stmt);
b8698a0f 2315
2316 if (dump_file && (dump_flags & TDF_DETAILS))
2317 {
2318 fprintf (dump_file, "Optimizing statement ");
726a989a 2319 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2320 }
2321
2a86de57 2322 if (gimple_code (stmt) == GIMPLE_COND)
538dd0b7 2323 canonicalize_comparison (as_a <gcond *> (stmt));
2324
2325 update_stmt_if_modified (stmt);
2326 opt_stats.num_stmts++;
2327
cfaab3a9 2328 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
87c93592 2329 cprop_into_stmt (stmt);
2330
2331 /* If the statement has been modified with constant replacements,
2332 fold its RHS before checking for redundant computations. */
726a989a 2333 if (gimple_modified_p (stmt))
6de9cd9a 2334 {
726a989a 2335 tree rhs = NULL;
6cedb4ac 2336
2337 /* Try to fold the statement making sure that STMT is kept
2338 up to date. */
726a989a 2339 if (fold_stmt (&si))
6de9cd9a 2340 {
726a989a 2341 stmt = gsi_stmt (si);
076ba157 2342 gimple_set_modified (stmt, true);
2343
2344 if (dump_file && (dump_flags & TDF_DETAILS))
2345 {
2346 fprintf (dump_file, " Folded to: ");
726a989a 2347 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2348 }
2349 }
2350
2351 /* We only need to consider cases that can yield a gimple operand. */
2352 if (gimple_assign_single_p (stmt))
2353 rhs = gimple_assign_rhs1 (stmt);
2354 else if (gimple_code (stmt) == GIMPLE_GOTO)
2355 rhs = gimple_goto_dest (stmt);
538dd0b7 2356 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
726a989a 2357 /* This should never be an ADDR_EXPR. */
538dd0b7 2358 rhs = gimple_switch_index (swtch_stmt);
726a989a 2359
6cedb4ac 2360 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
726a989a 2361 recompute_tree_invariant_for_addr_expr (rhs);
6cedb4ac 2362
2363 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2364 even if fold_stmt updated the stmt already and thus cleared
2365 gimple_modified_p flag on it. */
2366 modified_p = true;
2367 }
2368
2369 /* Check for redundant computations. Do this optimization only
2370 for assignments that have no volatile ops and conditionals. */
2371 may_optimize_p = (!gimple_has_side_effects (stmt)
2372 && (is_gimple_assign (stmt)
726a989a 2373 || (is_gimple_call (stmt)
d829c408 2374 && gimple_call_lhs (stmt) != NULL_TREE)
2375 || gimple_code (stmt) == GIMPLE_COND
2376 || gimple_code (stmt) == GIMPLE_SWITCH));
2377
2378 if (may_optimize_p)
726a989a 2379 {
2380 if (gimple_code (stmt) == GIMPLE_CALL)
2381 {
2382 /* Resolve __builtin_constant_p. If it hasn't been
2383 folded to integer_one_node by now, it's fairly
2384 certain that the value simply isn't constant. */
2385 tree callee = gimple_call_fndecl (stmt);
2386 if (callee
2387 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2388 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2389 {
2390 propagate_tree_value_into_stmt (&si, integer_zero_node);
2391 stmt = gsi_stmt (si);
2392 }
2393 }
2394
2395 update_stmt_if_modified (stmt);
2396 eliminate_redundant_computations (&si);
2397 stmt = gsi_stmt (si);
2398
2399 /* Perform simple redundant store elimination. */
2400 if (gimple_assign_single_p (stmt)
2401 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2402 {
2403 tree lhs = gimple_assign_lhs (stmt);
2404 tree rhs = gimple_assign_rhs1 (stmt);
2405 tree cached_lhs;
538dd0b7 2406 gassign *new_stmt;
2407 if (TREE_CODE (rhs) == SSA_NAME)
2408 {
2409 tree tem = SSA_NAME_VALUE (rhs);
2410 if (tem)
2411 rhs = tem;
2412 }
2413 /* Build a new statement with the RHS and LHS exchanged. */
2414 if (TREE_CODE (rhs) == SSA_NAME)
2415 {
2416 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2417 new_stmt = gimple_build_assign (rhs, lhs);
2418 SSA_NAME_DEF_STMT (rhs) = defstmt;
2419 }
2420 else
2421 new_stmt = gimple_build_assign (rhs, lhs);
2422 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2423 cached_lhs = lookup_avail_expr (new_stmt, false);
2424 if (cached_lhs
2425 && rhs == cached_lhs)
2426 {
2427 basic_block bb = gimple_bb (stmt);
565b8886 2428 unlink_stmt_vdef (stmt);
b5b3ec3e 2429 if (gsi_remove (&si, true))
2430 {
2431 bitmap_set_bit (need_eh_cleanup, bb->index);
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file, " Flagged to clear EH edges.\n");
2434 }
3d3f2249 2435 release_defs (stmt);
2436 return;
2437 }
2438 }
726a989a 2439 }
2440
2441 /* Record any additional equivalences created by this statement. */
2442 if (is_gimple_assign (stmt))
2443 record_equivalences_from_stmt (stmt, may_optimize_p);
6de9cd9a 2444
2445 /* If STMT is a COND_EXPR and it was modified, then we may know
2446 where it goes. If that is the case, then mark the CFG as altered.
2447
2448 This will cause us to later call remove_unreachable_blocks and
b8698a0f 2449 cleanup_tree_cfg when it is safe to do so. It is not safe to
2450 clean things up here since removal of edges and such can trigger
2451 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2452 the manager.
2453
2454 That's all fine and good, except that once SSA_NAMEs are released
2455 to the manager, we must not call create_ssa_name until all references
2456 to released SSA_NAMEs have been eliminated.
2457
 2458 All references to the deleted SSA_NAMEs cannot be eliminated until
2459 we remove unreachable blocks.
2460
 2461 We cannot remove unreachable blocks until after we have completed
2462 any queued jump threading.
2463
 2464 We cannot complete any queued jump threads until we have taken
2465 appropriate variables out of SSA form. Taking variables out of
2466 SSA form can call create_ssa_name and thus we lose.
2467
2468 Ultimately I suspect we're going to need to change the interface
2469 into the SSA_NAME manager. */
c5cac099 2470 if (gimple_modified_p (stmt) || modified_p)
2471 {
2472 tree val = NULL;
b8698a0f 2473
aabf6a03 2474 update_stmt_if_modified (stmt);
6de9cd9a 2475
726a989a 2476 if (gimple_code (stmt) == GIMPLE_COND)
2477 val = fold_binary_loc (gimple_location (stmt),
2478 gimple_cond_code (stmt), boolean_type_node,
726a989a 2479 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
2480 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
2481 val = gimple_switch_index (swtch_stmt);
6de9cd9a 2482
1eaba2f2 2483 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
6de9cd9a 2484 cfg_altered = true;
2485
 2486 /* If we simplified a statement in such a way that it can be shown
 2487 not to trap, update the EH information and the CFG to match. */
af47810a 2488 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
2489 {
2490 bitmap_set_bit (need_eh_cleanup, bb->index);
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, " Flagged to clear EH edges.\n");
2493 }
2494
2495 if (!was_noreturn
2496 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2497 need_noreturn_fixup.safe_push (stmt);
6de9cd9a 2498 }
2499}
2500
2501/* Helper for walk_non_aliased_vuses. Determine if we arrived at
2502 the desired memory state. */
2503
2504static void *
2505vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
2506{
2507 tree vuse2 = (tree) data;
2508 if (vuse1 == vuse2)
2509 return data;
2510
2511 /* This bounds the stmt walks we perform on reference lookups
2512 to O(1) instead of O(N) where N is the number of dominating
2513 stores leading to a candidate. We re-use the SCCVN param
2514 for this as it is basically the same complexity. */
2515 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
2516 return (void *)-1;
2517
2518 return NULL;
2519}
2520
2521/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2522 If found, return its LHS. Otherwise insert STMT in the table and
2523 return NULL_TREE.
6de9cd9a 2524
 2525 Also, when an expression is first inserted in the table, it is
 2526 also added to AVAIL_EXPRS_STACK, so that it can be removed when
2527 we finish processing this block and its children. */
2528
2529static tree
726a989a 2530lookup_avail_expr (gimple stmt, bool insert)
6de9cd9a 2531{
4a8fb1a1 2532 expr_hash_elt **slot;
2533 tree lhs;
2534 tree temp;
a7d04a53 2535 struct expr_hash_elt element;
6de9cd9a 2536
2537 /* Get LHS of phi, assignment, or call; else NULL_TREE. */
2538 if (gimple_code (stmt) == GIMPLE_PHI)
2539 lhs = gimple_phi_result (stmt);
2540 else
2541 lhs = gimple_get_lhs (stmt);
6de9cd9a 2542
a7d04a53 2543 initialize_hash_element (stmt, lhs, &element);
6de9cd9a 2544
2545 if (dump_file && (dump_flags & TDF_DETAILS))
2546 {
2547 fprintf (dump_file, "LKUP ");
a7d04a53 2548 print_expr_hash_elt (dump_file, &element);
2549 }
2550
2551 /* Don't bother remembering constant assignments and copy operations.
2552 Constants and copy operations are handled by the constant/copy propagator
2553 in optimize_stmt. */
2554 if (element.expr.kind == EXPR_SINGLE
2555 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2556 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2557 return NULL_TREE;
6de9cd9a 2558
6de9cd9a 2559 /* Finally try to find the expression in the main expression hash table. */
c203e8a7 2560 slot = avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
6de9cd9a 2561 if (slot == NULL)
2562 {
2563 free_expr_hash_elt_contents (&element);
2564 return NULL_TREE;
2565 }
2566 else if (*slot == NULL)
6de9cd9a 2567 {
2568 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2569 *element2 = element;
2570 element2->stamp = element2;
4a8fb1a1 2571 *slot = element2;
2572
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 {
2575 fprintf (dump_file, "2>>> ");
a7d04a53 2576 print_expr_hash_elt (dump_file, element2);
2577 }
2578
2579 avail_exprs_stack.safe_push
2580 (std::pair<expr_hash_elt_t, expr_hash_elt_t> (element2, NULL));
2581 return NULL_TREE;
2582 }
2583
2584 /* If we found a redundant memory operation do an alias walk to
2585 check if we can re-use it. */
2586 if (gimple_vuse (stmt) != (*slot)->vop)
2587 {
2588 tree vuse1 = (*slot)->vop;
2589 tree vuse2 = gimple_vuse (stmt);
2590 /* If we have a load of a register and a candidate in the
2591 hash with vuse1 then try to reach its stmt by walking
2592 up the virtual use-def chain using walk_non_aliased_vuses.
2593 But don't do this when removing expressions from the hash. */
2594 ao_ref ref;
2595 if (!(vuse1 && vuse2
2596 && gimple_assign_single_p (stmt)
2597 && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
2598 && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)), true)
2599 && walk_non_aliased_vuses (&ref, vuse2,
92a5094e 2600 vuse_eq, NULL, NULL, vuse1) != NULL))
b00734df 2601 {
2f159d9f 2602 if (insert)
b00734df 2603 {
2604 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2605 *element2 = element;
2606 element2->stamp = element2;
2607
2608 /* Insert the expr into the hash by replacing the current
2609 entry and recording the value to restore in the
2610 avail_exprs_stack. */
2611 avail_exprs_stack.safe_push (std::make_pair (element2, *slot));
2612 *slot = element2;
2613 if (dump_file && (dump_flags & TDF_DETAILS))
2614 {
2615 fprintf (dump_file, "2>>> ");
2616 print_expr_hash_elt (dump_file, *slot);
2617 }
2618 }
2619 return NULL_TREE;
2620 }
2621 }
2622
2623 free_expr_hash_elt_contents (&element);
2624
2625 /* Extract the LHS of the assignment so that it can be used as the current
2626 definition of another variable. */
b00734df 2627 lhs = (*slot)->lhs;
2628
2629 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2630 use the value from the const_and_copies table. */
2631 if (TREE_CODE (lhs) == SSA_NAME)
2632 {
3aecd08b 2633 temp = SSA_NAME_VALUE (lhs);
c9145754 2634 if (temp)
2635 lhs = temp;
2636 }
2637
2638 if (dump_file && (dump_flags & TDF_DETAILS))
2639 {
2640 fprintf (dump_file, "FIND: ");
2641 print_generic_expr (dump_file, lhs, 0);
2642 fprintf (dump_file, "\n");
2643 }
2644
2645 return lhs;
2646}
2647
2648/* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2649 for expressions using the code of the expression and the SSA numbers of
2650 its operands. */
2651
2652static hashval_t
2653avail_expr_hash (const void *p)
2654{
726a989a 2655 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
2bc10537 2656 inchash::hash hstate;
6de9cd9a 2657
2bc10537 2658 inchash::add_hashable_expr (expr, hstate);
6de9cd9a 2659
2bc10537 2660 return hstate.end ();
2661}
2662
2663/* PHI-ONLY copy and constant propagation. This pass is meant to clean
2664 up degenerate PHIs created by or exposed by jump threading. */
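/* A degenerate PHI is one whose arguments are all equivalent, e.g.
   (illustrative) x_3 = PHI <y_2 (e1), y_2 (e2)>, which is simply the
   copy x_3 = y_2 and can be propagated away.  */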
2665
726a989a 2666/* Given a statement STMT, which is either a PHI node or an assignment,
2667 remove it from the IL. */
2668
2669static void
726a989a 2670remove_stmt_or_phi (gimple stmt)
e67c25c7 2671{
2672 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2673
2674 if (gimple_code (stmt) == GIMPLE_PHI)
2675 remove_phi_node (&gsi, true);
2676 else
2677 {
2678 gsi_remove (&gsi, true);
2679 release_defs (stmt);
2680 }
2681}
2682
726a989a 2683/* Given a statement STMT, which is either a PHI node or an assignment,
e67c25c7 2684 return the "rhs" of the node. In the case of a non-degenerate
726a989a 2685 phi, NULL is returned. */
2686
2687static tree
726a989a 2688get_rhs_or_phi_arg (gimple stmt)
e67c25c7 2689{
726a989a 2690 if (gimple_code (stmt) == GIMPLE_PHI)
538dd0b7 2691 return degenerate_phi_result (as_a <gphi *> (stmt));
2692 else if (gimple_assign_single_p (stmt))
2693 return gimple_assign_rhs1 (stmt);
2694 else
2695 gcc_unreachable ();
2696}
2697
2698
726a989a 2699/* Given a statement STMT, which is either a PHI node or an assignment,
2700 return the "lhs" of the node. */
2701
2702static tree
726a989a 2703get_lhs_or_phi_result (gimple stmt)
e67c25c7 2704{
2705 if (gimple_code (stmt) == GIMPLE_PHI)
2706 return gimple_phi_result (stmt);
2707 else if (is_gimple_assign (stmt))
2708 return gimple_assign_lhs (stmt);
2709 else
2710 gcc_unreachable ();
2711}
2712
2713/* Propagate RHS into all uses of LHS (when possible).
2714
2715 RHS and LHS are derived from STMT, which is passed in solely so
2716 that we can remove it if propagation is successful.
2717
2718 When propagating into a PHI node or into a statement which turns
2719 into a trivial copy or constant initialization, set the
2720 appropriate bit in INTERESTING_NAMEs so that we will visit those
2721 nodes as well in an effort to pick up secondary optimization
2722 opportunities. */
2723
b8698a0f 2724static void
726a989a 2725propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
e67c25c7 2726{
a59d8e8e 2727 /* First verify that propagation is valid. */
406bfdd3 2728 if (may_propagate_copy (lhs, rhs))
2729 {
2730 use_operand_p use_p;
2731 imm_use_iterator iter;
726a989a 2732 gimple use_stmt;
2733 bool all = true;
2734
2735 /* Dump details. */
2736 if (dump_file && (dump_flags & TDF_DETAILS))
2737 {
2738 fprintf (dump_file, " Replacing '");
2739 print_generic_expr (dump_file, lhs, dump_flags);
2740 fprintf (dump_file, "' with %s '",
2741 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2742 print_generic_expr (dump_file, rhs, dump_flags);
2743 fprintf (dump_file, "'\n");
2744 }
2745
b8698a0f 2746 /* Walk over every use of LHS and try to replace the use with RHS.
2747 At this point the only reason why such a propagation would not
2748 be successful would be if the use occurs in an ASM_EXPR. */
6c00f606 2749 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
e67c25c7 2750 {
2751 /* Leave debug stmts alone. If we succeed in propagating
2752 all non-debug uses, we'll drop the DEF, and propagation
2753 into debug stmts will occur then. */
2754 if (gimple_debug_bind_p (use_stmt))
2755 continue;
b8698a0f 2756
e67c25c7 2757 /* It's not always safe to propagate into an ASM_EXPR. */
2758 if (gimple_code (use_stmt) == GIMPLE_ASM
2759 && ! may_propagate_copy_into_asm (lhs))
2760 {
2761 all = false;
2762 continue;
2763 }
2764
2765 /* It's not ok to propagate into the definition stmt of RHS.
2766 <bb 9>:
2767 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2768 g_67.1_6 = prephitmp.12_36;
2769 goto <bb 9>;
2770 While this is strictly all dead code we do not want to
2771 deal with this here. */
2772 if (TREE_CODE (rhs) == SSA_NAME
2773 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2774 {
2775 all = false;
2776 continue;
2777 }
2778
2779 /* Dump details. */
2780 if (dump_file && (dump_flags & TDF_DETAILS))
2781 {
2782 fprintf (dump_file, " Original statement:");
726a989a 2783 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2784 }
2785
cbc75e62 2786 /* Propagate the RHS into this use of the LHS. */
2787 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2788 propagate_value (use_p, rhs);
2789
2790 /* Special cases to avoid useless calls into the folding
2791 routines, operand scanning, etc.
2792
6b4a85ad 2793 Propagation into a PHI may cause the PHI to become
cbc75e62 2794 a degenerate, so mark the PHI as interesting. No other
2795 actions are necessary. */
2796 if (gimple_code (use_stmt) == GIMPLE_PHI)
cbc75e62 2797 {
2798 tree result;
2799
2800 /* Dump details. */
2801 if (dump_file && (dump_flags & TDF_DETAILS))
2802 {
2803 fprintf (dump_file, " Updated statement:");
726a989a 2804 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2805 }
2806
2807 result = get_lhs_or_phi_result (use_stmt);
2808 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2809 continue;
2810 }
2811
b8698a0f 2812 /* From this point onward we are propagating into a
2813 real statement. Folding may (or may not) be possible,
2814 we may expose new operands, expose dead EH edges,
2815 etc. */
2816 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2817 cannot fold a call that simplifies to a constant,
2818 because the GIMPLE_CALL must be replaced by a
2819 GIMPLE_ASSIGN, and there is no way to effect such a
2820 transformation in-place. We might want to consider
2821 using the more general fold_stmt here. */
2822 {
2823 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
2824 fold_stmt_inplace (&gsi);
2825 }
2826
2827 /* Sometimes propagation can expose new operands to the
2828 renamer. */
2829 update_stmt (use_stmt);
2830
2831 /* Dump details. */
2832 if (dump_file && (dump_flags & TDF_DETAILS))
2833 {
2834 fprintf (dump_file, " Updated statement:");
726a989a 2835 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
2836 }
2837
2838 /* If we replaced a variable index with a constant, then
2839 we would need to update the invariant flag for ADDR_EXPRs. */
2840 if (gimple_assign_single_p (use_stmt)
2841 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
07beea0d 2842 recompute_tree_invariant_for_addr_expr
726a989a 2843 (gimple_assign_rhs1 (use_stmt));
2844
2845 /* If we cleaned up EH information from the statement,
72922229 2846 mark its containing block as needing EH cleanups. */
e67c25c7 2847 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
72922229 2848 {
726a989a 2849 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
2850 if (dump_file && (dump_flags & TDF_DETAILS))
2851 fprintf (dump_file, " Flagged to clear EH edges.\n");
2852 }
e67c25c7 2853
2854 /* Propagation may expose new trivial copy/constant propagation
2855 opportunities. */
2856 if (gimple_assign_single_p (use_stmt)
2857 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2858 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2859 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2860 {
2861 tree result = get_lhs_or_phi_result (use_stmt);
2862 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2863 }
2864
2865 /* Propagation into these nodes may make certain edges in
 2866 the CFG unexecutable. We want to identify them, since PHI nodes
 2867 at the destinations of those unexecutable edges may become
 2868 degenerate. */
2869 else if (gimple_code (use_stmt) == GIMPLE_COND
2870 || gimple_code (use_stmt) == GIMPLE_SWITCH
2871 || gimple_code (use_stmt) == GIMPLE_GOTO)
2872 {
2873 tree val;
2874
726a989a 2875 if (gimple_code (use_stmt) == GIMPLE_COND)
2876 val = fold_binary_loc (gimple_location (use_stmt),
2877 gimple_cond_code (use_stmt),
2878 boolean_type_node,
2879 gimple_cond_lhs (use_stmt),
2880 gimple_cond_rhs (use_stmt));
2881 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
538dd0b7 2882 val = gimple_switch_index (as_a <gswitch *> (use_stmt));
e67c25c7 2883 else
726a989a 2884 val = gimple_goto_dest (use_stmt);
e67c25c7 2885
726a989a 2886 if (val && is_gimple_min_invariant (val))
e67c25c7 2887 {
726a989a 2888 basic_block bb = gimple_bb (use_stmt);
2889 edge te = find_taken_edge (bb, val);
2890 edge_iterator ei;
2891 edge e;
2892 gimple_stmt_iterator gsi;
2893 gphi_iterator psi;
2894
2895 /* Remove all outgoing edges except TE. */
2896 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2897 {
2898 if (e != te)
2899 {
2900 /* Mark all the PHI nodes at the destination of
2901 the unexecutable edge as interesting. */
2902 for (psi = gsi_start_phis (e->dest);
2903 !gsi_end_p (psi);
2904 gsi_next (&psi))
2905 {
538dd0b7 2906 gphi *phi = psi.phi ();
2907
2908 tree result = gimple_phi_result (phi);
2909 int version = SSA_NAME_VERSION (result);
2910
2911 bitmap_set_bit (interesting_names, version);
2912 }
2913
2914 te->probability += e->probability;
2915
2916 te->count += e->count;
2917 remove_edge (e);
8d9d6561 2918 cfg_altered = true;
2919 }
2920 else
2921 ei_next (&ei);
2922 }
2923
2924 gsi = gsi_last_bb (gimple_bb (use_stmt));
2925 gsi_remove (&gsi, true);
2926
2927 /* And fixup the flags on the single remaining edge. */
2928 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2929 te->flags &= ~EDGE_ABNORMAL;
2930 te->flags |= EDGE_FALLTHRU;
2931 if (te->probability > REG_BR_PROB_BASE)
2932 te->probability = REG_BR_PROB_BASE;
2933 }
2934 }
2935 }
2936
b8698a0f 2937 /* Ensure there is nothing else to do. */
a5f84464 2938 gcc_assert (!all || has_zero_uses (lhs));
243cc836 2939
2940 /* If we were able to propagate away all uses of LHS, then
2941 we can remove STMT. */
2942 if (all)
2943 remove_stmt_or_phi (stmt);
2944 }
2945}
2946
726a989a 2947/* STMT is either a PHI node (potentially a degenerate PHI node) or
2948 a statement that is a trivial copy or constant initialization.
2949
 2950 Attempt to eliminate STMT by propagating its RHS into all uses of
2951 its LHS. This may in turn set new bits in INTERESTING_NAMES
2952 for nodes we want to revisit later.
2953
2954 All exit paths should clear INTERESTING_NAMES for the result
726a989a 2955 of STMT. */
2956
2957static void
726a989a 2958eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
e67c25c7 2959{
726a989a 2960 tree lhs = get_lhs_or_phi_result (stmt);
2961 tree rhs;
2962 int version = SSA_NAME_VERSION (lhs);
2963
2964 /* If the LHS of this statement or PHI has no uses, then we can
2965 just eliminate it. This can occur if, for example, the PHI
2966 was created by block duplication due to threading and its only
2967 use was in the conditional at the end of the block which was
2968 deleted. */
2969 if (has_zero_uses (lhs))
2970 {
2971 bitmap_clear_bit (interesting_names, version);
726a989a 2972 remove_stmt_or_phi (stmt);
2973 return;
2974 }
2975
2976 /* Get the RHS of the assignment or PHI node if the PHI is a
2977 degenerate. */
726a989a 2978 rhs = get_rhs_or_phi_arg (stmt);
2979 if (!rhs)
2980 {
2981 bitmap_clear_bit (interesting_names, version);
2982 return;
2983 }
2984
2985 if (!virtual_operand_p (lhs))
2986 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2987 else
2988 {
2989 gimple use_stmt;
2990 imm_use_iterator iter;
2991 use_operand_p use_p;
2992 /* For virtual operands we have to propagate into all uses as
2993 otherwise we will create overlapping life-ranges. */
2994 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2995 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2996 SET_USE (use_p, rhs);
2997 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2998 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
2999 remove_stmt_or_phi (stmt);
3000 }
e67c25c7 3001
726a989a 3002 /* Note that STMT may well have been deleted by now, so do
 3003 not access it; instead use the saved version number to clear
 3004 STMT's entry in the worklist. */
3005 bitmap_clear_bit (interesting_names, version);
3006}
3007
3008/* The first phase in degenerate PHI elimination.
3009
3010 Eliminate the degenerate PHIs in BB, then recurse on the
3011 dominator children of BB. */
3012
3013static void
3014eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
3015{
538dd0b7 3016 gphi_iterator gsi;
3017 basic_block son;
3018
726a989a 3019 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
e67c25c7 3020 {
538dd0b7 3021 gphi *phi = gsi.phi ();
726a989a 3022
3023 eliminate_const_or_copy (phi, interesting_names);
3024 }
3025
3026 /* Recurse into the dominator children of BB. */
3027 for (son = first_dom_son (CDI_DOMINATORS, bb);
3028 son;
3029 son = next_dom_son (CDI_DOMINATORS, son))
3030 eliminate_degenerate_phis_1 (son, interesting_names);
3031}
3032
3033
3034/* A very simple pass to eliminate degenerate PHI nodes from the
3035 IL. This is meant to be fast enough to be able to be run several
3036 times in the optimization pipeline.
3037
3038 Certain optimizations, particularly those which duplicate blocks
 3039 or remove edges from the CFG, can create or expose PHIs which are
3040 trivial copies or constant initializations.
3041
3042 While we could pick up these optimizations in DOM or with the
3043 combination of copy-prop and CCP, those solutions are far too
3044 heavy-weight for our needs.
3045
3046 This implementation has two phases so that we can efficiently
3047 eliminate the first order degenerate PHIs and second order
3048 degenerate PHIs.
3049
3050 The first phase performs a dominator walk to identify and eliminate
3051 the vast majority of the degenerate PHIs. When a degenerate PHI
 3052 is identified and eliminated, any affected statements or PHIs
3053 are put on a worklist.
3054
3055 The second phase eliminates degenerate PHIs and trivial copies
3056 or constant initializations using the worklist. This is how we
3057 pick up the secondary optimization opportunities with minimal
3058 cost. */
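/* An illustrative sketch of a second order degeneracy: eliminating
   x_3 = PHI <y_2, y_2> replaces the remaining uses of x_3 with y_2,
   which may turn a downstream z_4 = PHI <x_3, y_2> into
   z_4 = PHI <y_2, y_2>; the worklist phase then picks that up.  */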
3059
3060namespace {
3061
3062const pass_data pass_data_phi_only_cprop =
3063{
3064 GIMPLE_PASS, /* type */
3065 "phicprop", /* name */
3066 OPTGROUP_NONE, /* optinfo_flags */
3067 TV_TREE_PHI_CPROP, /* tv_id */
3068 ( PROP_cfg | PROP_ssa ), /* properties_required */
3069 0, /* properties_provided */
3070 0, /* properties_destroyed */
3071 0, /* todo_flags_start */
3bea341f 3072 ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
3073};
3074
3075class pass_phi_only_cprop : public gimple_opt_pass
3076{
3077public:
3078 pass_phi_only_cprop (gcc::context *ctxt)
3079 : gimple_opt_pass (pass_data_phi_only_cprop, ctxt)
3080 {}
3081
3082 /* opt_pass methods: */
3083 opt_pass * clone () { return new pass_phi_only_cprop (m_ctxt); }
3084 virtual bool gate (function *) { return flag_tree_dom != 0; }
3085 virtual unsigned int execute (function *);
3086
3087}; // class pass_phi_only_cprop
3088
3089unsigned int
3090pass_phi_only_cprop::execute (function *fun)
3091{
3092 bitmap interesting_names;
1f70491b 3093 bitmap interesting_names1;
e67c25c7 3094
 3095 /* Bitmap of blocks which need EH information updated. We cannot
3096 update it on-the-fly as doing so invalidates the dominator tree. */
3097 need_eh_cleanup = BITMAP_ALLOC (NULL);
3098
3099 /* INTERESTING_NAMES is effectively our worklist, indexed by
3100 SSA_NAME_VERSION.
3101
3102 A set bit indicates that the statement or PHI node which
3103 defines the SSA_NAME should be (re)examined to determine if
66a4ad37 3104 it has become a degenerate PHI or trivial const/copy propagation
b8698a0f 3105 opportunity.
3106
 3107 Experiments have shown we generally get better compilation
3108 time behavior with bitmaps rather than sbitmaps. */
3109 interesting_names = BITMAP_ALLOC (NULL);
1f70491b 3110 interesting_names1 = BITMAP_ALLOC (NULL);
e67c25c7 3111
3112 calculate_dominance_info (CDI_DOMINATORS);
3113 cfg_altered = false;
3114
917f1b7e 3115 /* First phase. Eliminate degenerate PHIs via a dominator
3116 walk of the CFG.
3117
3118 Experiments have indicated that we generally get better
3119 compile-time behavior by visiting blocks in the first
3120 phase in dominator order. Presumably this is because walking
3121 in dominator order leaves fewer PHIs for later examination
3122 by the worklist phase. */
be55bfe6 3123 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (fun),
fefa31b5 3124 interesting_names);
e67c25c7 3125
917f1b7e 3126 /* Second phase. Eliminate second order degenerate PHIs as well
3127 as trivial copies or constant initializations identified by
3128 the first phase or this phase. Basically we keep iterating
 3129 until our set of INTERESTING_NAMES is empty. */
3130 while (!bitmap_empty_p (interesting_names))
3131 {
3132 unsigned int i;
3133 bitmap_iterator bi;
3134
3135 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
3136 changed during the loop. Copy it to another bitmap and
3137 use that. */
3138 bitmap_copy (interesting_names1, interesting_names);
3139
3140 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
3141 {
3142 tree name = ssa_name (i);
3143
3144 /* Ignore SSA_NAMEs that have been released because
3145 their defining statement was deleted (unreachable). */
3146 if (name)
3147 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
3148 interesting_names);
3149 }
3150 }
72922229 3151
8d9d6561 3152 if (cfg_altered)
3153 {
3154 free_dominance_info (CDI_DOMINATORS);
3155 /* If we changed the CFG schedule loops for fixup by cfgcleanup. */
726338f4 3156 loops_state_set (LOOPS_NEED_FIXUP);
83ba4d6f 3157 }
8d9d6561 3158
3159 /* Propagation of const and copies may make some EH edges dead. Purge
3160 such edges from the CFG as needed. */
3161 if (!bitmap_empty_p (need_eh_cleanup))
3162 {
726a989a 3163 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
3164 BITMAP_FREE (need_eh_cleanup);
3165 }
3166
3167 BITMAP_FREE (interesting_names);
1f70491b 3168 BITMAP_FREE (interesting_names1);
3169 return 0;
3170}
3171
3172} // anon namespace
3173
3174gimple_opt_pass *
3175make_pass_phi_only_cprop (gcc::context *ctxt)
3176{
3177 return new pass_phi_only_cprop (ctxt);
3178}