/* Extraction artifact: this chunk was scraped from a gitweb "blame" view
   of gcc/tree-ssa-dom.c (commit subject: "remove need for
   store_values_directly").  The per-line commit hashes and line numbers
   below are blame-viewer metadata, not source text.  */
4ee9c684 1/* SSA Dominator optimizations for trees
d353bf18 2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
4ee9c684 3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
8c4c00c1 9the Free Software Foundation; either version 3, or (at your option)
4ee9c684 10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
4ee9c684 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
d9dd21a8 24#include "hash-table.h"
4ee9c684 25#include "tm.h"
b20a8bb4 26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "wide-int.h"
34#include "inchash.h"
35#include "real.h"
4ee9c684 36#include "tree.h"
b20a8bb4 37#include "fold-const.h"
9ed99284 38#include "stor-layout.h"
4ee9c684 39#include "flags.h"
4ee9c684 40#include "tm_p.h"
94ea8568 41#include "predict.h"
a3020f2f 42#include "hard-reg-set.h"
43#include "input.h"
4ee9c684 44#include "function.h"
94ea8568 45#include "dominance.h"
46#include "cfg.h"
47#include "cfganal.h"
48#include "basic-block.h"
49#include "cfgloop.h"
50#include "inchash.h"
ce084dfc 51#include "gimple-pretty-print.h"
bc61cadb 52#include "tree-ssa-alias.h"
53#include "internal-fn.h"
54#include "gimple-fold.h"
55#include "tree-eh.h"
56#include "gimple-expr.h"
57#include "is-a.h"
073c1fd5 58#include "gimple.h"
dcf1a1ec 59#include "gimple-iterator.h"
073c1fd5 60#include "gimple-ssa.h"
61#include "tree-cfg.h"
62#include "tree-phinodes.h"
63#include "ssa-iterators.h"
9ed99284 64#include "stringpool.h"
073c1fd5 65#include "tree-ssanames.h"
66#include "tree-into-ssa.h"
4ee9c684 67#include "domwalk.h"
4ee9c684 68#include "tree-pass.h"
7d564439 69#include "tree-ssa-propagate.h"
0c5b289a 70#include "tree-ssa-threadupdate.h"
4ee9c684 71#include "langhooks.h"
cf024d22 72#include "params.h"
545372c5 73#include "tree-ssa-scopedtables.h"
424a4a92 74#include "tree-ssa-threadedge.h"
75#include "tree-ssa-dom.h"
a01e9a5b 76#include "inchash.h"
387312e6 77#include "gimplify.h"
25959a39 78#include "tree-cfgcleanup.h"
4ee9c684 79
80/* This file implements optimizations on the dominator tree. */
81
/* Representation of a "naked" right-hand-side expression, to be used
   in recording available expressions in the expression hash table.
   Selects which member of hashable_expr's OPS union is valid.  */

enum expr_kind
{
  EXPR_SINGLE,		/* A single operand: constant, SSA name, etc.  */
  EXPR_UNARY,		/* A unary operation on one operand.  */
  EXPR_BINARY,		/* A binary operation on two operands.  */
  EXPR_TERNARY,		/* A ternary operation on three operands.  */
  EXPR_CALL,		/* A call with a value; see ops.call.  */
  EXPR_PHI		/* A PHI viewed as an expression over its args.  */
};
94
/* A "naked" GIMPLE right-hand side.  TYPE is the type of the value the
   expression computes; KIND selects the valid member of the OPS union.
   For EXPR_CALL and EXPR_PHI the ARGS vectors are heap-allocated by
   initialize_hash_element and released by free_expr_hash_elt_contents.  */

struct hashable_expr
{
  tree type;
  enum expr_kind kind;
  union {
    struct { tree rhs; } single;
    struct { enum tree_code op; tree opnd; } unary;
    struct { enum tree_code op; tree opnd0, opnd1; } binary;
    struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
    struct { gcall *fn_from; bool pure; size_t nargs; tree *args; } call;
    struct { size_t nargs; tree *args; } phi;
  } ops;
};
108
/* Structure for recording known values of a conditional expression
   at the exits from its block.  COND is the condition in hashable
   form; VALUE is boolean_true_node or boolean_false_node (see
   build_and_record_new_cond and record_conditions).  */

typedef struct cond_equivalence_s
{
  struct hashable_expr cond;
  tree value;
} cond_equivalence;
117

/* Structure for recording edge equivalences as well as any pending
   edge redirections during the dominator optimizer.

   Computing and storing the edge equivalences instead of creating
   them on-demand can save significant amounts of time, particularly
   for pathological cases involving switch statements.

   These structures live for a single iteration of the dominator
   optimizer in the edge's AUX field.  At the end of an iteration we
   free each of these structures and update the AUX field to point
   to any requested redirection target (the code for updating the
   CFG and SSA graph for edge redirection expects redirection edge
   targets to be in the AUX field for each edge.  */

struct edge_info
{
  /* If this edge creates a simple equivalence, the LHS and RHS of
     the equivalence will be stored here.  */
  tree lhs;
  tree rhs;

  /* Traversing an edge may also indicate one or more particular conditions
     are true or false.  Released in free_all_edge_infos.  */
  vec<cond_equivalence> cond_equivalences;
};
144
/* Stack of available expressions in AVAIL_EXPRs.  Each block pushes any
   expressions it enters into the hash table along with a marker entry
   (null).  When we finish processing the block, we pop off entries and
   remove the expressions from the global hash table until we hit the
   marker.  */
typedef struct expr_hash_elt * expr_hash_elt_t;

/* Each pair holds the table entry plus a companion element; see the
   unwind code elsewhere in this file for how the second member is used.  */
static vec<std::pair<expr_hash_elt_t, expr_hash_elt_t> > avail_exprs_stack;
9c629f0e 153
/* Structure for entries in the expression hash table.  */

struct expr_hash_elt
{
  /* The value (lhs) of this expression.  */
  tree lhs;

  /* The expression (rhs) we want to record.  */
  struct hashable_expr expr;

  /* The virtual operand associated with the nearest dominating stmt
     loading from or storing to expr.  */
  tree vop;

  /* The hash value for RHS, cached by initialize_hash_element.  */
  hashval_t hash;

  /* A unique stamp, typically the address of the hash
     element itself, used in removing entries from the table
     (see expr_elt_hasher::equal's stamp short-circuit).  */
  struct expr_hash_elt *stamp;
};
175
/* Hashtable helpers.  */

static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);
static void free_expr_hash_elt (void *);

/* Hasher traits for the AVAIL_EXPRs table: hashing uses the cached hash
   value, equality compares the underlying expressions (with a stamp
   short-circuit for removal), and remove reclaims element storage.  */

struct expr_elt_hasher
{
  typedef expr_hash_elt *value_type;
  typedef expr_hash_elt *compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void remove (value_type &);
};
190
/* Return the hash value cached in P when it was initialized
   (see initialize_hash_element).  */

inline hashval_t
expr_elt_hasher::hash (const value_type &p)
{
  return p->hash;
}
196
197inline bool
2933f7af 198expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
d9dd21a8 199{
d9dd21a8 200 const struct hashable_expr *expr1 = &p1->expr;
201 const struct expr_hash_elt *stamp1 = p1->stamp;
d9dd21a8 202 const struct hashable_expr *expr2 = &p2->expr;
203 const struct expr_hash_elt *stamp2 = p2->stamp;
204
205 /* This case should apply only when removing entries from the table. */
206 if (stamp1 == stamp2)
207 return true;
208
387312e6 209 if (p1->hash != p2->hash)
d9dd21a8 210 return false;
211
212 /* In case of a collision, both RHS have to be identical and have the
213 same VUSE operands. */
214 if (hashable_expr_equal_p (expr1, expr2)
215 && types_compatible_p (expr1->type, expr2->type))
387312e6 216 return true;
d9dd21a8 217
218 return false;
219}
220
/* Delete an expr_hash_elt and reclaim its storage.  Invoked by the
   hash table when ELEMENT is dropped.  */

inline void
expr_elt_hasher::remove (value_type &element)
{
  free_expr_hash_elt (element);
}
228
/* Hash table with expressions made available during the renaming process.
   When an assignment of the form X_i = EXPR is found, the statement is
   stored in this table.  If the same expression EXPR is later found on the
   RHS of another statement, it is replaced with X_i (thus performing
   global redundancy elimination).  Similarly as we pass through conditionals
   we record the conditional itself as having either a true or false value
   in this table.  */
static hash_table<expr_elt_hasher> *avail_exprs;

/* Unwindable const/copy equivalences (see tree-ssa-scopedtables.h).  */
static const_and_copies *const_and_copies;

/* Track whether or not we have changed the control flow graph.  */
static bool cfg_altered;

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;

/* Statements queued for a noreturn fixup after the walk --
   NOTE(review): the producing/consuming code is outside this chunk;
   confirm against the pass driver (cf. tree-cfgcleanup.h include).  */
static vec<gimple> need_noreturn_fixup;
35c15734 248
/* Statistics for dominator optimizations.  Counters are accumulated
   while the pass runs; the fields' exact increment sites are elsewhere
   in this file.  */
struct opt_stats_d
{
  long num_stmts;
  long num_exprs_considered;
  long num_re;
  long num_const_prop;
  long num_copy_prop;
};

/* Accumulated statistics for the current invocation of the pass.  */
static struct opt_stats_d opt_stats;
260
/* Forward declarations of local functions defined later in this file.  */
static void optimize_stmt (basic_block, gimple_stmt_iterator);
static tree lookup_avail_expr (gimple, bool);
static hashval_t avail_expr_hash (const void *);
static void htab_statistics (FILE *,
			     const hash_table<expr_elt_hasher> &);
static void record_cond (cond_equivalence *);
static void record_equality (tree, tree);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block);
static void eliminate_redundant_computations (gimple_stmt_iterator *);
static void record_equivalences_from_stmt (gimple, int);
static void remove_local_expressions_from_table (void);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
4ee9c684 275
88dbf20f 276
/* Given a statement STMT, initialize the hash table element pointed to
   by ELEMENT.  LHS is the value STMT produces (may be NULL for
   statements, such as GIMPLE_COND, that define no value).  For calls
   and PHIs the argument vectors are heap-allocated here and must be
   released with free_expr_hash_elt_contents.  */

static void
initialize_hash_element (gimple stmt, tree lhs,
			 struct expr_hash_elt *element)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = &element->expr;

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
	{
	case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  /* Canonicalize all conversion codes to NOP_EXPR so that
	     equivalent conversions hash and compare as equal.  */
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
	case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else if (code == GIMPLE_COND)
    {
      /* Conditionals are hashed as boolean-valued binary expressions.  */
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      /* Only calls with an LHS are entered into the table.  */
      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      /* Only const/pure calls may be treated as redundant; record
	 the property here for hashable_expr_equal_p.  */
      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
	expr->ops.call.pure = true;
      else
	expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      /* A switch is represented by its index expression.  */
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      /* A computed goto is represented by its destination expression.  */
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);

      for (i = 0; i < nargs; i++)
	expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  element->lhs = lhs;
  element->vop = gimple_vuse (stmt);
  element->hash = avail_expr_hash (element);
  element->stamp = element;
}
387
388/* Given a conditional expression COND as a tree, initialize
389 a hashable_expr expression EXPR. The conditional must be a
390 comparison or logical negation. A constant or a variable is
391 not permitted. */
392
393static void
394initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
395{
396 expr->type = boolean_type_node;
48e1416a 397
75a70cf9 398 if (COMPARISON_CLASS_P (cond))
399 {
400 expr->kind = EXPR_BINARY;
401 expr->ops.binary.op = TREE_CODE (cond);
402 expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
403 expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
404 }
405 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
406 {
407 expr->kind = EXPR_UNARY;
408 expr->ops.unary.op = TRUTH_NOT_EXPR;
409 expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
410 }
411 else
412 gcc_unreachable ();
413}
414
415/* Given a hashable_expr expression EXPR and an LHS,
416 initialize the hash table element pointed to by ELEMENT. */
417
418static void
419initialize_hash_element_from_expr (struct hashable_expr *expr,
420 tree lhs,
421 struct expr_hash_elt *element)
422{
423 element->expr = *expr;
424 element->lhs = lhs;
387312e6 425 element->vop = NULL_TREE;
75a70cf9 426 element->hash = avail_expr_hash (element);
427 element->stamp = element;
428}
429
/* Compare two hashable_expr structures for equivalence.
   They are considered equivalent when the expressions
   they denote must necessarily be equal.  The logic is intended
   to follow that of operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
			      expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
	return false;

      /* For conversions, also require matching signedness of the
	 result types.  */
      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
	   || expr0->ops.unary.op == NON_LVALUE_EXPR)
	  && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
	return false;

      return operand_equal_p (expr0->ops.unary.opnd,
			      expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order of the first two
	 operands (the third was already matched above).  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
	size_t i;

	/* If the calls are to different functions, then they
	   clearly cannot be equal.  */
	if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
					expr1->ops.call.fn_from))
	  return false;

	/* Only const/pure calls (see initialize_hash_element) can
	   ever be considered equal.  */
	if (! expr0->ops.call.pure)
	  return false;

	if (expr0->ops.call.nargs != expr1->ops.call.nargs)
	  return false;

	for (i = 0; i < expr0->ops.call.nargs; i++)
	  if (! operand_equal_p (expr0->ops.call.args[i],
				 expr1->ops.call.args[i], 0))
	    return false;

	/* Throwing calls must agree on their EH landing pad, if any.  */
	if (stmt_could_throw_p (expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

	return true;
      }

    case EXPR_PHI:
      {
	size_t i;

	if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
	  return false;

	for (i = 0; i < expr0->ops.phi.nargs; i++)
	  if (! operand_equal_p (expr0->ops.phi.args[i],
				 expr1->ops.phi.args[i], 0))
	    return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
564
/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

/* Hash T1 and T2 into HSTATE in an order-insensitive fashion, for use
   on the operands of commutative operators.  */

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value in HSTATE.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical initial HSTATE.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
	 Don't hash the type, that can lead to having nodes which
	 compare equal according to operand_equal_p, but which
	 have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
	  || expr->ops.unary.op == NON_LVALUE_EXPR)
	hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.binary.opnd0, hstate);
	  inchash::add_expr (expr->ops.binary.opnd1, hstate);
	}
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.ternary.opnd0, hstate);
	  inchash::add_expr (expr->ops.ternary.opnd1, hstate);
	}
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
	size_t i;
	enum tree_code code = CALL_EXPR;
	gcall *fn_from;

	hstate.add_object (code);
	fn_from = expr->ops.call.fn_from;
	/* Internal calls have no callee tree; hash the internal
	   function number instead.  */
	if (gimple_call_internal_p (fn_from))
	  hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
	else
	  inchash::add_expr (gimple_call_fn (fn_from), hstate);
	for (i = 0; i < expr->ops.call.nargs; i++)
	  inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
	size_t i;

	for (i = 0; i < expr->ops.phi.nargs; i++)
	  inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}
671
/* Print a diagnostic dump of an expression hash table entry to STREAM.
   Format is "STMT [lhs = ] <expr> [with <vop>]\n".  */

static void
print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
{
  fprintf (stream, "STMT ");

  if (element->lhs)
    {
      print_generic_expr (stream, element->lhs, 0);
      fprintf (stream, " = ");
    }

  switch (element->expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, element->expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (element->expr.ops.unary.op));
      print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", get_tree_code_name (element->expr.ops.binary.op));
      print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (element->expr.ops.ternary.op));
      print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
	size_t i;
	size_t nargs = element->expr.ops.call.nargs;
	gcall *fn_from;

	fn_from = element->expr.ops.call.fn_from;
	/* Internal functions have no callee tree to print; use the
	   internal function name.  */
	if (gimple_call_internal_p (fn_from))
	  fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
		 stream);
	else
	  print_generic_expr (stream, gimple_call_fn (fn_from), 0);
	fprintf (stream, " (");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, element->expr.ops.call.args[i], 0);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
	size_t i;
	size_t nargs = element->expr.ops.phi.nargs;

	fprintf (stream, "PHI <");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, element->expr.ops.phi.args[i], 0);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ">");
      }
      break;
    }

  if (element->vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, element->vop, 0);
    }

  fprintf (stream, "\n");
}
760
78d53e33 761/* Delete variable sized pieces of the expr_hash_elt ELEMENT. */
75a70cf9 762
763static void
78d53e33 764free_expr_hash_elt_contents (struct expr_hash_elt *element)
75a70cf9 765{
75a70cf9 766 if (element->expr.kind == EXPR_CALL)
767 free (element->expr.ops.call.args);
78d53e33 768 else if (element->expr.kind == EXPR_PHI)
889ef038 769 free (element->expr.ops.phi.args);
78d53e33 770}
771
772/* Delete an expr_hash_elt and reclaim its storage. */
889ef038 773
78d53e33 774static void
775free_expr_hash_elt (void *elt)
776{
777 struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);
778 free_expr_hash_elt_contents (element);
75a70cf9 779 free (element);
780}
781
2f0993e7 782/* Allocate an EDGE_INFO for edge E and attach it to E.
783 Return the new EDGE_INFO structure. */
784
785static struct edge_info *
786allocate_edge_info (edge e)
787{
788 struct edge_info *edge_info;
789
945865c5 790 edge_info = XCNEW (struct edge_info);
2f0993e7 791
792 e->aux = edge_info;
793 return edge_info;
794}
795
796/* Free all EDGE_INFO structures associated with edges in the CFG.
640e9781 797 If a particular edge can be threaded, copy the redirection
2f0993e7 798 target from the EDGE_INFO structure into the edge's AUX field
799 as required by code to update the CFG and SSA graph for
800 jump threading. */
801
802static void
803free_all_edge_infos (void)
804{
805 basic_block bb;
806 edge_iterator ei;
807 edge e;
808
fc00614f 809 FOR_EACH_BB_FN (bb, cfun)
2f0993e7 810 {
811 FOR_EACH_EDGE (e, ei, bb->preds)
812 {
945865c5 813 struct edge_info *edge_info = (struct edge_info *) e->aux;
2f0993e7 814
815 if (edge_info)
816 {
f1f41a6c 817 edge_info->cond_equivalences.release ();
2f0993e7 818 free (edge_info);
3cebc9d2 819 e->aux = NULL;
2f0993e7 820 }
821 }
822 }
823}
824
970975d0 825/* Build a cond_equivalence record indicating that the comparison
826 CODE holds between operands OP0 and OP1 and push it to **P. */
827
828static void
829build_and_record_new_cond (enum tree_code code,
830 tree op0, tree op1,
831 vec<cond_equivalence> *p)
832{
833 cond_equivalence c;
834 struct hashable_expr *cond = &c.cond;
835
836 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
837
838 cond->type = boolean_type_node;
839 cond->kind = EXPR_BINARY;
840 cond->ops.binary.op = code;
841 cond->ops.binary.opnd0 = op0;
842 cond->ops.binary.opnd1 = op1;
843
844 c.value = boolean_true_node;
845 p->safe_push (c);
846}
847
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

static void
record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  /* Only comparisons have dominated conditions to derive.  */
  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      /* For floats, a < b (or a > b) also implies the operands are
	 ordered and a <> b.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1,
				     &edge_info->cond_equivalences);
	}

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    case GE_EXPR:
    case LE_EXPR:
      /* For floats, a >= b (or a <= b) implies the operands are ordered.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences);
	}
      break;

    case EQ_EXPR:
      /* a == b implies a <= b and a >= b; for floats it also implies
	 the operands are ordered.  */
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				     &edge_info->cond_equivalences);
	}
      build_and_record_new_cond (LE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (GE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    case UNORDERED_EXPR:
      /* Unordered operands satisfy all the UN* comparisons and are
	 necessarily unequal.  */
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNLT_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGT_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, &edge_info->cond_equivalences);
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (UNGE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    case LTGT_EXPR:
      /* a <> b implies the operands are ordered and unequal.  */
      build_and_record_new_cond (NE_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1,
				 &edge_info->cond_equivalences);
      break;

    default:
      break;
    }

  /* Now record the original condition as true and its inversion as
     false (appended after any derived conditions above).  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  edge_info->cond_equivalences.safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  edge_info->cond_equivalences.safe_push (c);
}
963
964/* We have finished optimizing BB, record any information implied by
965 taking a specific outgoing edge from BB. */
966
static void
record_edge_info (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  struct edge_info *edge_info;

  if (! gsi_end_p (gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      location_t loc = gimple_location (stmt);

      if (gimple_code (stmt) == GIMPLE_SWITCH)
	{
	  gswitch *switch_stmt = as_a <gswitch *> (stmt);
	  tree index = gimple_switch_index (switch_stmt);

	  if (TREE_CODE (index) == SSA_NAME)
	    {
	      int i;
	      int n_labels = gimple_switch_num_labels (switch_stmt);
	      /* INFO maps a block index to the single case label that
		 targets it, or to error_mark_node when no usable
		 equivalence exists for that target.  */
	      tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
	      edge e;
	      edge_iterator ei;

	      for (i = 0; i < n_labels; i++)
		{
		  tree label = gimple_switch_label (switch_stmt, i);
		  basic_block target_bb = label_to_block (CASE_LABEL (label));
		  /* A case range, the default label (no CASE_LOW), or a
		     second label reaching the same block all invalidate
		     the equivalence INDEX == CASE_LOW for that target.  */
		  if (CASE_HIGH (label)
		      || !CASE_LOW (label)
		      || info[target_bb->index])
		    info[target_bb->index] = error_mark_node;
		  else
		    info[target_bb->index] = label;
		}

	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  basic_block target_bb = e->dest;
		  tree label = info[target_bb->index];

		  if (label != NULL && label != error_mark_node)
		    {
		      /* Traversing E implies INDEX == CASE_LOW, converted
			 to the type of INDEX.  */
		      tree x = fold_convert_loc (loc, TREE_TYPE (index),
						 CASE_LOW (label));
		      edge_info = allocate_edge_info (e);
		      edge_info->lhs = index;
		      edge_info->rhs = x;
		    }
		}
	      free (info);
	    }
	}

      /* A COND_EXPR may create equivalences too.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  edge true_edge;
	  edge false_edge;

	  tree op0 = gimple_cond_lhs (stmt);
	  tree op1 = gimple_cond_rhs (stmt);
	  enum tree_code code = gimple_cond_code (stmt);

	  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	  /* Special case comparing booleans against a constant as we
	     know the value of OP0 on both arms of the branch.  i.e., we
	     can record an equivalence for OP0 rather than COND.  */
	  if ((code == EQ_EXPR || code == NE_EXPR)
	      && TREE_CODE (op0) == SSA_NAME
	      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
	      && is_gimple_min_invariant (op1))
	    {
	      if (code == EQ_EXPR)
		{
		  edge_info = allocate_edge_info (true_edge);
		  edge_info->lhs = op0;
		  edge_info->rhs = (integer_zerop (op1)
				    ? boolean_false_node
				    : boolean_true_node);

		  edge_info = allocate_edge_info (false_edge);
		  edge_info->lhs = op0;
		  edge_info->rhs = (integer_zerop (op1)
				    ? boolean_true_node
				    : boolean_false_node);
		}
	      else
		{
		  /* NE_EXPR: the recorded values are the mirror image of
		     the EQ_EXPR case above.  */
		  edge_info = allocate_edge_info (true_edge);
		  edge_info->lhs = op0;
		  edge_info->rhs = (integer_zerop (op1)
				    ? boolean_true_node
				    : boolean_false_node);

		  edge_info = allocate_edge_info (false_edge);
		  edge_info->lhs = op0;
		  edge_info->rhs = (integer_zerop (op1)
				    ? boolean_false_node
				    : boolean_true_node);
		}
	    }
	  else if (is_gimple_min_invariant (op0)
		   && (TREE_CODE (op1) == SSA_NAME
		       || is_gimple_min_invariant (op1)))
	    {
	      tree cond = build2 (code, boolean_type_node, op0, op1);
	      tree inverted = invert_truthvalue_loc (loc, cond);
	      /* With signed zeros, x == 0.0 does not fix the sign of x,
		 so a simple equivalence cannot be recorded then.  */
	      bool can_infer_simple_equiv
		= !(HONOR_SIGNED_ZEROS (op0)
		    && real_zerop (op0));
	      struct edge_info *edge_info;

	      edge_info = allocate_edge_info (true_edge);
	      record_conditions (edge_info, cond, inverted);

	      if (can_infer_simple_equiv && code == EQ_EXPR)
		{
		  edge_info->lhs = op1;
		  edge_info->rhs = op0;
		}

	      edge_info = allocate_edge_info (false_edge);
	      record_conditions (edge_info, inverted, cond);

	      if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
		{
		  edge_info->lhs = op1;
		  edge_info->rhs = op0;
		}
	    }

	  else if (TREE_CODE (op0) == SSA_NAME
		   && (TREE_CODE (op1) == SSA_NAME
		       || is_gimple_min_invariant (op1)))
	    {
	      tree cond = build2 (code, boolean_type_node, op0, op1);
	      tree inverted = invert_truthvalue_loc (loc, cond);
	      bool can_infer_simple_equiv
		= !(HONOR_SIGNED_ZEROS (op1)
		    && (TREE_CODE (op1) == SSA_NAME || real_zerop (op1)));
	      struct edge_info *edge_info;

	      edge_info = allocate_edge_info (true_edge);
	      record_conditions (edge_info, cond, inverted);

	      if (can_infer_simple_equiv && code == EQ_EXPR)
		{
		  edge_info->lhs = op0;
		  edge_info->rhs = op1;
		}

	      edge_info = allocate_edge_info (false_edge);
	      record_conditions (edge_info, inverted, cond);

	      if (can_infer_simple_equiv && TREE_CODE (inverted) == EQ_EXPR)
		{
		  edge_info->lhs = op0;
		  edge_info->rhs = op1;
		}
	    }
	}

      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
    }
}
1134
1135
/* Dominator-tree walker for the DOM pass.  Equivalences are recorded
   on the way down (before_dom_children) and jump threading is attempted
   on the way back up (after_dom_children).  */

class dom_opt_dom_walker : public dom_walker
{
public:
  dom_opt_dom_walker (cdi_direction direction)
    : dom_walker (direction), m_dummy_cond (NULL) {}

  virtual void before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

private:
  void thread_across_edge (edge);

  /* Dummy condition built lazily and reused by the generic jump
     threading code; see thread_across_edge.  */
  gcond *m_dummy_cond;
};
1150
48e1416a 1151/* Jump threading, redundancy elimination and const/copy propagation.
4ee9c684 1152
4ee9c684 1153 This pass may expose new symbols that need to be renamed into SSA. For
1154 every new symbol exposed, its corresponding bit will be set in
591c2a30 1155 VARS_TO_RENAME. */
4ee9c684 1156
65b0537f 1157namespace {
1158
/* Pass descriptor for the dominator optimization pass.  */

const pass_data pass_data_dominator =
{
  GIMPLE_PASS, /* type */
  "dom", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
};
1171
/* The dominator optimization pass; the real work happens in the
   execute method below.  Gated on -ftree-dom.  */

class pass_dominator : public gimple_opt_pass
{
public:
  pass_dominator (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dominator, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dominator (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dom != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dominator
1185
/* Main driver of the DOM pass for function FUN: set up the tables,
   walk the dominator tree recording equivalences and optimizing
   statements, perform jump threading, then clean up.  Returns 0
   (no additional TODO flags).  */

unsigned int
pass_dominator::execute (function *fun)
{
  memset (&opt_stats, 0, sizeof (opt_stats));

  /* Create our hash tables.  */
  avail_exprs = new hash_table<expr_elt_hasher> (1024);
  avail_exprs_stack.create (20);
  const_and_copies = new class const_and_copies (dump_file, dump_flags);
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_noreturn_fixup.create (0);

  calculate_dominance_info (CDI_DOMINATORS);
  cfg_altered = false;

  /* We need to know loop structures in order to avoid destroying them
     in jump threading.  Note that we still can e.g. thread through loop
     headers to an exit edge, or through loop header to the loop body, assuming
     that we update the loop info.

     TODO: We don't need to set LOOPS_HAVE_PREHEADERS generally, but due
     to several overly conservative bail-outs in jump threading, case
     gcc.dg/tree-ssa/pr21417.c can't be threaded if loop preheader is
     missing.  We should improve jump threading in future then
     LOOPS_HAVE_PREHEADERS won't be needed here.  */
  loop_optimizer_init (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES);

  /* Initialize the value-handle array.  */
  threadedge_initialize_values ();

  /* We need accurate information regarding back edges in the CFG
     for jump threading; this may include back edges that are not part of
     a single loop.  */
  mark_dfs_back_edges ();

  /* Recursively walk the dominator tree optimizing statements.  */
  dom_opt_dom_walker (CDI_DOMINATORS).walk (fun->cfg->x_entry_block_ptr);

  {
    gimple_stmt_iterator gsi;
    basic_block bb;
    /* Flush any statements the walk left in a modified-but-not-updated
       state before running the SSA updater.  */
    FOR_EACH_BB_FN (bb, fun)
      {
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  update_stmt_if_modified (gsi_stmt (gsi));
      }
  }

  /* If we exposed any new variables, go ahead and put them into
     SSA form now, before we handle jump threading.  This simplifies
     interactions between rewriting of _DECL nodes into SSA form
     and rewriting SSA_NAME nodes into SSA form after block
     duplication and CFG manipulation.  */
  update_ssa (TODO_update_ssa);

  free_all_edge_infos ();

  /* Thread jumps, creating duplicate blocks as needed.  */
  cfg_altered |= thread_through_all_blocks (first_pass_instance);

  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  /* Removal of statements may make some EH edges dead.  Purge
     such edges from the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      unsigned i;
      bitmap_iterator bi;

      /* Jump threading may have created forwarder blocks from blocks
	 needing EH cleanup; the new successor of these blocks, which
	 has inherited from the original block, needs the cleanup.
	 Don't clear bits in the bitmap, as that can break the bitmap
	 iterator.  */
      EXECUTE_IF_SET_IN_BITMAP (need_eh_cleanup, 0, i, bi)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fun, i);
	  if (bb == NULL)
	    continue;
	  while (single_succ_p (bb)
		 && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
	    bb = single_succ (bb);
	  if (bb == EXIT_BLOCK_PTR_FOR_FN (fun))
	    continue;
	  if ((unsigned) bb->index != i)
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	}

      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      bitmap_clear (need_eh_cleanup);
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk or before
     jump threading finished.  Do this in reverse order so we don't
     inadvertedly remove a stmt we want to fixup by visiting a dominating
     now noreturn call first.  */
  while (!need_noreturn_fixup.is_empty ())
    {
      gimple stmt = need_noreturn_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (fun, "Redundant expressions eliminated",
			    opt_stats.num_re);
  statistics_counter_event (fun, "Constants propagated",
			    opt_stats.num_const_prop);
  statistics_counter_event (fun, "Copies propagated",
			    opt_stats.num_copy_prop);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & TDF_STATS))
    dump_dominator_optimization_stats (dump_file);

  loop_optimizer_finalize ();

  /* Delete our main hashtable.  */
  delete avail_exprs;
  avail_exprs = NULL;

  /* Free asserted bitmaps and stacks.  */
  BITMAP_FREE (need_eh_cleanup);
  need_noreturn_fixup.release ();
  avail_exprs_stack.release ();
  delete const_and_copies;

  /* Free the value-handle array.  */
  threadedge_finalize_values ();

  return 0;
}
1324
cbe8bda8 1325} // anon namespace
1326
/* Factory used by the pass manager to instantiate the DOM pass.  */

gimple_opt_pass *
make_pass_dominator (gcc::context *ctxt)
{
  return new pass_dominator (ctxt);
}
1332
4ee9c684 1333
75a70cf9 1334/* Given a conditional statement CONDSTMT, convert the
1335 condition to a canonical form. */
54aceb26 1336
1337static void
1a91d914 1338canonicalize_comparison (gcond *condstmt)
54aceb26 1339{
54aceb26 1340 tree op0;
1341 tree op1;
75a70cf9 1342 enum tree_code code;
54aceb26 1343
75a70cf9 1344 gcc_assert (gimple_code (condstmt) == GIMPLE_COND);
54aceb26 1345
75a70cf9 1346 op0 = gimple_cond_lhs (condstmt);
1347 op1 = gimple_cond_rhs (condstmt);
1348
1349 code = gimple_cond_code (condstmt);
54aceb26 1350
1351 /* If it would be profitable to swap the operands, then do so to
1352 canonicalize the statement, enabling better optimization.
1353
1354 By placing canonicalization of such expressions here we
1355 transparently keep statements in canonical form, even
1356 when the statement is modified. */
1357 if (tree_swap_operands_p (op0, op1, false))
1358 {
1359 /* For relationals we need to swap the operands
1360 and change the code. */
1361 if (code == LT_EXPR
1362 || code == GT_EXPR
1363 || code == LE_EXPR
1364 || code == GE_EXPR)
1365 {
75a70cf9 1366 code = swap_tree_comparison (code);
1367
1368 gimple_cond_set_code (condstmt, code);
1369 gimple_cond_set_lhs (condstmt, op1);
1370 gimple_cond_set_rhs (condstmt, op0);
1371
1372 update_stmt (condstmt);
54aceb26 1373 }
1374 }
1375}
4ee9c684 1376
4ee9c684 1377/* Initialize local stacks for this optimizer and record equivalences
1378 upon entry to BB. Equivalences can come from the edge traversed to
1379 reach BB or they may come from PHI nodes at the start of BB. */
1380
4ee9c684 1381/* Remove all the expressions in LOCALS from TABLE, stopping when there are
1382 LIMIT entries left in LOCALs. */
1383
static void
remove_local_expressions_from_table (void)
{
  /* Remove all the expressions made available in this block.  */
  while (avail_exprs_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim
	= avail_exprs_stack.pop ();
      expr_hash_elt **slot;

      /* A NULL first element is the unwind marker pushed when the
	 current block was entered; stop there.  */
      if (victim.first == NULL)
	break;

      /* This must precede the actual removal from the hash table,
	 as ELEMENT and the table entry may share a call argument
	 vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< ");
	  print_expr_hash_elt (dump_file, victim.first);
	}

      slot = avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      /* A non-NULL second element records the entry this expression
	 shadowed when it was inserted; restore it.  Otherwise the
	 slot simply becomes empty again.  */
      if (victim.second != NULL)
	{
	  free_expr_hash_elt (*slot);
	  *slot = victim.second;
	}
      else
	avail_exprs->clear_slot (slot);
    }
}
1417
62b180e1 1418/* A trivial wrapper so that we can present the generic jump
1419 threading code with a simple API for simplifying statements. */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
				  gimple within_stmt ATTRIBUTE_UNUSED)
{
  /* Look STMT up in the available expression table; the second
     argument (false) means it is not inserted if absent.  */
  return lookup_avail_expr (stmt, false);
}
1426
9358e89c 1427/* Record into the equivalence tables any equivalences implied by
1428 traversing edge E (which are cached in E->aux).
1429
1430 Callers are responsible for managing the unwinding markers. */
629ec126 1431static void
1432record_temporary_equivalences (edge e)
1433{
1434 int i;
1435 struct edge_info *edge_info = (struct edge_info *) e->aux;
1436
1437 /* If we have info associated with this edge, record it into
1438 our equivalence tables. */
1439 if (edge_info)
1440 {
1441 cond_equivalence *eq;
1442 tree lhs = edge_info->lhs;
1443 tree rhs = edge_info->rhs;
1444
1445 /* If we have a simple NAME = VALUE equivalence, record it. */
1446 if (lhs && TREE_CODE (lhs) == SSA_NAME)
545372c5 1447 const_and_copies->record_const_or_copy (lhs, rhs);
629ec126 1448
1449 /* If we have 0 = COND or 1 = COND equivalences, record them
1450 into our expression hash tables. */
1451 for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
1452 record_cond (eq);
1453 }
1454}
1455
62b180e1 1456/* Wrapper for common code to attempt to thread an edge. For example,
1457 it handles lazily building the dummy condition and the bookkeeping
1458 when jump threading is successful. */
1459
/* Wrapper for common code to attempt to thread an edge.  Lazily builds
   the dummy condition, pushes unwind markers, records edge equivalences,
   invokes the generic threader, and restores the expression table.  */

void
dom_opt_dom_walker::thread_across_edge (edge e)
{
  /* Build the reusable dummy condition on first use.  */
  if (! m_dummy_cond)
    m_dummy_cond =
      gimple_build_cond (NE_EXPR,
			 integer_zero_node, integer_zero_node,
			 NULL, NULL);

  /* Push a marker on both stacks so we can unwind the tables back to their
     current state.  */
  avail_exprs_stack.safe_push
    (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
  const_and_copies->push_marker ();

  /* Traversing E may result in equivalences we can utilize.  */
  record_temporary_equivalences (e);

  /* With all the edge equivalences in the tables, go ahead and attempt
     to thread through E->dest.  */
  ::thread_across_edge (m_dummy_cond, e, false,
			const_and_copies,
			simplify_stmt_for_jump_threading);

  /* And restore the various tables to their state before
     we threaded this edge.

     XXX The code in tree-ssa-threadedge.c will restore the state of
     the const_and_copies table.  We just have to restore the expression
     table.  */
  remove_local_expressions_from_table ();
}
1492
4ee9c684 1493/* PHI nodes can create equivalences too.
1494
1495 Ignoring any alternatives which are the same as the result, if
1496 all the alternatives are equal, then the PHI node creates an
8dbf774a 1497 equivalence. */
6e9a4371 1498
static void
record_equivalences_from_phis (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      tree lhs = gimple_phi_result (phi);
      tree rhs = NULL;
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	{
	  tree t = gimple_phi_arg_def (phi, i);

	  /* Ignore alternatives which are the same as our LHS.  Since
	     LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
	     can simply compare pointers.  */
	  if (lhs == t)
	    continue;

	  /* If we have not processed an alternative yet, then set
	     RHS to this alternative.  */
	  if (rhs == NULL)
	    rhs = t;
	  /* If we have processed an alternative (stored in RHS), then
	     see if it is equal to this one.  If it isn't, then stop
	     the search.  */
	  else if (! operand_equal_for_phi_arg_p (rhs, t))
	    break;
	}

      /* If we had no interesting alternatives, then all the RHS alternatives
	 must have been the same as LHS.  */
      if (!rhs)
	rhs = lhs;

      /* If we managed to iterate through each PHI alternative without
	 breaking out of the loop, then we have a PHI which may create
	 a useful equivalence.  We do not need to record unwind data for
	 this, since this is a true assignment and not an equivalence
	 inferred from a comparison.  All uses of this ssa name are dominated
	 by this assignment, so unwinding just costs time and space.  */
      /* Note: i == num_args distinguishes "all alternatives agreed"
	 (loop ran to completion) from the early break above.  */
      if (i == gimple_phi_num_args (phi)
	  && may_propagate_copy (lhs, rhs))
	set_ssa_name_value (lhs, rhs);
    }
}
1549
c0735efa 1550/* Ignoring loop backedges, if BB has precisely one incoming edge then
1551 return that edge. Otherwise return NULL. */
1552static edge
1553single_incoming_edge_ignoring_loop_edges (basic_block bb)
1554{
1555 edge retval = NULL;
1556 edge e;
cd665a06 1557 edge_iterator ei;
c0735efa 1558
cd665a06 1559 FOR_EACH_EDGE (e, ei, bb->preds)
c0735efa 1560 {
1561 /* A loop back edge can be identified by the destination of
1562 the edge dominating the source of the edge. */
1563 if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1564 continue;
1565
1566 /* If we have already seen a non-loop edge, then we must have
1567 multiple incoming non-loop edges and thus we return NULL. */
1568 if (retval)
1569 return NULL;
1570
1571 /* This is the first non-loop incoming edge we have found. Record
1572 it. */
1573 retval = e;
1574 }
1575
1576 return retval;
1577}
1578
4ee9c684 1579/* Record any equivalences created by the incoming edge to BB. If BB
1580 has more than one incoming edge, then no equivalence is created. */
1581
static void
record_equivalences_from_incoming_edge (basic_block bb)
{
  edge e;
  basic_block parent;
  struct edge_info *edge_info;

  /* If our parent block ended with a control statement, then we may be
     able to record some equivalences based on which outgoing edge from
     the parent was followed.  */
  parent = get_immediate_dominator (CDI_DOMINATORS, bb);

  e = single_incoming_edge_ignoring_loop_edges (bb);

  /* If we had a single incoming edge from our parent block, then enter
     any data associated with the edge into our tables.  */
  if (e && e->src == parent)
    {
      unsigned int i;

      edge_info = (struct edge_info *) e->aux;

      if (edge_info)
	{
	  tree lhs = edge_info->lhs;
	  tree rhs = edge_info->rhs;
	  cond_equivalence *eq;

	  if (lhs)
	    record_equality (lhs, rhs);

	  /* If LHS is an SSA_NAME and RHS is a constant integer and LHS was
	     set via a widening type conversion, then we may be able to record
	     additional equivalences.  */
	  if (lhs
	      && TREE_CODE (lhs) == SSA_NAME
	      && is_gimple_constant (rhs)
	      && TREE_CODE (rhs) == INTEGER_CST)
	    {
	      gimple defstmt = SSA_NAME_DEF_STMT (lhs);

	      if (defstmt
		  && is_gimple_assign (defstmt)
		  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (defstmt)))
		{
		  /* LHS = (cast) OLD_RHS; propagate the equivalence back
		     through the conversion when it is safe to do so.  */
		  tree old_rhs = gimple_assign_rhs1 (defstmt);

		  /* If the conversion widens the original value and
		     the constant is in the range of the type of OLD_RHS,
		     then convert the constant and record the equivalence.

		     Note that int_fits_type_p does not check the precision
		     if the upper and lower bounds are OK.  */
		  if (INTEGRAL_TYPE_P (TREE_TYPE (old_rhs))
		      && (TYPE_PRECISION (TREE_TYPE (lhs))
			  > TYPE_PRECISION (TREE_TYPE (old_rhs)))
		      && int_fits_type_p (rhs, TREE_TYPE (old_rhs)))
		    {
		      tree newval = fold_convert (TREE_TYPE (old_rhs), rhs);
		      record_equality (old_rhs, newval);
		    }
		}
	    }

	  for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
	    record_cond (eq);
	}
    }
}
1651
1652/* Dump SSA statistics on FILE. */
1653
/* Dump SSA statistics on FILE.  */

void
dump_dominator_optimization_stats (FILE *file)
{
  fprintf (file, "Total number of statements: %6ld\n\n",
	   opt_stats.num_stmts);
  fprintf (file, "Exprs considered for dominator optimizations: %6ld\n",
	   opt_stats.num_exprs_considered);

  fprintf (file, "\nHash table statistics:\n");

  fprintf (file, " avail_exprs: ");
  htab_statistics (file, *avail_exprs);
}
1667
1668
1669/* Dump SSA statistics on stderr. */
1670
/* Dump SSA statistics on stderr; convenience wrapper for use from
   a debugger.  */

DEBUG_FUNCTION void
debug_dominator_optimization_stats (void)
{
  dump_dominator_optimization_stats (stderr);
}
1676
1677
970975d0 1678/* Dump statistics for the hash table HTAB. */
043d0665 1679
/* Dump statistics for the hash table HTAB to FILE.  */

static void
htab_statistics (FILE *file, const hash_table<expr_elt_hasher> &htab)
{
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) htab.size (),
	   (long) htab.elements (),
	   htab.collisions ());
}
043d0665 1688
043d0665 1689
970975d0 1690/* Enter condition equivalence into the expression hash table.
1691 This indicates that a conditional expression has a known
1692 boolean value. */
2f0993e7 1693
/* Enter condition equivalence P into the expression hash table.
   This indicates that a conditional expression has a known
   boolean value.  */

static void
record_cond (cond_equivalence *p)
{
  struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
  expr_hash_elt **slot;

  initialize_hash_element_from_expr (&p->cond, p->value, element);

  slot = avail_exprs->find_slot_with_hash (element, element->hash, INSERT);
  if (*slot == NULL)
    {
      /* The table takes ownership of ELEMENT; push it (paired with NULL,
	 i.e. no shadowed entry) so the block-exit unwind removes it.  */
      *slot = element;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "1>>> ");
	  print_expr_hash_elt (dump_file, element);
	}

      avail_exprs_stack.safe_push
	(std::pair<expr_hash_elt_t, expr_hash_elt_t> (element, NULL));
    }
  else
    /* An equivalent entry already exists; discard the new element.  */
    free_expr_hash_elt (element);
}
1719
2b5ad301 1720/* Return the loop depth of the basic block of the defining statement of X.
1721 This number should not be treated as absolutely correct because the loop
1722 information may not be completely up-to-date when dom runs. However, it
1723 will be relatively correct, and as more passes are taught to keep loop info
1724 up to date, the result will become more and more accurate. */
1725
1726static int
1727loop_depth_of_name (tree x)
1728{
1729 gimple defstmt;
1730 basic_block defbb;
1731
1732 /* If it's not an SSA_NAME, we have no clue where the definition is. */
1733 if (TREE_CODE (x) != SSA_NAME)
1734 return 0;
1735
1736 /* Otherwise return the loop depth of the defining statement's bb.
1737 Note that there may not actually be a bb for this statement, if the
1738 ssa_name is live on entry. */
1739 defstmt = SSA_NAME_DEF_STMT (x);
1740 defbb = gimple_bb (defstmt);
1741 if (!defbb)
1742 return 0;
1743
1744 return bb_loop_depth (defbb);
1745}
1746
4ee9c684 1747/* Similarly, but assume that X and Y are the two operands of an EQ_EXPR.
1748 This constrains the cases in which we may treat this as assignment. */
1749
static void
record_equality (tree x, tree y)
{
  tree prev_x = NULL, prev_y = NULL;

  if (TREE_CODE (x) == SSA_NAME)
    prev_x = SSA_NAME_VALUE (x);
  if (TREE_CODE (y) == SSA_NAME)
    prev_y = SSA_NAME_VALUE (y);

  /* If one of the previous values is invariant, or invariant in more loops
     (by depth), then use that.
     Otherwise it doesn't matter which value we choose, just so
     long as we canonicalize on one value.  */
  if (is_gimple_min_invariant (y))
    ;
  else if (is_gimple_min_invariant (x)
	   /* ??? When threading over backedges the following is important
	      for correctness.  See PR61757.  */
	   || (loop_depth_of_name (x) <= loop_depth_of_name (y)))
    prev_x = x, x = y, y = prev_x, prev_x = prev_y;
  else if (prev_x && is_gimple_min_invariant (prev_x))
    x = y, y = prev_x, prev_x = prev_y;
  else if (prev_y)
    y = prev_y;

  /* After the swapping, we must have one SSA_NAME.  */
  if (TREE_CODE (x) != SSA_NAME)
    return;

  /* For IEEE, -0.0 == 0.0, so we don't necessarily know the sign of a
     variable compared against zero.  If we're honoring signed zeros,
     then we cannot record this value unless we know that the value is
     nonzero.  */
  if (HONOR_SIGNED_ZEROS (x)
      && (TREE_CODE (y) != REAL_CST
	  || REAL_VALUES_EQUAL (dconst0, TREE_REAL_CST (y))))
    return;

  /* PREV_X is passed so the recording can be unwound later.  */
  const_and_copies->record_const_or_copy (x, y, prev_x);
}
1791
119a0489 1792/* Returns true when STMT is a simple iv increment. It detects the
1793 following situation:
48e1416a 1794
119a0489 1795 i_1 = phi (..., i_2)
1796 i_2 = i_1 +/- ... */
1797
cd22a796 1798bool
75a70cf9 1799simple_iv_increment_p (gimple stmt)
119a0489 1800{
cd22a796 1801 enum tree_code code;
75a70cf9 1802 tree lhs, preinc;
1803 gimple phi;
1804 size_t i;
119a0489 1805
75a70cf9 1806 if (gimple_code (stmt) != GIMPLE_ASSIGN)
119a0489 1807 return false;
1808
75a70cf9 1809 lhs = gimple_assign_lhs (stmt);
119a0489 1810 if (TREE_CODE (lhs) != SSA_NAME)
1811 return false;
1812
cd22a796 1813 code = gimple_assign_rhs_code (stmt);
1814 if (code != PLUS_EXPR
1815 && code != MINUS_EXPR
1816 && code != POINTER_PLUS_EXPR)
119a0489 1817 return false;
1818
75a70cf9 1819 preinc = gimple_assign_rhs1 (stmt);
119a0489 1820 if (TREE_CODE (preinc) != SSA_NAME)
1821 return false;
1822
1823 phi = SSA_NAME_DEF_STMT (preinc);
75a70cf9 1824 if (gimple_code (phi) != GIMPLE_PHI)
119a0489 1825 return false;
1826
75a70cf9 1827 for (i = 0; i < gimple_phi_num_args (phi); i++)
1828 if (gimple_phi_arg_def (phi, i) == lhs)
119a0489 1829 return true;
1830
1831 return false;
1832}
1833
591c2a30 1834/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
48e1416a 1835 known value for that SSA_NAME (or NULL if no value is known).
591c2a30 1836
8dbf774a 1837 Propagate values from CONST_AND_COPIES into the PHI nodes of the
1838 successors of BB. */
591c2a30 1839
static void
cprop_into_successor_phis (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int indx;
      gphi_iterator gsi;

      /* If this is an abnormal edge, then we do not want to copy propagate
	 into the PHI alternative associated with this edge.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      gsi = gsi_start_phis (e->dest);
      if (gsi_end_p (gsi))
	continue;

      /* We may have an equivalence associated with this edge.  While
	 we can not propagate it into non-dominated blocks, we can
	 propagate them into PHIs in non-dominated blocks.  */

      /* Push the unwind marker so we can reset the const and copies
	 table back to its original state after processing this edge.  */
      const_and_copies->push_marker ();

      /* Extract and record any simple NAME = VALUE equivalences.

	 Don't bother with [01] = COND equivalences, they're not useful
	 here.  */
      struct edge_info *edge_info = (struct edge_info *) e->aux;
      if (edge_info)
	{
	  tree lhs = edge_info->lhs;
	  tree rhs = edge_info->rhs;

	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
	    const_and_copies->record_const_or_copy (lhs, rhs);
	}

      /* INDX selects the PHI argument corresponding to edge E.  */
      indx = e->dest_idx;
      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree new_val;
	  use_operand_p orig_p;
	  tree orig_val;
	  gphi *phi = gsi.phi ();

	  /* The alternative may be associated with a constant, so verify
	     it is an SSA_NAME before doing anything with it.  */
	  orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
	  orig_val = get_use_from_ptr (orig_p);
	  if (TREE_CODE (orig_val) != SSA_NAME)
	    continue;

	  /* If we have *ORIG_P in our constant/copy table, then replace
	     ORIG_P with its value in our constant/copy table.  */
	  new_val = SSA_NAME_VALUE (orig_val);
	  if (new_val
	      && new_val != orig_val
	      && (TREE_CODE (new_val) == SSA_NAME
		  || is_gimple_min_invariant (new_val))
	      && may_propagate_copy (orig_val, new_val))
	    propagate_value (orig_p, new_val);
	}

      const_and_copies->pop_to_marker ();
    }
}
1911
/* Called before visiting the dominator children of BB: push unwind
   markers, record equivalences available on entry to BB, then optimize
   each PHI and statement in the block.  */

void
dom_opt_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n\nOptimizing block #%d\n\n", bb->index);

  /* Push a marker on the stacks of local information so that we know how
     far to unwind when we finalize this block.  */
  avail_exprs_stack.safe_push
    (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
  const_and_copies->push_marker ();

  record_equivalences_from_incoming_edge (bb);

  /* PHI nodes can create equivalences too.  */
  record_equivalences_from_phis (bb);

  /* Create equivalences from redundant PHIs.  PHIs are only truly
     redundant when they exist in the same block, so push another
     marker and unwind right afterwards.  */
  avail_exprs_stack.safe_push
    (std::pair<expr_hash_elt_t, expr_hash_elt_t> (NULL, NULL));
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_redundant_computations (&gsi);
  remove_local_expressions_from_table ();

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    optimize_stmt (bb, gsi);

  /* Now prepare to process dominated blocks.  */
  record_edge_info (bb);
  cprop_into_successor_phis (bb);
}
1947
6bf320fb 1948/* We have finished processing the dominator children of BB, perform
1949 any finalization actions in preparation for leaving this node in
1950 the dominator tree. */
1951
54c91640 1952void
1953dom_opt_dom_walker::after_dom_children (basic_block bb)
6bf320fb 1954{
1955 gimple last;
1956
1957 /* If we have an outgoing edge to a block with multiple incoming and
1958 outgoing edges, then we may be able to thread the edge, i.e., we
1959 may be able to statically determine which of the outgoing edges
1960 will be traversed when the incoming edge from BB is traversed. */
1961 if (single_succ_p (bb)
1962 && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
1963 && potentially_threadable_block (single_succ (bb)))
1964 {
54c91640 1965 thread_across_edge (single_succ_edge (bb));
6bf320fb 1966 }
1967 else if ((last = last_stmt (bb))
1968 && gimple_code (last) == GIMPLE_COND
1969 && EDGE_COUNT (bb->succs) == 2
1970 && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
1971 && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
1972 {
1973 edge true_edge, false_edge;
1974
1975 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1976
1977 /* Only try to thread the edge if it reaches a target block with
1978 more than one predecessor and more than one successor. */
1979 if (potentially_threadable_block (true_edge->dest))
629ec126 1980 thread_across_edge (true_edge);
6bf320fb 1981
1982 /* Similarly for the ELSE arm. */
1983 if (potentially_threadable_block (false_edge->dest))
629ec126 1984 thread_across_edge (false_edge);
6bf320fb 1985
6bf320fb 1986 }
1987
629ec126 1988 /* These remove expressions local to BB from the tables. */
6bf320fb 1989 remove_local_expressions_from_table ();
545372c5 1990 const_and_copies->pop_to_marker ();
6bf320fb 1991}
1992
4ee9c684 1993/* Search for redundant computations in STMT. If any are found, then
1994 replace them with the variable holding the result of the computation.
1995
1996 If safe, record this expression into the available expression hash
1997 table. */
1998
e08ff65a 1999static void
75a70cf9 2000eliminate_redundant_computations (gimple_stmt_iterator* gsi)
4ee9c684 2001{
75a70cf9 2002 tree expr_type;
4ee9c684 2003 tree cached_lhs;
889ef038 2004 tree def;
75a70cf9 2005 bool insert = true;
75a70cf9 2006 bool assigns_var_p = false;
4ee9c684 2007
75a70cf9 2008 gimple stmt = gsi_stmt (*gsi);
2009
889ef038 2010 if (gimple_code (stmt) == GIMPLE_PHI)
2011 def = gimple_phi_result (stmt);
2012 else
2013 def = gimple_get_lhs (stmt);
4ee9c684 2014
2015 /* Certain expressions on the RHS can be optimized away, but can not
dac49aa5 2016 themselves be entered into the hash tables. */
9d637cc5 2017 if (! def
4ee9c684 2018 || TREE_CODE (def) != SSA_NAME
2019 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
dd277d48 2020 || gimple_vdef (stmt)
119a0489 2021 /* Do not record equivalences for increments of ivs. This would create
2022 overlapping live ranges for a very questionable gain. */
2023 || simple_iv_increment_p (stmt))
4ee9c684 2024 insert = false;
2025
2026 /* Check if the expression has been computed before. */
9c629f0e 2027 cached_lhs = lookup_avail_expr (stmt, insert);
4ee9c684 2028
4ee9c684 2029 opt_stats.num_exprs_considered++;
2030
75a70cf9 2031 /* Get the type of the expression we are trying to optimize. */
2032 if (is_gimple_assign (stmt))
f6be5aa5 2033 {
75a70cf9 2034 expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
2035 assigns_var_p = true;
f6be5aa5 2036 }
75a70cf9 2037 else if (gimple_code (stmt) == GIMPLE_COND)
2038 expr_type = boolean_type_node;
2039 else if (is_gimple_call (stmt))
f6be5aa5 2040 {
75a70cf9 2041 gcc_assert (gimple_call_lhs (stmt));
2042 expr_type = TREE_TYPE (gimple_call_lhs (stmt));
2043 assigns_var_p = true;
f6be5aa5 2044 }
1a91d914 2045 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
2046 expr_type = TREE_TYPE (gimple_switch_index (swtch_stmt));
889ef038 2047 else if (gimple_code (stmt) == GIMPLE_PHI)
2048 /* We can't propagate into a phi, so the logic below doesn't apply.
2049 Instead record an equivalence between the cached LHS and the
2050 PHI result of this statement, provided they are in the same block.
2051 This should be sufficient to kill the redundant phi. */
2052 {
2053 if (def && cached_lhs)
545372c5 2054 const_and_copies->record_const_or_copy (def, cached_lhs);
889ef038 2055 return;
2056 }
75a70cf9 2057 else
2058 gcc_unreachable ();
2059
2060 if (!cached_lhs)
e08ff65a 2061 return;
4ee9c684 2062
2063 /* It is safe to ignore types here since we have already done
2064 type checking in the hashing and equality routines. In fact
2065 type checking here merely gets in the way of constant
2066 propagation. Also, make sure that it is safe to propagate
75a70cf9 2067 CACHED_LHS into the expression in STMT. */
2068 if ((TREE_CODE (cached_lhs) != SSA_NAME
2069 && (assigns_var_p
2070 || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
2071 || may_propagate_copy_into_stmt (stmt, cached_lhs))
2072 {
1b4345f7 2073 gcc_checking_assert (TREE_CODE (cached_lhs) == SSA_NAME
2074 || is_gimple_min_invariant (cached_lhs));
75a70cf9 2075
4ee9c684 2076 if (dump_file && (dump_flags & TDF_DETAILS))
2077 {
2078 fprintf (dump_file, " Replaced redundant expr '");
75a70cf9 2079 print_gimple_expr (dump_file, stmt, 0, dump_flags);
4ee9c684 2080 fprintf (dump_file, "' with '");
2081 print_generic_expr (dump_file, cached_lhs, dump_flags);
75a70cf9 2082 fprintf (dump_file, "'\n");
4ee9c684 2083 }
2084
2085 opt_stats.num_re++;
48e1416a 2086
75a70cf9 2087 if (assigns_var_p
2088 && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
2089 cached_lhs = fold_convert (expr_type, cached_lhs);
4ee9c684 2090
75a70cf9 2091 propagate_tree_value_into_stmt (gsi, cached_lhs);
2092
2093 /* Since it is always necessary to mark the result as modified,
2094 perhaps we should move this into propagate_tree_value_into_stmt
2095 itself. */
2096 gimple_set_modified (gsi_stmt (*gsi), true);
2097 }
4ee9c684 2098}
2099
75a70cf9 2100/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
4ee9c684 2101 the available expressions table or the const_and_copies table.
2102 Detect and record those equivalences. */
75a70cf9 2103/* We handle only very simple copy equivalences here. The heavy
2104 lifing is done by eliminate_redundant_computations. */
4ee9c684 2105
2106static void
75a70cf9 2107record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
4ee9c684 2108{
75a70cf9 2109 tree lhs;
2110 enum tree_code lhs_code;
4ee9c684 2111
75a70cf9 2112 gcc_assert (is_gimple_assign (stmt));
2113
2114 lhs = gimple_assign_lhs (stmt);
2115 lhs_code = TREE_CODE (lhs);
4ee9c684 2116
75a70cf9 2117 if (lhs_code == SSA_NAME
912886f2 2118 && gimple_assign_single_p (stmt))
75a70cf9 2119 {
2120 tree rhs = gimple_assign_rhs1 (stmt);
48e1416a 2121
4ee9c684 2122 /* If the RHS of the assignment is a constant or another variable that
2123 may be propagated, register it in the CONST_AND_COPIES table. We
2124 do not need to record unwind data for this, since this is a true
365db11e 2125 assignment and not an equivalence inferred from a comparison. All
4ee9c684 2126 uses of this ssa name are dominated by this assignment, so unwinding
2127 just costs time and space. */
2128 if (may_optimize_p
2129 && (TREE_CODE (rhs) == SSA_NAME
2130 || is_gimple_min_invariant (rhs)))
75a70cf9 2131 {
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 {
2134 fprintf (dump_file, "==== ASGN ");
2135 print_generic_expr (dump_file, lhs, 0);
2136 fprintf (dump_file, " = ");
2137 print_generic_expr (dump_file, rhs, 0);
2138 fprintf (dump_file, "\n");
2139 }
2140
f003f9fd 2141 set_ssa_name_value (lhs, rhs);
75a70cf9 2142 }
4ee9c684 2143 }
2144
387312e6 2145 /* Make sure we can propagate &x + CST. */
2146 if (lhs_code == SSA_NAME
2147 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2148 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR
2149 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
2150 {
2151 tree op0 = gimple_assign_rhs1 (stmt);
2152 tree op1 = gimple_assign_rhs2 (stmt);
2153 tree new_rhs
2154 = build_fold_addr_expr (fold_build2 (MEM_REF,
2155 TREE_TYPE (TREE_TYPE (op0)),
2156 unshare_expr (op0),
2157 fold_convert (ptr_type_node,
2158 op1)));
2159 if (dump_file && (dump_flags & TDF_DETAILS))
2160 {
2161 fprintf (dump_file, "==== ASGN ");
2162 print_generic_expr (dump_file, lhs, 0);
2163 fprintf (dump_file, " = ");
2164 print_generic_expr (dump_file, new_rhs, 0);
2165 fprintf (dump_file, "\n");
2166 }
2167
2168 set_ssa_name_value (lhs, new_rhs);
2169 }
2170
4ee9c684 2171 /* A memory store, even an aliased store, creates a useful
2172 equivalence. By exchanging the LHS and RHS, creating suitable
2173 vops and recording the result in the available expression table,
2174 we may be able to expose more redundant loads. */
75a70cf9 2175 if (!gimple_has_volatile_ops (stmt)
2176 && gimple_references_memory_p (stmt)
2177 && gimple_assign_single_p (stmt)
2178 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2179 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
4ee9c684 2180 && !is_gimple_reg (lhs))
2181 {
75a70cf9 2182 tree rhs = gimple_assign_rhs1 (stmt);
1a91d914 2183 gassign *new_stmt;
4ee9c684 2184
e607210a 2185 /* Build a new statement with the RHS and LHS exchanged. */
75a70cf9 2186 if (TREE_CODE (rhs) == SSA_NAME)
2187 {
2188 /* NOTE tuples. The call to gimple_build_assign below replaced
2189 a call to build_gimple_modify_stmt, which did not set the
2190 SSA_NAME_DEF_STMT on the LHS of the assignment. Doing so
2191 may cause an SSA validation failure, as the LHS may be a
2192 default-initialized name and should have no definition. I'm
2193 a bit dubious of this, as the artificial statement that we
2194 generate here may in fact be ill-formed, but it is simply
2195 used as an internal device in this pass, and never becomes
2196 part of the CFG. */
2197 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2198 new_stmt = gimple_build_assign (rhs, lhs);
2199 SSA_NAME_DEF_STMT (rhs) = defstmt;
2200 }
2201 else
2202 new_stmt = gimple_build_assign (rhs, lhs);
2203
dd277d48 2204 gimple_set_vuse (new_stmt, gimple_vdef (stmt));
4ee9c684 2205
e607210a 2206 /* Finally enter the statement into the available expression
2207 table. */
2208 lookup_avail_expr (new_stmt, true);
4ee9c684 2209 }
2210}
2211
591c2a30 2212/* Replace *OP_P in STMT with any known equivalent value for *OP_P from
2213 CONST_AND_COPIES. */
2214
e08ff65a 2215static void
75a70cf9 2216cprop_operand (gimple stmt, use_operand_p op_p)
591c2a30 2217{
591c2a30 2218 tree val;
2219 tree op = USE_FROM_PTR (op_p);
2220
2221 /* If the operand has a known constant value or it is known to be a
2222 copy of some other variable, use the value or copy stored in
2223 CONST_AND_COPIES. */
4c7a0518 2224 val = SSA_NAME_VALUE (op);
f6c33c78 2225 if (val && val != op)
591c2a30 2226 {
93b4f514 2227 /* Do not replace hard register operands in asm statements. */
75a70cf9 2228 if (gimple_code (stmt) == GIMPLE_ASM
93b4f514 2229 && !may_propagate_copy_into_asm (op))
e08ff65a 2230 return;
93b4f514 2231
591c2a30 2232 /* Certain operands are not allowed to be copy propagated due
2233 to their interaction with exception handling and some GCC
2234 extensions. */
93f3673b 2235 if (!may_propagate_copy (op, val))
e08ff65a 2236 return;
93f3673b 2237
0dbca901 2238 /* Do not propagate copies into BIVs.
2239 See PR23821 and PR62217 for how this can disturb IV and
2240 number of iteration analysis. */
2241 if (TREE_CODE (val) != INTEGER_CST)
2242 {
2243 gimple def = SSA_NAME_DEF_STMT (op);
2244 if (gimple_code (def) == GIMPLE_PHI
2245 && gimple_bb (def)->loop_father->header == gimple_bb (def))
2246 return;
2247 }
09d0041c 2248
591c2a30 2249 /* Dump details. */
2250 if (dump_file && (dump_flags & TDF_DETAILS))
2251 {
2252 fprintf (dump_file, " Replaced '");
2253 print_generic_expr (dump_file, op, dump_flags);
2254 fprintf (dump_file, "' with %s '",
2255 (TREE_CODE (val) != SSA_NAME ? "constant" : "variable"));
2256 print_generic_expr (dump_file, val, dump_flags);
2257 fprintf (dump_file, "'\n");
2258 }
2259
88dbf20f 2260 if (TREE_CODE (val) != SSA_NAME)
2261 opt_stats.num_const_prop++;
2262 else
2263 opt_stats.num_copy_prop++;
2264
591c2a30 2265 propagate_value (op_p, val);
2266
2267 /* And note that we modified this statement. This is now
2268 safe, even if we changed virtual operands since we will
2269 rescan the statement and rewrite its operands again. */
75a70cf9 2270 gimple_set_modified (stmt, true);
591c2a30 2271 }
591c2a30 2272}
2273
2274/* CONST_AND_COPIES is a table which maps an SSA_NAME to the current
48e1416a 2275 known value for that SSA_NAME (or NULL if no value is known).
591c2a30 2276
2277 Propagate values from CONST_AND_COPIES into the uses, vuses and
de6ed584 2278 vdef_ops of STMT. */
591c2a30 2279
e08ff65a 2280static void
75a70cf9 2281cprop_into_stmt (gimple stmt)
591c2a30 2282{
43daa21e 2283 use_operand_p op_p;
2284 ssa_op_iter iter;
591c2a30 2285
8bf038d5 2286 FOR_EACH_SSA_USE_OPERAND (op_p, stmt, iter, SSA_OP_USE)
2287 cprop_operand (stmt, op_p);
591c2a30 2288}
2289
5206b159 2290/* Optimize the statement pointed to by iterator SI.
48e1416a 2291
4ee9c684 2292 We try to perform some simplistic global redundancy elimination and
2293 constant propagation:
2294
2295 1- To detect global redundancy, we keep track of expressions that have
2296 been computed in this block and its dominators. If we find that the
2297 same expression is computed more than once, we eliminate repeated
2298 computations by using the target of the first one.
2299
2300 2- Constant values and copy assignments. This is used to do very
2301 simplistic constant and copy propagation. When a constant or copy
2302 assignment is found, we map the value on the RHS of the assignment to
2303 the variable in the LHS in the CONST_AND_COPIES table. */
2304
2305static void
6bf320fb 2306optimize_stmt (basic_block bb, gimple_stmt_iterator si)
4ee9c684 2307{
75a70cf9 2308 gimple stmt, old_stmt;
4ee9c684 2309 bool may_optimize_p;
c8c258fe 2310 bool modified_p = false;
25959a39 2311 bool was_noreturn;
4ee9c684 2312
75a70cf9 2313 old_stmt = stmt = gsi_stmt (si);
25959a39 2314 was_noreturn = is_gimple_call (stmt) && gimple_call_noreturn_p (stmt);
48e1416a 2315
4ee9c684 2316 if (dump_file && (dump_flags & TDF_DETAILS))
2317 {
2318 fprintf (dump_file, "Optimizing statement ");
75a70cf9 2319 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4ee9c684 2320 }
2321
8bf038d5 2322 if (gimple_code (stmt) == GIMPLE_COND)
1a91d914 2323 canonicalize_comparison (as_a <gcond *> (stmt));
8bf038d5 2324
2325 update_stmt_if_modified (stmt);
2326 opt_stats.num_stmts++;
2327
de6ed584 2328 /* Const/copy propagate into USES, VUSES and the RHS of VDEFs. */
e08ff65a 2329 cprop_into_stmt (stmt);
4ee9c684 2330
2331 /* If the statement has been modified with constant replacements,
2332 fold its RHS before checking for redundant computations. */
75a70cf9 2333 if (gimple_modified_p (stmt))
4ee9c684 2334 {
75a70cf9 2335 tree rhs = NULL;
f2fae51f 2336
4ee9c684 2337 /* Try to fold the statement making sure that STMT is kept
2338 up to date. */
75a70cf9 2339 if (fold_stmt (&si))
4ee9c684 2340 {
75a70cf9 2341 stmt = gsi_stmt (si);
7548e9fe 2342 gimple_set_modified (stmt, true);
4ee9c684 2343
2344 if (dump_file && (dump_flags & TDF_DETAILS))
2345 {
2346 fprintf (dump_file, " Folded to: ");
75a70cf9 2347 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4ee9c684 2348 }
2349 }
2350
75a70cf9 2351 /* We only need to consider cases that can yield a gimple operand. */
2352 if (gimple_assign_single_p (stmt))
2353 rhs = gimple_assign_rhs1 (stmt);
2354 else if (gimple_code (stmt) == GIMPLE_GOTO)
2355 rhs = gimple_goto_dest (stmt);
1a91d914 2356 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
75a70cf9 2357 /* This should never be an ADDR_EXPR. */
1a91d914 2358 rhs = gimple_switch_index (swtch_stmt);
75a70cf9 2359
f2fae51f 2360 if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
75a70cf9 2361 recompute_tree_invariant_for_addr_expr (rhs);
f2fae51f 2362
c8c258fe 2363 /* Indicate that maybe_clean_or_replace_eh_stmt needs to be called,
2364 even if fold_stmt updated the stmt already and thus cleared
2365 gimple_modified_p flag on it. */
2366 modified_p = true;
4ee9c684 2367 }
2368
2369 /* Check for redundant computations. Do this optimization only
2370 for assignments that have no volatile ops and conditionals. */
f1960495 2371 may_optimize_p = (!gimple_has_side_effects (stmt)
2372 && (is_gimple_assign (stmt)
75a70cf9 2373 || (is_gimple_call (stmt)
f1960495 2374 && gimple_call_lhs (stmt) != NULL_TREE)
75a70cf9 2375 || gimple_code (stmt) == GIMPLE_COND
2376 || gimple_code (stmt) == GIMPLE_SWITCH));
4ee9c684 2377
2378 if (may_optimize_p)
75a70cf9 2379 {
a65c4d64 2380 if (gimple_code (stmt) == GIMPLE_CALL)
2381 {
2382 /* Resolve __builtin_constant_p. If it hasn't been
2383 folded to integer_one_node by now, it's fairly
2384 certain that the value simply isn't constant. */
2385 tree callee = gimple_call_fndecl (stmt);
2386 if (callee
2387 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2388 && DECL_FUNCTION_CODE (callee) == BUILT_IN_CONSTANT_P)
2389 {
2390 propagate_tree_value_into_stmt (&si, integer_zero_node);
2391 stmt = gsi_stmt (si);
2392 }
2393 }
04c3b49f 2394
2395 update_stmt_if_modified (stmt);
2396 eliminate_redundant_computations (&si);
2397 stmt = gsi_stmt (si);
31047158 2398
2399 /* Perform simple redundant store elimination. */
2400 if (gimple_assign_single_p (stmt)
2401 && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
2402 {
2403 tree lhs = gimple_assign_lhs (stmt);
2404 tree rhs = gimple_assign_rhs1 (stmt);
2405 tree cached_lhs;
1a91d914 2406 gassign *new_stmt;
31047158 2407 if (TREE_CODE (rhs) == SSA_NAME)
2408 {
2409 tree tem = SSA_NAME_VALUE (rhs);
2410 if (tem)
2411 rhs = tem;
2412 }
2413 /* Build a new statement with the RHS and LHS exchanged. */
2414 if (TREE_CODE (rhs) == SSA_NAME)
2415 {
2416 gimple defstmt = SSA_NAME_DEF_STMT (rhs);
2417 new_stmt = gimple_build_assign (rhs, lhs);
2418 SSA_NAME_DEF_STMT (rhs) = defstmt;
2419 }
2420 else
2421 new_stmt = gimple_build_assign (rhs, lhs);
2422 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2423 cached_lhs = lookup_avail_expr (new_stmt, false);
2424 if (cached_lhs
2425 && rhs == cached_lhs)
2426 {
2427 basic_block bb = gimple_bb (stmt);
31047158 2428 unlink_stmt_vdef (stmt);
13ff78a4 2429 if (gsi_remove (&si, true))
31047158 2430 {
2431 bitmap_set_bit (need_eh_cleanup, bb->index);
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2433 fprintf (dump_file, " Flagged to clear EH edges.\n");
2434 }
bc8a8451 2435 release_defs (stmt);
31047158 2436 return;
2437 }
2438 }
75a70cf9 2439 }
4ee9c684 2440
2441 /* Record any additional equivalences created by this statement. */
75a70cf9 2442 if (is_gimple_assign (stmt))
2443 record_equivalences_from_stmt (stmt, may_optimize_p);
4ee9c684 2444
4ee9c684 2445 /* If STMT is a COND_EXPR and it was modified, then we may know
2446 where it goes. If that is the case, then mark the CFG as altered.
2447
2448 This will cause us to later call remove_unreachable_blocks and
48e1416a 2449 cleanup_tree_cfg when it is safe to do so. It is not safe to
4ee9c684 2450 clean things up here since removal of edges and such can trigger
2451 the removal of PHI nodes, which in turn can release SSA_NAMEs to
2452 the manager.
2453
2454 That's all fine and good, except that once SSA_NAMEs are released
2455 to the manager, we must not call create_ssa_name until all references
2456 to released SSA_NAMEs have been eliminated.
2457
2458 All references to the deleted SSA_NAMEs can not be eliminated until
2459 we remove unreachable blocks.
2460
2461 We can not remove unreachable blocks until after we have completed
2462 any queued jump threading.
2463
2464 We can not complete any queued jump threads until we have taken
2465 appropriate variables out of SSA form. Taking variables out of
2466 SSA form can call create_ssa_name and thus we lose.
2467
2468 Ultimately I suspect we're going to need to change the interface
2469 into the SSA_NAME manager. */
c8c258fe 2470 if (gimple_modified_p (stmt) || modified_p)
4ee9c684 2471 {
2472 tree val = NULL;
48e1416a 2473
04c3b49f 2474 update_stmt_if_modified (stmt);
4ee9c684 2475
75a70cf9 2476 if (gimple_code (stmt) == GIMPLE_COND)
389dd41b 2477 val = fold_binary_loc (gimple_location (stmt),
2478 gimple_cond_code (stmt), boolean_type_node,
75a70cf9 2479 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
1a91d914 2480 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
2481 val = gimple_switch_index (swtch_stmt);
4ee9c684 2482
35c15734 2483 if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
4ee9c684 2484 cfg_altered = true;
35c15734 2485
2486 /* If we simplified a statement in such a way as to be shown that it
2487 cannot trap, update the eh information and the cfg to match. */
4c27dd45 2488 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
35c15734 2489 {
2490 bitmap_set_bit (need_eh_cleanup, bb->index);
2491 if (dump_file && (dump_flags & TDF_DETAILS))
2492 fprintf (dump_file, " Flagged to clear EH edges.\n");
2493 }
25959a39 2494
2495 if (!was_noreturn
2496 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2497 need_noreturn_fixup.safe_push (stmt);
4ee9c684 2498 }
4ee9c684 2499}
2500
387312e6 2501/* Helper for walk_non_aliased_vuses. Determine if we arrived at
2502 the desired memory state. */
2503
2504static void *
2505vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
2506{
2507 tree vuse2 = (tree) data;
2508 if (vuse1 == vuse2)
2509 return data;
2510
2511 /* This bounds the stmt walks we perform on reference lookups
2512 to O(1) instead of O(N) where N is the number of dominating
2513 stores leading to a candidate. We re-use the SCCVN param
2514 for this as it is basically the same complexity. */
2515 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
2516 return (void *)-1;
2517
2518 return NULL;
2519}
2520
75a70cf9 2521/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
2522 If found, return its LHS. Otherwise insert STMT in the table and
2523 return NULL_TREE.
4ee9c684 2524
75a70cf9 2525 Also, when an expression is first inserted in the table, it is also
2526 is also added to AVAIL_EXPRS_STACK, so that it can be removed when
2527 we finish processing this block and its children. */
4ee9c684 2528
2529static tree
75a70cf9 2530lookup_avail_expr (gimple stmt, bool insert)
4ee9c684 2531{
d9dd21a8 2532 expr_hash_elt **slot;
4ee9c684 2533 tree lhs;
2534 tree temp;
88006128 2535 struct expr_hash_elt element;
4ee9c684 2536
889ef038 2537 /* Get LHS of phi, assignment, or call; else NULL_TREE. */
2538 if (gimple_code (stmt) == GIMPLE_PHI)
2539 lhs = gimple_phi_result (stmt);
2540 else
2541 lhs = gimple_get_lhs (stmt);
4ee9c684 2542
88006128 2543 initialize_hash_element (stmt, lhs, &element);
4ee9c684 2544
75a70cf9 2545 if (dump_file && (dump_flags & TDF_DETAILS))
2546 {
2547 fprintf (dump_file, "LKUP ");
88006128 2548 print_expr_hash_elt (dump_file, &element);
75a70cf9 2549 }
2550
4ee9c684 2551 /* Don't bother remembering constant assignments and copy operations.
2552 Constants and copy operations are handled by the constant/copy propagator
2553 in optimize_stmt. */
88006128 2554 if (element.expr.kind == EXPR_SINGLE
2555 && (TREE_CODE (element.expr.ops.single.rhs) == SSA_NAME
2556 || is_gimple_min_invariant (element.expr.ops.single.rhs)))
2557 return NULL_TREE;
4ee9c684 2558
4ee9c684 2559 /* Finally try to find the expression in the main expression hash table. */
c1f445d2 2560 slot = avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
4ee9c684 2561 if (slot == NULL)
78d53e33 2562 {
2563 free_expr_hash_elt_contents (&element);
2564 return NULL_TREE;
2565 }
2566 else if (*slot == NULL)
4ee9c684 2567 {
88006128 2568 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2569 *element2 = element;
2570 element2->stamp = element2;
d9dd21a8 2571 *slot = element2;
75a70cf9 2572
2573 if (dump_file && (dump_flags & TDF_DETAILS))
2574 {
2575 fprintf (dump_file, "2>>> ");
88006128 2576 print_expr_hash_elt (dump_file, element2);
75a70cf9 2577 }
2578
387312e6 2579 avail_exprs_stack.safe_push
2580 (std::pair<expr_hash_elt_t, expr_hash_elt_t> (element2, NULL));
4ee9c684 2581 return NULL_TREE;
2582 }
387312e6 2583
2584 /* If we found a redundant memory operation do an alias walk to
2585 check if we can re-use it. */
2586 if (gimple_vuse (stmt) != (*slot)->vop)
2587 {
2588 tree vuse1 = (*slot)->vop;
2589 tree vuse2 = gimple_vuse (stmt);
2590 /* If we have a load of a register and a candidate in the
2591 hash with vuse1 then try to reach its stmt by walking
2592 up the virtual use-def chain using walk_non_aliased_vuses.
2593 But don't do this when removing expressions from the hash. */
2594 ao_ref ref;
2595 if (!(vuse1 && vuse2
2596 && gimple_assign_single_p (stmt)
2597 && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
2598 && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)), true)
2599 && walk_non_aliased_vuses (&ref, vuse2,
46816709 2600 vuse_eq, NULL, NULL, vuse1) != NULL))
387312e6 2601 {
18526d26 2602 if (insert)
387312e6 2603 {
18526d26 2604 struct expr_hash_elt *element2 = XNEW (struct expr_hash_elt);
2605 *element2 = element;
2606 element2->stamp = element2;
2607
2608 /* Insert the expr into the hash by replacing the current
2609 entry and recording the value to restore in the
2610 avail_exprs_stack. */
2611 avail_exprs_stack.safe_push (std::make_pair (element2, *slot));
2612 *slot = element2;
2613 if (dump_file && (dump_flags & TDF_DETAILS))
2614 {
2615 fprintf (dump_file, "2>>> ");
2616 print_expr_hash_elt (dump_file, *slot);
2617 }
387312e6 2618 }
2619 return NULL_TREE;
2620 }
2621 }
2622
2623 free_expr_hash_elt_contents (&element);
4ee9c684 2624
2625 /* Extract the LHS of the assignment so that it can be used as the current
2626 definition of another variable. */
387312e6 2627 lhs = (*slot)->lhs;
4ee9c684 2628
2629 /* See if the LHS appears in the CONST_AND_COPIES table. If it does, then
2630 use the value from the const_and_copies table. */
2631 if (TREE_CODE (lhs) == SSA_NAME)
2632 {
4c7a0518 2633 temp = SSA_NAME_VALUE (lhs);
f6c33c78 2634 if (temp)
4ee9c684 2635 lhs = temp;
2636 }
2637
75a70cf9 2638 if (dump_file && (dump_flags & TDF_DETAILS))
2639 {
2640 fprintf (dump_file, "FIND: ");
2641 print_generic_expr (dump_file, lhs, 0);
2642 fprintf (dump_file, "\n");
2643 }
2644
4ee9c684 2645 return lhs;
2646}
2647
75a70cf9 2648/* Hashing and equality functions for AVAIL_EXPRS. We compute a value number
2649 for expressions using the code of the expression and the SSA numbers of
2650 its operands. */
4ee9c684 2651
2652static hashval_t
2653avail_expr_hash (const void *p)
2654{
75a70cf9 2655 const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
a01e9a5b 2656 inchash::hash hstate;
4ee9c684 2657
a01e9a5b 2658 inchash::add_hashable_expr (expr, hstate);
4ee9c684 2659
a01e9a5b 2660 return hstate.end ();
4ee9c684 2661}
2662
d1d2af7d 2663/* PHI-ONLY copy and constant propagation. This pass is meant to clean
2664 up degenerate PHIs created by or exposed by jump threading. */
2665
75a70cf9 2666/* Given a statement STMT, which is either a PHI node or an assignment,
d1d2af7d 2667 remove it from the IL. */
2668
2669static void
75a70cf9 2670remove_stmt_or_phi (gimple stmt)
d1d2af7d 2671{
75a70cf9 2672 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2673
2674 if (gimple_code (stmt) == GIMPLE_PHI)
2675 remove_phi_node (&gsi, true);
d1d2af7d 2676 else
2677 {
75a70cf9 2678 gsi_remove (&gsi, true);
2679 release_defs (stmt);
d1d2af7d 2680 }
2681}
2682
75a70cf9 2683/* Given a statement STMT, which is either a PHI node or an assignment,
d1d2af7d 2684 return the "rhs" of the node, in the case of a non-degenerate
75a70cf9 2685 phi, NULL is returned. */
d1d2af7d 2686
2687static tree
75a70cf9 2688get_rhs_or_phi_arg (gimple stmt)
d1d2af7d 2689{
75a70cf9 2690 if (gimple_code (stmt) == GIMPLE_PHI)
1a91d914 2691 return degenerate_phi_result (as_a <gphi *> (stmt));
75a70cf9 2692 else if (gimple_assign_single_p (stmt))
2693 return gimple_assign_rhs1 (stmt);
2694 else
2695 gcc_unreachable ();
d1d2af7d 2696}
2697
2698
75a70cf9 2699/* Given a statement STMT, which is either a PHI node or an assignment,
d1d2af7d 2700 return the "lhs" of the node. */
2701
2702static tree
75a70cf9 2703get_lhs_or_phi_result (gimple stmt)
d1d2af7d 2704{
75a70cf9 2705 if (gimple_code (stmt) == GIMPLE_PHI)
2706 return gimple_phi_result (stmt);
2707 else if (is_gimple_assign (stmt))
2708 return gimple_assign_lhs (stmt);
2709 else
2710 gcc_unreachable ();
d1d2af7d 2711}
2712
2713/* Propagate RHS into all uses of LHS (when possible).
2714
2715 RHS and LHS are derived from STMT, which is passed in solely so
2716 that we can remove it if propagation is successful.
2717
2718 When propagating into a PHI node or into a statement which turns
2719 into a trivial copy or constant initialization, set the
2720 appropriate bit in INTERESTING_NAMEs so that we will visit those
2721 nodes as well in an effort to pick up secondary optimization
2722 opportunities. */
2723
48e1416a 2724static void
75a70cf9 2725propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
d1d2af7d 2726{
3beff0e1 2727 /* First verify that propagation is valid. */
2b5ad301 2728 if (may_propagate_copy (lhs, rhs))
d1d2af7d 2729 {
2730 use_operand_p use_p;
2731 imm_use_iterator iter;
75a70cf9 2732 gimple use_stmt;
d1d2af7d 2733 bool all = true;
2734
2735 /* Dump details. */
2736 if (dump_file && (dump_flags & TDF_DETAILS))
2737 {
2738 fprintf (dump_file, " Replacing '");
2739 print_generic_expr (dump_file, lhs, dump_flags);
2740 fprintf (dump_file, "' with %s '",
2741 (TREE_CODE (rhs) != SSA_NAME ? "constant" : "variable"));
2742 print_generic_expr (dump_file, rhs, dump_flags);
2743 fprintf (dump_file, "'\n");
2744 }
2745
48e1416a 2746 /* Walk over every use of LHS and try to replace the use with RHS.
d1d2af7d 2747 At this point the only reason why such a propagation would not
2748 be successful would be if the use occurs in an ASM_EXPR. */
09aca5bc 2749 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
d1d2af7d 2750 {
9845d120 2751 /* Leave debug stmts alone. If we succeed in propagating
2752 all non-debug uses, we'll drop the DEF, and propagation
2753 into debug stmts will occur then. */
2754 if (gimple_debug_bind_p (use_stmt))
2755 continue;
48e1416a 2756
d1d2af7d 2757 /* It's not always safe to propagate into an ASM_EXPR. */
75a70cf9 2758 if (gimple_code (use_stmt) == GIMPLE_ASM
2759 && ! may_propagate_copy_into_asm (lhs))
d1d2af7d 2760 {
2761 all = false;
2762 continue;
2763 }
2764
ec396ca3 2765 /* It's not ok to propagate into the definition stmt of RHS.
2766 <bb 9>:
2767 # prephitmp.12_36 = PHI <g_67.1_6(9)>
2768 g_67.1_6 = prephitmp.12_36;
2769 goto <bb 9>;
2770 While this is strictly all dead code we do not want to
2771 deal with this here. */
2772 if (TREE_CODE (rhs) == SSA_NAME
2773 && SSA_NAME_DEF_STMT (rhs) == use_stmt)
2774 {
2775 all = false;
2776 continue;
2777 }
2778
d1d2af7d 2779 /* Dump details. */
2780 if (dump_file && (dump_flags & TDF_DETAILS))
2781 {
2782 fprintf (dump_file, " Original statement:");
75a70cf9 2783 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
d1d2af7d 2784 }
2785
d76551d9 2786 /* Propagate the RHS into this use of the LHS. */
09aca5bc 2787 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2788 propagate_value (use_p, rhs);
d76551d9 2789
2790 /* Special cases to avoid useless calls into the folding
2791 routines, operand scanning, etc.
2792
7ecda5e8 2793 Propagation into a PHI may cause the PHI to become
d76551d9 2794 a degenerate, so mark the PHI as interesting. No other
7ecda5e8 2795 actions are necessary. */
2796 if (gimple_code (use_stmt) == GIMPLE_PHI)
d76551d9 2797 {
7ecda5e8 2798 tree result;
2799
d76551d9 2800 /* Dump details. */
2801 if (dump_file && (dump_flags & TDF_DETAILS))
2802 {
2803 fprintf (dump_file, " Updated statement:");
75a70cf9 2804 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
d76551d9 2805 }
2806
7ecda5e8 2807 result = get_lhs_or_phi_result (use_stmt);
2808 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
d76551d9 2809 continue;
2810 }
2811
48e1416a 2812 /* From this point onward we are propagating into a
d76551d9 2813 real statement. Folding may (or may not) be possible,
2814 we may expose new operands, expose dead EH edges,
2815 etc. */
75a70cf9 2816 /* NOTE tuples. In the tuples world, fold_stmt_inplace
2817 cannot fold a call that simplifies to a constant,
2818 because the GIMPLE_CALL must be replaced by a
2819 GIMPLE_ASSIGN, and there is no way to effect such a
2820 transformation in-place. We might want to consider
2821 using the more general fold_stmt here. */
50aacf4c 2822 {
2823 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
2824 fold_stmt_inplace (&gsi);
2825 }
2f067d14 2826
2827 /* Sometimes propagation can expose new operands to the
4c5fd53c 2828 renamer. */
2829 update_stmt (use_stmt);
d1d2af7d 2830
2831 /* Dump details. */
2832 if (dump_file && (dump_flags & TDF_DETAILS))
2833 {
2834 fprintf (dump_file, " Updated statement:");
75a70cf9 2835 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
d1d2af7d 2836 }
2837
d1d2af7d 2838 /* If we replaced a variable index with a constant, then
2839 we would need to update the invariant flag for ADDR_EXPRs. */
75a70cf9 2840 if (gimple_assign_single_p (use_stmt)
2841 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
35cc02b5 2842 recompute_tree_invariant_for_addr_expr
75a70cf9 2843 (gimple_assign_rhs1 (use_stmt));
d1d2af7d 2844
2845 /* If we cleaned up EH information from the statement,
d65e4929 2846 mark its containing block as needing EH cleanups. */
d1d2af7d 2847 if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
d65e4929 2848 {
75a70cf9 2849 bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
d65e4929 2850 if (dump_file && (dump_flags & TDF_DETAILS))
2851 fprintf (dump_file, " Flagged to clear EH edges.\n");
2852 }
d1d2af7d 2853
d76551d9 2854 /* Propagation may expose new trivial copy/constant propagation
2855 opportunities. */
75a70cf9 2856 if (gimple_assign_single_p (use_stmt)
2857 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
2858 && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
2859 || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
2860 {
d1d2af7d 2861 tree result = get_lhs_or_phi_result (use_stmt);
2862 bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
2863 }
2864
2865 /* Propagation into these nodes may make certain edges in
2866 the CFG unexecutable. We want to identify them as PHI nodes
2867 at the destination of those unexecutable edges may become
2868 degenerates. */
75a70cf9 2869 else if (gimple_code (use_stmt) == GIMPLE_COND
2870 || gimple_code (use_stmt) == GIMPLE_SWITCH
2871 || gimple_code (use_stmt) == GIMPLE_GOTO)
2872 {
d1d2af7d 2873 tree val;
2874
75a70cf9 2875 if (gimple_code (use_stmt) == GIMPLE_COND)
389dd41b 2876 val = fold_binary_loc (gimple_location (use_stmt),
2877 gimple_cond_code (use_stmt),
75a70cf9 2878 boolean_type_node,
2879 gimple_cond_lhs (use_stmt),
2880 gimple_cond_rhs (use_stmt));
2881 else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
1a91d914 2882 val = gimple_switch_index (as_a <gswitch *> (use_stmt));
d1d2af7d 2883 else
75a70cf9 2884 val = gimple_goto_dest (use_stmt);
d1d2af7d 2885
75a70cf9 2886 if (val && is_gimple_min_invariant (val))
d1d2af7d 2887 {
75a70cf9 2888 basic_block bb = gimple_bb (use_stmt);
d1d2af7d 2889 edge te = find_taken_edge (bb, val);
2890 edge_iterator ei;
2891 edge e;
1a91d914 2892 gimple_stmt_iterator gsi;
2893 gphi_iterator psi;
d1d2af7d 2894
2895 /* Remove all outgoing edges except TE. */
2896 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
2897 {
2898 if (e != te)
2899 {
d1d2af7d 2900 /* Mark all the PHI nodes at the destination of
2901 the unexecutable edge as interesting. */
75a70cf9 2902 for (psi = gsi_start_phis (e->dest);
2903 !gsi_end_p (psi);
2904 gsi_next (&psi))
2905 {
1a91d914 2906 gphi *phi = psi.phi ();
75a70cf9 2907
2908 tree result = gimple_phi_result (phi);
d1d2af7d 2909 int version = SSA_NAME_VERSION (result);
2910
2911 bitmap_set_bit (interesting_names, version);
2912 }
2913
2914 te->probability += e->probability;
2915
2916 te->count += e->count;
2917 remove_edge (e);
9a17dd7d 2918 cfg_altered = true;
d1d2af7d 2919 }
2920 else
2921 ei_next (&ei);
2922 }
2923
75a70cf9 2924 gsi = gsi_last_bb (gimple_bb (use_stmt));
2925 gsi_remove (&gsi, true);
d1d2af7d 2926
2927 /* And fixup the flags on the single remaining edge. */
2928 te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
2929 te->flags &= ~EDGE_ABNORMAL;
2930 te->flags |= EDGE_FALLTHRU;
2931 if (te->probability > REG_BR_PROB_BASE)
2932 te->probability = REG_BR_PROB_BASE;
2933 }
2934 }
2935 }
2936
48e1416a 2937 /* Ensure there is nothing else to do. */
76e6653a 2938 gcc_assert (!all || has_zero_uses (lhs));
ea54ad41 2939
d1d2af7d 2940 /* If we were able to propagate away all uses of LHS, then
2941 we can remove STMT. */
2942 if (all)
2943 remove_stmt_or_phi (stmt);
2944 }
2945}
2946
75a70cf9 2947/* STMT is either a PHI node (potentially a degenerate PHI node) or
d1d2af7d 2948 a statement that is a trivial copy or constant initialization.
2949
2950 Attempt to eliminate T by propagating its RHS into all uses of
2951 its LHS. This may in turn set new bits in INTERESTING_NAMES
2952 for nodes we want to revisit later.
2953
2954 All exit paths should clear INTERESTING_NAMES for the result
75a70cf9 2955 of STMT. */
d1d2af7d 2956
2957static void
75a70cf9 2958eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
d1d2af7d 2959{
75a70cf9 2960 tree lhs = get_lhs_or_phi_result (stmt);
d1d2af7d 2961 tree rhs;
2962 int version = SSA_NAME_VERSION (lhs);
2963
2964 /* If the LHS of this statement or PHI has no uses, then we can
2965 just eliminate it. This can occur if, for example, the PHI
2966 was created by block duplication due to threading and its only
2967 use was in the conditional at the end of the block which was
2968 deleted. */
2969 if (has_zero_uses (lhs))
2970 {
2971 bitmap_clear_bit (interesting_names, version);
75a70cf9 2972 remove_stmt_or_phi (stmt);
d1d2af7d 2973 return;
2974 }
2975
2976 /* Get the RHS of the assignment or PHI node if the PHI is a
2977 degenerate. */
75a70cf9 2978 rhs = get_rhs_or_phi_arg (stmt);
d1d2af7d 2979 if (!rhs)
2980 {
2981 bitmap_clear_bit (interesting_names, version);
2982 return;
2983 }
2984
a3c29ebc 2985 if (!virtual_operand_p (lhs))
2986 propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
2987 else
2988 {
2989 gimple use_stmt;
2990 imm_use_iterator iter;
2991 use_operand_p use_p;
2992 /* For virtual operands we have to propagate into all uses as
2993 otherwise we will create overlapping life-ranges. */
2994 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2995 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2996 SET_USE (use_p, rhs);
2997 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2998 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
2999 remove_stmt_or_phi (stmt);
3000 }
d1d2af7d 3001
75a70cf9 3002 /* Note that STMT may well have been deleted by now, so do
d1d2af7d 3003 not access it, instead use the saved version # to clear
3004 T's entry in the worklist. */
3005 bitmap_clear_bit (interesting_names, version);
3006}
3007
3008/* The first phase in degenerate PHI elimination.
3009
3010 Eliminate the degenerate PHIs in BB, then recurse on the
3011 dominator children of BB. */
3012
3013static void
3014eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
3015{
1a91d914 3016 gphi_iterator gsi;
d1d2af7d 3017 basic_block son;
3018
75a70cf9 3019 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
d1d2af7d 3020 {
1a91d914 3021 gphi *phi = gsi.phi ();
75a70cf9 3022
d1d2af7d 3023 eliminate_const_or_copy (phi, interesting_names);
3024 }
3025
3026 /* Recurse into the dominator children of BB. */
3027 for (son = first_dom_son (CDI_DOMINATORS, bb);
3028 son;
3029 son = next_dom_son (CDI_DOMINATORS, son))
3030 eliminate_degenerate_phis_1 (son, interesting_names);
3031}
3032
3033
3034/* A very simple pass to eliminate degenerate PHI nodes from the
3035 IL. This is meant to be fast enough to be able to be run several
3036 times in the optimization pipeline.
3037
3038 Certain optimizations, particularly those which duplicate blocks
3039 or remove edges from the CFG can create or expose PHIs which are
3040 trivial copies or constant initializations.
3041
3042 While we could pick up these optimizations in DOM or with the
3043 combination of copy-prop and CCP, those solutions are far too
3044 heavy-weight for our needs.
3045
3046 This implementation has two phases so that we can efficiently
3047 eliminate the first order degenerate PHIs and second order
3048 degenerate PHIs.
3049
3050 The first phase performs a dominator walk to identify and eliminate
3051 the vast majority of the degenerate PHIs. When a degenerate PHI
3052 is identified and eliminated any affected statements or PHIs
3053 are put on a worklist.
3054
3055 The second phase eliminates degenerate PHIs and trivial copies
3056 or constant initializations using the worklist. This is how we
3057 pick up the secondary optimization opportunities with minimal
3058 cost. */
3059
namespace {

/* Pass metadata for the PHI-only const/copy propagation pass.  */

const pass_data pass_data_phi_only_cprop =
{
  GIMPLE_PASS, /* type */
  "phicprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHI_CPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_phi_only_cprop : public gimple_opt_pass
{
public:
  pass_phi_only_cprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phi_only_cprop, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_phi_only_cprop (m_ctxt); }
  /* Run only when the DOM optimizations are enabled.  */
  virtual bool gate (function *) { return flag_tree_dom != 0; }
  virtual unsigned int execute (function *);

}; // class pass_phi_only_cprop
3088
3089unsigned int
3090pass_phi_only_cprop::execute (function *fun)
d1d2af7d 3091{
3092 bitmap interesting_names;
76af66a6 3093 bitmap interesting_names1;
d1d2af7d 3094
d65e4929 3095 /* Bitmap of blocks which need EH information updated. We can not
3096 update it on-the-fly as doing so invalidates the dominator tree. */
3097 need_eh_cleanup = BITMAP_ALLOC (NULL);
3098
d1d2af7d 3099 /* INTERESTING_NAMES is effectively our worklist, indexed by
3100 SSA_NAME_VERSION.
3101
3102 A set bit indicates that the statement or PHI node which
3103 defines the SSA_NAME should be (re)examined to determine if
4562961a 3104 it has become a degenerate PHI or trivial const/copy propagation
48e1416a 3105 opportunity.
d1d2af7d 3106
3107 Experiments have show we generally get better compilation
3108 time behavior with bitmaps rather than sbitmaps. */
3109 interesting_names = BITMAP_ALLOC (NULL);
76af66a6 3110 interesting_names1 = BITMAP_ALLOC (NULL);
d1d2af7d 3111
9a17dd7d 3112 calculate_dominance_info (CDI_DOMINATORS);
3113 cfg_altered = false;
3114
9ca2c29a 3115 /* First phase. Eliminate degenerate PHIs via a dominator
d1d2af7d 3116 walk of the CFG.
3117
3118 Experiments have indicated that we generally get better
3119 compile-time behavior by visiting blocks in the first
3120 phase in dominator order. Presumably this is because walking
3121 in dominator order leaves fewer PHIs for later examination
3122 by the worklist phase. */
65b0537f 3123 eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (fun),
34154e27 3124 interesting_names);
d1d2af7d 3125
9ca2c29a 3126 /* Second phase. Eliminate second order degenerate PHIs as well
d1d2af7d 3127 as trivial copies or constant initializations identified by
3128 the first phase or this phase. Basically we keep iterating
3129 until our set of INTERESTING_NAMEs is empty. */
3130 while (!bitmap_empty_p (interesting_names))
3131 {
3132 unsigned int i;
3133 bitmap_iterator bi;
3134
76af66a6 3135 /* EXECUTE_IF_SET_IN_BITMAP does not like its bitmap
3136 changed during the loop. Copy it to another bitmap and
3137 use that. */
3138 bitmap_copy (interesting_names1, interesting_names);
3139
3140 EXECUTE_IF_SET_IN_BITMAP (interesting_names1, 0, i, bi)
d1d2af7d 3141 {
3142 tree name = ssa_name (i);
3143
3144 /* Ignore SSA_NAMEs that have been released because
3145 their defining statement was deleted (unreachable). */
3146 if (name)
3147 eliminate_const_or_copy (SSA_NAME_DEF_STMT (ssa_name (i)),
3148 interesting_names);
3149 }
3150 }
d65e4929 3151
9a17dd7d 3152 if (cfg_altered)
5daac166 3153 {
3154 free_dominance_info (CDI_DOMINATORS);
3155 /* If we changed the CFG schedule loops for fixup by cfgcleanup. */
b3083327 3156 loops_state_set (LOOPS_NEED_FIXUP);
5daac166 3157 }
9a17dd7d 3158
d65e4929 3159 /* Propagation of const and copies may make some EH edges dead. Purge
3160 such edges from the CFG as needed. */
3161 if (!bitmap_empty_p (need_eh_cleanup))
3162 {
75a70cf9 3163 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
d65e4929 3164 BITMAP_FREE (need_eh_cleanup);
3165 }
d1d2af7d 3166
3167 BITMAP_FREE (interesting_names);
76af66a6 3168 BITMAP_FREE (interesting_names1);
d1d2af7d 3169 return 0;
3170}
3171
cbe8bda8 3172} // anon namespace
3173
3174gimple_opt_pass *
3175make_pass_phi_only_cprop (gcc::context *ctxt)
3176{
3177 return new pass_phi_only_cprop (ctxt);
3178}