/* Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "hash-table.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality; it is unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.  */
/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */
/* Basic algorithm:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
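
/* Illustrative example only (not code from this pass): for a source
   fragment like

       if (cond)
	 x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;

   the computation a_2 + b_3 is partially redundant: it is available on
   the path through the "then" block but not on the other path.  Insertion
   places a copy of a_2 + b_3 on the path where it is missing, and the
   elimination walk then replaces the computation of y_4 with the value
   that is now fully available (typically through a PHI of the two
   results).  */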
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */
/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topological
   order.  */
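
/* For illustration only: if the expressions a_1 + b_2 and c_3 were given
   the same value-id, say 7, then a set containing only c_3 has bit 7 set
   in its value bitmap and only c_3's expression-id set in its expression
   bitmap.  "Does the set contain value 7?" and "which expression in the
   set represents value 7?" are then two different, cheap bitmap
   queries.  */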
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;

  /* hash_table support.  */
  typedef pre_expr_d value_type;
  typedef pre_expr_d compare_type;
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;
#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant
/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const value_type *e1, const compare_type *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
				       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
			      PRE_EXPR_REFERENCE (e2));
    default:
      return false;
    }
}
/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const value_type *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}
/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table <pre_expr_d> expression_to_id;
static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
	 re-allocations by using vec::reserve upfront.  There is no
	 vec::quick_grow_cleared unfortunately.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.safe_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id.find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}
/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
	return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id.find_slot (expr, NO_INSERT);
      if (!slot)
	return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}
/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  expressions.release ();
}

static alloc_pool pre_expr_pool;
/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}
/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;
#define EXP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB)	((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB)	((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB)	((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB)	((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB)	((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB)	((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB)	((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
/* Basic block list in postorder.  */
static int *postorder;
static int postorder_num;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
				      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
					 tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);
/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;
/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : typed_free_remove <expr_pred_trans_d>
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;

  /* hash_table support.  */
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline int equal (const value_type *, const compare_type *);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d *e)
{
  return e->hashcode;
}

inline int
expr_pred_trans_d::equal (const value_type *ve1,
			  const compare_type *ve2)
{
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_d::equal (ve1->e, ve2->e);
}
/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */
static hash_table <expr_pred_trans_d> phi_translate_table;
/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  expr_pred_trans_t *slot;
  expr_pred_trans_d tem;
  hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
					     pred->index);
  tem.e = e;
  tem.pred = pred;
  tem.hashcode = hash;
  slot = phi_translate_table.find_slot_with_hash (&tem, hash, INSERT);
  if (*slot)
    {
      *entry = *slot;
      return true;
    }

  *entry = *slot = XNEW (struct expr_pred_trans_d);
  (*entry)->e = e;
  (*entry)->pred = pred;
  (*entry)->hashcode = hash;
  return false;
}
/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap set;

  gcc_checking_assert (get_expr_value_id (e) == v);

  if (v >= value_expressions.length ())
    value_expressions.safe_grow_cleared (v + 1);

  set = value_expressions[v];
  if (!set)
    {
      set = BITMAP_ALLOC (&grand_bitmap_obstack);
      value_expressions[v] = set;
    }

  bitmap_set_bit (set, get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}
/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  unsigned int id;
  switch (expr->kind)
    {
    case CONSTANT:
      id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
      break;
    case NARY:
      id = PRE_EXPR_NARY (expr)->value_id;
      break;
    case REFERENCE:
      id = PRE_EXPR_REFERENCE (expr)->value_id;
      break;
    default:
      gcc_unreachable ();
    }
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return id;
}
/* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
sccvn_valnum_from_value_id (unsigned int val)
{
  bitmap_iterator bi;
  unsigned int i;
  bitmap exprset = value_expressions[val];
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
	return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
      else if (vexpr->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (vexpr);
    }
  return NULL_TREE;
}
/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}
static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
			  unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
	 insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}
/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}

/* Free memory used up by SET.  */

static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}
/* Generate a topologically-ordered array of bitmap set SET.  */
704 sorted_array_from_bitmap_set (bitmap_set_t set
)
707 bitmap_iterator bi
, bj
;
708 vec
<pre_expr
> result
;
710 /* Pre-allocate roughly enough space for the array. */
711 result
.create (bitmap_count_bits (&set
->values
));
713 FOR_EACH_VALUE_ID_IN_SET (set
, i
, bi
)
/* The number of expressions having a given value is usually
   relatively small.  Thus, rather than making a vector of all
   the expressions and sorting it by value-id, we walk the values
   and check in the reverse mapping that tells us what expressions
   have a given value, to filter those in our set.  As a result,
   the expressions are inserted in value-id order, which means
   topological order.

   If this is somehow a significant loss for some cases, we can
   choose which set to walk based on the set size.  */
725 bitmap exprset
= value_expressions
[i
];
726 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, j
, bj
)
728 if (bitmap_bit_p (&set
->expressions
, j
))
729 result
.safe_push (expression_for_id (j
));
736 /* Perform bitmapped set operation DEST &= ORIG. */
739 bitmap_set_and (bitmap_set_t dest
, bitmap_set_t orig
)
747 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
749 bitmap_and_into (&dest
->values
, &orig
->values
);
750 bitmap_copy (&temp
, &dest
->expressions
);
751 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
753 pre_expr expr
= expression_for_id (i
);
754 unsigned int value_id
= get_expr_value_id (expr
);
755 if (!bitmap_bit_p (&dest
->values
, value_id
))
756 bitmap_clear_bit (&dest
->expressions
, i
);
758 bitmap_clear (&temp
);
762 /* Subtract all values and expressions contained in ORIG from DEST. */
765 bitmap_set_subtract (bitmap_set_t dest
, bitmap_set_t orig
)
767 bitmap_set_t result
= bitmap_set_new ();
771 bitmap_and_compl (&result
->expressions
, &dest
->expressions
,
774 FOR_EACH_EXPR_ID_IN_SET (result
, i
, bi
)
776 pre_expr expr
= expression_for_id (i
);
777 unsigned int value_id
= get_expr_value_id (expr
);
778 bitmap_set_bit (&result
->values
, value_id
);
784 /* Subtract all the values in bitmap set B from bitmap set A. */
787 bitmap_set_subtract_values (bitmap_set_t a
, bitmap_set_t b
)
793 bitmap_initialize (&temp
, &grand_bitmap_obstack
);
795 bitmap_copy (&temp
, &a
->expressions
);
796 EXECUTE_IF_SET_IN_BITMAP (&temp
, 0, i
, bi
)
798 pre_expr expr
= expression_for_id (i
);
799 if (bitmap_set_contains_value (b
, get_expr_value_id (expr
)))
800 bitmap_remove_from_set (a
, expr
);
802 bitmap_clear (&temp
);
806 /* Return true if bitmapped set SET contains the value VALUE_ID. */
809 bitmap_set_contains_value (bitmap_set_t set
, unsigned int value_id
)
811 if (value_id_constant_p (value_id
))
814 if (!set
|| bitmap_empty_p (&set
->expressions
))
817 return bitmap_bit_p (&set
->values
, value_id
);
821 bitmap_set_contains_expr (bitmap_set_t set
, const pre_expr expr
)
823 return bitmap_bit_p (&set
->expressions
, get_expression_id (expr
));
826 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
829 bitmap_set_replace_value (bitmap_set_t set
, unsigned int lookfor
,
836 if (value_id_constant_p (lookfor
))
839 if (!bitmap_set_contains_value (set
, lookfor
))
/* The number of expressions having a given value is usually
   significantly less than the total number of expressions in SET.
   Thus, rather than check, for each expression in SET, whether it
   has the value LOOKFOR, we walk the reverse mapping that tells us
   what expressions have a given value, and see if any of those
   expressions are in our set.  For large testcases, this is about
   5-10x faster than walking the bitmap.  If this is somehow a
   significant loss for some cases, we can choose which set to walk
   based on the set size.  */
851 exprset
= value_expressions
[lookfor
];
852 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
854 if (bitmap_clear_bit (&set
->expressions
, i
))
856 bitmap_set_bit (&set
->expressions
, get_expression_id (expr
));
864 /* Return true if two bitmap sets are equal. */
867 bitmap_set_equal (bitmap_set_t a
, bitmap_set_t b
)
869 return bitmap_equal_p (&a
->values
, &b
->values
);
872 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
873 and add it otherwise. */
876 bitmap_value_replace_in_set (bitmap_set_t set
, pre_expr expr
)
878 unsigned int val
= get_expr_value_id (expr
);
880 if (bitmap_set_contains_value (set
, val
))
881 bitmap_set_replace_value (set
, val
, expr
);
883 bitmap_insert_into_set (set
, expr
);
886 /* Insert EXPR into SET if EXPR's value is not already present in
890 bitmap_value_insert_into_set (bitmap_set_t set
, pre_expr expr
)
892 unsigned int val
= get_expr_value_id (expr
);
894 gcc_checking_assert (expr
->id
== get_or_alloc_expression_id (expr
));
896 /* Constant values are always considered to be part of the set. */
897 if (value_id_constant_p (val
))
900 /* If the value membership changed, add the expression. */
901 if (bitmap_set_bit (&set
->values
, val
))
902 bitmap_set_bit (&set
->expressions
, expr
->id
);
905 /* Print out EXPR to outfile. */
908 print_pre_expr (FILE *outfile
, const pre_expr expr
)
913 print_generic_expr (outfile
, PRE_EXPR_CONSTANT (expr
), 0);
916 print_generic_expr (outfile
, PRE_EXPR_NAME (expr
), 0);
921 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
922 fprintf (outfile
, "{%s,", get_tree_code_name (nary
->opcode
));
923 for (i
= 0; i
< nary
->length
; i
++)
925 print_generic_expr (outfile
, nary
->op
[i
], 0);
926 if (i
!= (unsigned) nary
->length
- 1)
927 fprintf (outfile
, ",");
929 fprintf (outfile
, "}");
935 vn_reference_op_t vro
;
937 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
938 fprintf (outfile
, "{");
940 ref
->operands
.iterate (i
, &vro
);
943 bool closebrace
= false;
944 if (vro
->opcode
!= SSA_NAME
945 && TREE_CODE_CLASS (vro
->opcode
) != tcc_declaration
)
947 fprintf (outfile
, "%s", get_tree_code_name (vro
->opcode
));
950 fprintf (outfile
, "<");
956 print_generic_expr (outfile
, vro
->op0
, 0);
959 fprintf (outfile
, ",");
960 print_generic_expr (outfile
, vro
->op1
, 0);
964 fprintf (outfile
, ",");
965 print_generic_expr (outfile
, vro
->op2
, 0);
969 fprintf (outfile
, ">");
970 if (i
!= ref
->operands
.length () - 1)
971 fprintf (outfile
, ",");
973 fprintf (outfile
, "}");
976 fprintf (outfile
, "@");
977 print_generic_expr (outfile
, ref
->vuse
, 0);
983 void debug_pre_expr (pre_expr
);
985 /* Like print_pre_expr but always prints to stderr. */
987 debug_pre_expr (pre_expr e
)
989 print_pre_expr (stderr
, e
);
990 fprintf (stderr
, "\n");
993 /* Print out SET to OUTFILE. */
996 print_bitmap_set (FILE *outfile
, bitmap_set_t set
,
997 const char *setname
, int blockindex
)
999 fprintf (outfile
, "%s[%d] := { ", setname
, blockindex
);
1006 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
1008 const pre_expr expr
= expression_for_id (i
);
1011 fprintf (outfile
, ", ");
1013 print_pre_expr (outfile
, expr
);
1015 fprintf (outfile
, " (%04d)", get_expr_value_id (expr
));
1018 fprintf (outfile
, " }\n");
1021 void debug_bitmap_set (bitmap_set_t
);
1024 debug_bitmap_set (bitmap_set_t set
)
1026 print_bitmap_set (stderr
, set
, "debug", 0);
1029 void debug_bitmap_sets_for (basic_block
);
1032 debug_bitmap_sets_for (basic_block bb
)
1034 print_bitmap_set (stderr
, AVAIL_OUT (bb
), "avail_out", bb
->index
);
1035 print_bitmap_set (stderr
, EXP_GEN (bb
), "exp_gen", bb
->index
);
1036 print_bitmap_set (stderr
, PHI_GEN (bb
), "phi_gen", bb
->index
);
1037 print_bitmap_set (stderr
, TMP_GEN (bb
), "tmp_gen", bb
->index
);
1038 print_bitmap_set (stderr
, ANTIC_IN (bb
), "antic_in", bb
->index
);
1039 if (do_partial_partial
)
1040 print_bitmap_set (stderr
, PA_IN (bb
), "pa_in", bb
->index
);
1041 print_bitmap_set (stderr
, NEW_SETS (bb
), "new_sets", bb
->index
);
1044 /* Print out the expressions that have VAL to OUTFILE. */
1047 print_value_expressions (FILE *outfile
, unsigned int val
)
1049 bitmap set
= value_expressions
[val
];
1054 sprintf (s
, "%04d", val
);
1055 x
.expressions
= *set
;
1056 print_bitmap_set (outfile
, &x
, s
, 0);
1062 debug_value_expressions (unsigned int val
)
1064 print_value_expressions (stderr
, val
);
/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */
1071 get_or_alloc_expr_for_constant (tree constant
)
1073 unsigned int result_id
;
1074 unsigned int value_id
;
1075 struct pre_expr_d expr
;
1078 expr
.kind
= CONSTANT
;
1079 PRE_EXPR_CONSTANT (&expr
) = constant
;
1080 result_id
= lookup_expression_id (&expr
);
1082 return expression_for_id (result_id
);
1084 newexpr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1085 newexpr
->kind
= CONSTANT
;
1086 PRE_EXPR_CONSTANT (newexpr
) = constant
;
1087 alloc_expression_id (newexpr
);
1088 value_id
= get_or_alloc_constant_value_id (constant
);
1089 add_to_value (value_id
, newexpr
);
/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */
1098 get_constant_for_value_id (unsigned int v
)
1100 if (value_id_constant_p (v
))
1104 bitmap exprset
= value_expressions
[v
];
1106 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1108 pre_expr expr
= expression_for_id (i
);
1109 if (expr
->kind
== CONSTANT
)
1110 return PRE_EXPR_CONSTANT (expr
);
1116 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1117 Currently only supports constants and SSA_NAMES. */
1119 get_or_alloc_expr_for (tree t
)
1121 if (TREE_CODE (t
) == SSA_NAME
)
1122 return get_or_alloc_expr_for_name (t
);
1123 else if (is_gimple_min_invariant (t
))
1124 return get_or_alloc_expr_for_constant (t
);
/* More complex expressions can result from SCCVN expression
   simplification that inserts values for them.  As none of them
   have VOPs, they get handled by the nary ops struct.  */
1130 vn_nary_op_t result
;
1131 unsigned int result_id
;
1132 vn_nary_op_lookup (t
, &result
);
1135 pre_expr e
= (pre_expr
) pool_alloc (pre_expr_pool
);
1137 PRE_EXPR_NARY (e
) = result
;
1138 result_id
= lookup_expression_id (e
);
1141 pool_free (pre_expr_pool
, e
);
1142 e
= expression_for_id (result_id
);
1145 alloc_expression_id (e
);
1152 /* Return the folded version of T if T, when folded, is a gimple
1153 min_invariant. Otherwise, return T. */
1156 fully_constant_expression (pre_expr e
)
1164 vn_nary_op_t nary
= PRE_EXPR_NARY (e
);
1165 switch (TREE_CODE_CLASS (nary
->opcode
))
1168 case tcc_comparison
:
1170 /* We have to go from trees to pre exprs to value ids to
1172 tree naryop0
= nary
->op
[0];
1173 tree naryop1
= nary
->op
[1];
1175 if (!is_gimple_min_invariant (naryop0
))
1177 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1178 unsigned int vrep0
= get_expr_value_id (rep0
);
1179 tree const0
= get_constant_for_value_id (vrep0
);
1181 naryop0
= fold_convert (TREE_TYPE (naryop0
), const0
);
1183 if (!is_gimple_min_invariant (naryop1
))
1185 pre_expr rep1
= get_or_alloc_expr_for (naryop1
);
1186 unsigned int vrep1
= get_expr_value_id (rep1
);
1187 tree const1
= get_constant_for_value_id (vrep1
);
1189 naryop1
= fold_convert (TREE_TYPE (naryop1
), const1
);
1191 result
= fold_binary (nary
->opcode
, nary
->type
,
1193 if (result
&& is_gimple_min_invariant (result
))
1194 return get_or_alloc_expr_for_constant (result
);
/* We might have simplified the expression to an
   SSA_NAME, for example from x_1 * 1.  But we cannot
   insert a PHI for x_1 unconditionally as x_1 might
   not be readily available.  */
1202 if (nary
->opcode
!= REALPART_EXPR
1203 && nary
->opcode
!= IMAGPART_EXPR
1204 && nary
->opcode
!= VIEW_CONVERT_EXPR
)
1209 /* We have to go from trees to pre exprs to value ids to
1211 tree naryop0
= nary
->op
[0];
1212 tree const0
, result
;
1213 if (is_gimple_min_invariant (naryop0
))
1217 pre_expr rep0
= get_or_alloc_expr_for (naryop0
);
1218 unsigned int vrep0
= get_expr_value_id (rep0
);
1219 const0
= get_constant_for_value_id (vrep0
);
1224 tree type1
= TREE_TYPE (nary
->op
[0]);
1225 const0
= fold_convert (type1
, const0
);
1226 result
= fold_unary (nary
->opcode
, nary
->type
, const0
);
1228 if (result
&& is_gimple_min_invariant (result
))
1229 return get_or_alloc_expr_for_constant (result
);
1238 vn_reference_t ref
= PRE_EXPR_REFERENCE (e
);
1240 if ((folded
= fully_constant_vn_reference_p (ref
)))
1241 return get_or_alloc_expr_for_constant (folded
);
1250 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1251 it has the value it would have in BLOCK. Set *SAME_VALID to true
1252 in case the new vuse doesn't change the value id of the OPERANDS. */
1255 translate_vuse_through_block (vec
<vn_reference_op_s
> operands
,
1256 alias_set_type set
, tree type
, tree vuse
,
1257 basic_block phiblock
,
1258 basic_block block
, bool *same_valid
)
1260 gimple phi
= SSA_NAME_DEF_STMT (vuse
);
1267 if (gimple_bb (phi
) != phiblock
)
1270 use_oracle
= ao_ref_init_from_vn_reference (&ref
, set
, type
, operands
);
1272 /* Use the alias-oracle to find either the PHI node in this block,
1273 the first VUSE used in this block that is equivalent to vuse or
1274 the first VUSE which definition in this block kills the value. */
1275 if (gimple_code (phi
) == GIMPLE_PHI
)
1276 e
= find_edge (block
, phiblock
);
1277 else if (use_oracle
)
1278 while (!stmt_may_clobber_ref_p_1 (phi
, &ref
))
1280 vuse
= gimple_vuse (phi
);
1281 phi
= SSA_NAME_DEF_STMT (vuse
);
1282 if (gimple_bb (phi
) != phiblock
)
1284 if (gimple_code (phi
) == GIMPLE_PHI
)
1286 e
= find_edge (block
, phiblock
);
1297 bitmap visited
= NULL
;
1299 /* Try to find a vuse that dominates this phi node by skipping
1300 non-clobbering statements. */
1301 vuse
= get_continuation_for_phi (phi
, &ref
, &cnt
, &visited
, false);
1303 BITMAP_FREE (visited
);
1309 /* If we didn't find any, the value ID can't stay the same,
1310 but return the translated vuse. */
1311 *same_valid
= false;
1312 vuse
= PHI_ARG_DEF (phi
, e
->dest_idx
);
1314 /* ??? We would like to return vuse here as this is the canonical
1315 upmost vdef that this reference is associated with. But during
1316 insertion of the references into the hash tables we only ever
1317 directly insert with their direct gimple_vuse, hence returning
1318 something else would make us not find the other expression. */
1319 return PHI_ARG_DEF (phi
, e
->dest_idx
);
1325 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1326 SET2. This is used to avoid making a set consisting of the union
1327 of PA_IN and ANTIC_IN during insert. */
1329 static inline pre_expr
1330 find_leader_in_sets (unsigned int val
, bitmap_set_t set1
, bitmap_set_t set2
)
1334 result
= bitmap_find_leader (set1
, val
);
1335 if (!result
&& set2
)
1336 result
= bitmap_find_leader (set2
, val
);
1340 /* Get the tree type for our PRE expression e. */
1343 get_expr_type (const pre_expr e
)
1348 return TREE_TYPE (PRE_EXPR_NAME (e
));
1350 return TREE_TYPE (PRE_EXPR_CONSTANT (e
));
1352 return PRE_EXPR_REFERENCE (e
)->type
;
1354 return PRE_EXPR_NARY (e
)->type
;
1359 /* Get a representative SSA_NAME for a given expression.
1360 Since all of our sub-expressions are treated as values, we require
1361 them to be SSA_NAME's for simplicity.
1362 Prior versions of GVNPRE used to use "value handles" here, so that
1363 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1364 either case, the operands are really values (IE we do not expect
1365 them to be usable without finding leaders). */
1368 get_representative_for (const pre_expr e
)
1371 unsigned int value_id
= get_expr_value_id (e
);
1376 return PRE_EXPR_NAME (e
);
1378 return PRE_EXPR_CONSTANT (e
);
1382 /* Go through all of the expressions representing this value
1383 and pick out an SSA_NAME. */
1386 bitmap exprs
= value_expressions
[value_id
];
1387 EXECUTE_IF_SET_IN_BITMAP (exprs
, 0, i
, bi
)
1389 pre_expr rep
= expression_for_id (i
);
1390 if (rep
->kind
== NAME
)
1391 return PRE_EXPR_NAME (rep
);
1392 else if (rep
->kind
== CONSTANT
)
1393 return PRE_EXPR_CONSTANT (rep
);
1399 /* If we reached here we couldn't find an SSA_NAME. This can
1400 happen when we've discovered a value that has never appeared in
1401 the program as set to an SSA_NAME, as the result of phi translation.
1403 ??? We should be able to re-use this when we insert the statement
1405 name
= make_temp_ssa_name (get_expr_type (e
), gimple_build_nop (), "pretmp");
1406 VN_INFO_GET (name
)->value_id
= value_id
;
1407 VN_INFO (name
)->valnum
= name
;
1408 /* ??? For now mark this SSA name for release by SCCVN. */
1409 VN_INFO (name
)->needs_insertion
= true;
1410 add_to_value (value_id
, get_or_alloc_expr_for_name (name
));
1411 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1413 fprintf (dump_file
, "Created SSA_NAME representative ");
1414 print_generic_expr (dump_file
, name
, 0);
1415 fprintf (dump_file
, " for expression:");
1416 print_pre_expr (dump_file
, e
);
1417 fprintf (dump_file
, " (%04d)\n", value_id
);
1426 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1427 basic_block pred
, basic_block phiblock
);
1429 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1430 the phis in PRED. Return NULL if we can't find a leader for each part
1431 of the translated expression. */
1434 phi_translate_1 (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1435 basic_block pred
, basic_block phiblock
)
1442 bool changed
= false;
1443 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1444 vn_nary_op_t newnary
= XALLOCAVAR (struct vn_nary_op_s
,
1445 sizeof_vn_nary_op (nary
->length
));
1446 memcpy (newnary
, nary
, sizeof_vn_nary_op (nary
->length
));
1448 for (i
= 0; i
< newnary
->length
; i
++)
1450 if (TREE_CODE (newnary
->op
[i
]) != SSA_NAME
)
1454 pre_expr leader
, result
;
1455 unsigned int op_val_id
= VN_INFO (newnary
->op
[i
])->value_id
;
1456 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1457 result
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1458 if (result
&& result
!= leader
)
1460 tree name
= get_representative_for (result
);
1463 newnary
->op
[i
] = name
;
1468 changed
|= newnary
->op
[i
] != nary
->op
[i
];
1474 unsigned int new_val_id
;
1476 tree result
= vn_nary_op_lookup_pieces (newnary
->length
,
1481 if (result
&& is_gimple_min_invariant (result
))
1482 return get_or_alloc_expr_for_constant (result
);
1484 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1489 PRE_EXPR_NARY (expr
) = nary
;
1490 constant
= fully_constant_expression (expr
);
1491 if (constant
!= expr
)
1494 new_val_id
= nary
->value_id
;
1495 get_or_alloc_expression_id (expr
);
1499 new_val_id
= get_next_value_id ();
1500 value_expressions
.safe_grow_cleared (get_max_value_id () + 1);
1501 nary
= vn_nary_op_insert_pieces (newnary
->length
,
1505 result
, new_val_id
);
1506 PRE_EXPR_NARY (expr
) = nary
;
1507 constant
= fully_constant_expression (expr
);
1508 if (constant
!= expr
)
1510 get_or_alloc_expression_id (expr
);
1512 add_to_value (new_val_id
, expr
);
1520 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1521 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1522 tree vuse
= ref
->vuse
;
1523 tree newvuse
= vuse
;
1524 vec
<vn_reference_op_s
> newoperands
= vNULL
;
1525 bool changed
= false, same_valid
= true;
1526 unsigned int i
, j
, n
;
1527 vn_reference_op_t operand
;
1528 vn_reference_t newref
;
1531 operands
.iterate (i
, &operand
); i
++, j
++)
1536 tree type
= operand
->type
;
1537 vn_reference_op_s newop
= *operand
;
1538 op
[0] = operand
->op0
;
1539 op
[1] = operand
->op1
;
1540 op
[2] = operand
->op2
;
1541 for (n
= 0; n
< 3; ++n
)
1543 unsigned int op_val_id
;
1546 if (TREE_CODE (op
[n
]) != SSA_NAME
)
1548 /* We can't possibly insert these. */
1550 && !is_gimple_min_invariant (op
[n
]))
1554 op_val_id
= VN_INFO (op
[n
])->value_id
;
1555 leader
= find_leader_in_sets (op_val_id
, set1
, set2
);
1558 opresult
= phi_translate (leader
, set1
, set2
, pred
, phiblock
);
1561 if (opresult
!= leader
)
1563 tree name
= get_representative_for (opresult
);
1566 changed
|= name
!= op
[n
];
1572 newoperands
.release ();
1575 if (!newoperands
.exists ())
1576 newoperands
= operands
.copy ();
1577 /* We may have changed from an SSA_NAME to a constant */
1578 if (newop
.opcode
== SSA_NAME
&& TREE_CODE (op
[0]) != SSA_NAME
)
1579 newop
.opcode
= TREE_CODE (op
[0]);
1584 /* If it transforms a non-constant ARRAY_REF into a constant
1585 one, adjust the constant offset. */
1586 if (newop
.opcode
== ARRAY_REF
1588 && TREE_CODE (op
[0]) == INTEGER_CST
1589 && TREE_CODE (op
[1]) == INTEGER_CST
1590 && TREE_CODE (op
[2]) == INTEGER_CST
)
1592 double_int off
= tree_to_double_int (op
[0]);
1593 off
+= -tree_to_double_int (op
[1]);
1594 off
*= tree_to_double_int (op
[2]);
1595 if (off
.fits_shwi ())
1596 newop
.off
= off
.low
;
1598 newoperands
[j
] = newop
;
1599 /* If it transforms from an SSA_NAME to an address, fold with
1600 a preceding indirect reference. */
1601 if (j
> 0 && op
[0] && TREE_CODE (op
[0]) == ADDR_EXPR
1602 && newoperands
[j
- 1].opcode
== MEM_REF
)
1603 vn_reference_fold_indirect (&newoperands
, &j
);
1605 if (i
!= operands
.length ())
1607 newoperands
.release ();
1613 newvuse
= translate_vuse_through_block (newoperands
,
1614 ref
->set
, ref
->type
,
1615 vuse
, phiblock
, pred
,
1617 if (newvuse
== NULL_TREE
)
1619 newoperands
.release ();
1624 if (changed
|| newvuse
!= vuse
)
1626 unsigned int new_val_id
;
1629 tree result
= vn_reference_lookup_pieces (newvuse
, ref
->set
,
1634 newoperands
.release ();
/* We can always insert constants, so if we have a partially
   redundant constant load of another type try to translate it
   to a constant of appropriate type.  */
1639 if (result
&& is_gimple_min_invariant (result
))
1642 if (!useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1644 tem
= fold_unary (VIEW_CONVERT_EXPR
, ref
->type
, result
);
1645 if (tem
&& !is_gimple_min_invariant (tem
))
1649 return get_or_alloc_expr_for_constant (tem
);
1652 /* If we'd have to convert things we would need to validate
1653 if we can insert the translated expression. So fail
1654 here for now - we cannot insert an alias with a different
1655 type in the VN tables either, as that would assert. */
1657 && !useless_type_conversion_p (ref
->type
, TREE_TYPE (result
)))
1659 else if (!result
&& newref
1660 && !useless_type_conversion_p (ref
->type
, newref
->type
))
1662 newoperands
.release ();
1666 expr
= (pre_expr
) pool_alloc (pre_expr_pool
);
1667 expr
->kind
= REFERENCE
;
1672 PRE_EXPR_REFERENCE (expr
) = newref
;
1673 constant
= fully_constant_expression (expr
);
1674 if (constant
!= expr
)
1677 new_val_id
= newref
->value_id
;
1678 get_or_alloc_expression_id (expr
);
1682 if (changed
|| !same_valid
)
1684 new_val_id
= get_next_value_id ();
1685 value_expressions
.safe_grow_cleared
1686 (get_max_value_id () + 1);
1689 new_val_id
= ref
->value_id
;
1690 newref
= vn_reference_insert_pieces (newvuse
, ref
->set
,
1693 result
, new_val_id
);
1694 newoperands
.create (0);
1695 PRE_EXPR_REFERENCE (expr
) = newref
;
1696 constant
= fully_constant_expression (expr
);
1697 if (constant
!= expr
)
1699 get_or_alloc_expression_id (expr
);
1701 add_to_value (new_val_id
, expr
);
1703 newoperands
.release ();
1710 tree name
= PRE_EXPR_NAME (expr
);
1711 gimple def_stmt
= SSA_NAME_DEF_STMT (name
);
1712 /* If the SSA name is defined by a PHI node in this block,
1714 if (gimple_code (def_stmt
) == GIMPLE_PHI
1715 && gimple_bb (def_stmt
) == phiblock
)
1717 edge e
= find_edge (pred
, gimple_bb (def_stmt
));
1718 tree def
= PHI_ARG_DEF (def_stmt
, e
->dest_idx
);
1720 /* Handle constant. */
1721 if (is_gimple_min_invariant (def
))
1722 return get_or_alloc_expr_for_constant (def
);
1724 return get_or_alloc_expr_for_name (def
);
/* Otherwise return it unchanged - it will get cleaned if its
   value is not available in PRED's AVAIL_OUT set of expressions.  */
1736 /* Wrapper around phi_translate_1 providing caching functionality. */
1739 phi_translate (pre_expr expr
, bitmap_set_t set1
, bitmap_set_t set2
,
1740 basic_block pred
, basic_block phiblock
)
1742 expr_pred_trans_t slot
= NULL
;
1748 /* Constants contain no values that need translation. */
1749 if (expr
->kind
== CONSTANT
)
1752 if (value_id_constant_p (get_expr_value_id (expr
)))
1755 /* Don't add translations of NAMEs as those are cheap to translate. */
1756 if (expr
->kind
!= NAME
)
1758 if (phi_trans_add (&slot
, expr
, pred
))
1760 /* Store NULL for the value we want to return in the case of
1766 phitrans
= phi_translate_1 (expr
, set1
, set2
, pred
, phiblock
);
1773 /* Remove failed translations again, they cause insert
1774 iteration to not pick up new opportunities reliably. */
1775 phi_translate_table
.remove_elt_with_hash (slot
, slot
->hashcode
);
1782 /* For each expression in SET, translate the values through phi nodes
1783 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1784 expressions in DEST. */
1787 phi_translate_set (bitmap_set_t dest
, bitmap_set_t set
, basic_block pred
,
1788 basic_block phiblock
)
1790 vec
<pre_expr
> exprs
;
1794 if (gimple_seq_empty_p (phi_nodes (phiblock
)))
1796 bitmap_set_copy (dest
, set
);
1800 exprs
= sorted_array_from_bitmap_set (set
);
1801 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
1803 pre_expr translated
;
1804 translated
= phi_translate (expr
, set
, NULL
, pred
, phiblock
);
1808 /* We might end up with multiple expressions from SET being
1809 translated to the same value. In this case we do not want
1810 to retain the NARY or REFERENCE expression but prefer a NAME
1811 which would be the leader. */
1812 if (translated
->kind
== NAME
)
1813 bitmap_value_replace_in_set (dest
, translated
);
1815 bitmap_value_insert_into_set (dest
, translated
);
/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  Return NULL if no leader
   is found.  */
1825 bitmap_find_leader (bitmap_set_t set
, unsigned int val
)
1827 if (value_id_constant_p (val
))
1831 bitmap exprset
= value_expressions
[val
];
1833 EXECUTE_IF_SET_IN_BITMAP (exprset
, 0, i
, bi
)
1835 pre_expr expr
= expression_for_id (i
);
1836 if (expr
->kind
== CONSTANT
)
1840 if (bitmap_set_contains_value (set
, val
))
1842 /* Rather than walk the entire bitmap of expressions, and see
1843 whether any of them has the value we are looking for, we look
1844 at the reverse mapping, which tells us the set of expressions
1845 that have a given value (IE value->expressions with that
1846 value) and see if any of those expressions are in our set.
1847 The number of expressions per value is usually significantly
1848 less than the number of expressions in the set. In fact, for
1849 large testcases, doing it this way is roughly 5-10x faster
1850 than walking the bitmap.
If this is somehow a significant loss for some cases, we can
1852 choose which set to walk based on which set is smaller. */
1855 bitmap exprset
= value_expressions
[val
];
1857 EXECUTE_IF_AND_IN_BITMAP (exprset
, &set
->expressions
, 0, i
, bi
)
1858 return expression_for_id (i
);
1863 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1864 BLOCK by seeing if it is not killed in the block. Note that we are
1865 only determining whether there is a store that kills it. Because
1866 of the order in which clean iterates over values, we are guaranteed
1867 that altered operands will have caused us to be eliminated from the
1868 ANTIC_IN set already. */
1871 value_dies_in_block_x (pre_expr expr
, basic_block block
)
1873 tree vuse
= PRE_EXPR_REFERENCE (expr
)->vuse
;
1874 vn_reference_t refx
= PRE_EXPR_REFERENCE (expr
);
1876 gimple_stmt_iterator gsi
;
1877 unsigned id
= get_expression_id (expr
);
1884 /* Lookup a previously calculated result. */
1885 if (EXPR_DIES (block
)
1886 && bitmap_bit_p (EXPR_DIES (block
), id
* 2))
1887 return bitmap_bit_p (EXPR_DIES (block
), id
* 2 + 1);
1889 /* A memory expression {e, VUSE} dies in the block if there is a
1890 statement that may clobber e. If, starting statement walk from the
1891 top of the basic block, a statement uses VUSE there can be no kill
in between that use and the original statement that loaded {e, VUSE},
1893 so we can stop walking. */
1894 ref
.base
= NULL_TREE
;
1895 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1897 tree def_vuse
, def_vdef
;
1898 def
= gsi_stmt (gsi
);
1899 def_vuse
= gimple_vuse (def
);
1900 def_vdef
= gimple_vdef (def
);
1902 /* Not a memory statement. */
1906 /* Not a may-def. */
1909 /* A load with the same VUSE, we're done. */
1910 if (def_vuse
== vuse
)
1916 /* Init ref only if we really need it. */
1917 if (ref
.base
== NULL_TREE
1918 && !ao_ref_init_from_vn_reference (&ref
, refx
->set
, refx
->type
,
1924 /* If the statement may clobber expr, it dies. */
1925 if (stmt_may_clobber_ref_p_1 (def
, &ref
))
1932 /* Remember the result. */
1933 if (!EXPR_DIES (block
))
1934 EXPR_DIES (block
) = BITMAP_ALLOC (&grand_bitmap_obstack
);
1935 bitmap_set_bit (EXPR_DIES (block
), id
* 2);
1937 bitmap_set_bit (EXPR_DIES (block
), id
* 2 + 1);
1943 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1944 contains its value-id. */
1947 op_valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, tree op
)
1949 if (op
&& TREE_CODE (op
) == SSA_NAME
)
1951 unsigned int value_id
= VN_INFO (op
)->value_id
;
1952 if (!(bitmap_set_contains_value (set1
, value_id
)
1953 || (set2
&& bitmap_set_contains_value (set2
, value_id
))))
1959 /* Determine if the expression EXPR is valid in SET1 U SET2.
1960 ONLY SET2 CAN BE NULL.
1961 This means that we have a leader for each part of the expression
1962 (if it consists of values), or the expression is an SSA_NAME.
1963 For loads/calls, we also see if the vuse is killed in this block. */
1966 valid_in_sets (bitmap_set_t set1
, bitmap_set_t set2
, pre_expr expr
,
1972 return bitmap_find_leader (AVAIL_OUT (block
),
1973 get_expr_value_id (expr
)) != NULL
;
1977 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
1978 for (i
= 0; i
< nary
->length
; i
++)
1979 if (!op_valid_in_sets (set1
, set2
, nary
->op
[i
]))
1986 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
1987 vn_reference_op_t vro
;
1990 FOR_EACH_VEC_ELT (ref
->operands
, i
, vro
)
1992 if (!op_valid_in_sets (set1
, set2
, vro
->op0
)
1993 || !op_valid_in_sets (set1
, set2
, vro
->op1
)
1994 || !op_valid_in_sets (set1
, set2
, vro
->op2
))
/* Clean the set of expressions that are no longer valid in SET1 or
   SET2.  This means expressions that are made up of values we have no
   leaders for in SET1 or SET2.  This version is used for partial
   anticipation, which means it is not valid in either ANTIC_IN or
   PA_IN alone.  */
2011 dependent_clean (bitmap_set_t set1
, bitmap_set_t set2
, basic_block block
)
2013 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set1
);
2017 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2019 if (!valid_in_sets (set1
, set2
, expr
, block
))
2020 bitmap_remove_from_set (set1
, expr
);
/* Clean the set of expressions that are no longer valid in SET.  This
   means expressions that are made up of values we have no leaders for
   in SET.  */
2030 clean (bitmap_set_t set
, basic_block block
)
2032 vec
<pre_expr
> exprs
= sorted_array_from_bitmap_set (set
);
2036 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
2038 if (!valid_in_sets (set
, NULL
, expr
, block
))
2039 bitmap_remove_from_set (set
, expr
);
2044 /* Clean the set of expressions that are no longer valid in SET because
2045 they are clobbered in BLOCK or because they trap and may not be executed. */
2048 prune_clobbered_mems (bitmap_set_t set
, basic_block block
)
2053 FOR_EACH_EXPR_ID_IN_SET (set
, i
, bi
)
2055 pre_expr expr
= expression_for_id (i
);
2056 if (expr
->kind
== REFERENCE
)
2058 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2061 gimple def_stmt
= SSA_NAME_DEF_STMT (ref
->vuse
);
2062 if (!gimple_nop_p (def_stmt
)
2063 && ((gimple_bb (def_stmt
) != block
2064 && !dominated_by_p (CDI_DOMINATORS
,
2065 block
, gimple_bb (def_stmt
)))
2066 || (gimple_bb (def_stmt
) == block
2067 && value_dies_in_block_x (expr
, block
))))
2068 bitmap_remove_from_set (set
, expr
);
2071 else if (expr
->kind
== NARY
)
2073 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2074 /* If the NARY may trap make sure the block does not contain
2075 a possible exit point.
2076 ??? This is overly conservative if we translate AVAIL_OUT
2077 as the available expression might be after the exit point. */
2078 if (BB_MAY_NOTRETURN (block
)
2079 && vn_nary_may_trap (nary
))
2080 bitmap_remove_from_set (set
, expr
);
2085 static sbitmap has_abnormal_preds
;
2087 /* List of blocks that may have changed during ANTIC computation and
2088 thus need to be iterated over. */
2090 static sbitmap changed_blocks
;
2092 /* Decide whether to defer a block for a later iteration, or PHI
2093 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2094 should defer the block, and true if we processed it. */
2097 defer_or_phi_translate_block (bitmap_set_t dest
, bitmap_set_t source
,
2098 basic_block block
, basic_block phiblock
)
2100 if (!BB_VISITED (phiblock
))
2102 bitmap_set_bit (changed_blocks
, block
->index
);
2103 BB_VISITED (block
) = 0;
2104 BB_DEFERRED (block
) = 1;
2108 phi_translate_set (dest
, source
, block
, phiblock
);
/* Compute the ANTIC set for BLOCK.

   If succs(BLOCK) > 1 then
     ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])

   ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
*/
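
/* Hypothetical example for the equations above: if a_1 + b_2 is
   anticipatable in every successor of BLOCK, it lands in ANTIC_OUT[BLOCK].
   If BLOCK itself defines b_2, then b_2's value is in TMP_GEN[BLOCK] and
   there is no leader for it at the top of BLOCK, so clean () removes
   a_1 + b_2 again and it does not appear in ANTIC_IN[BLOCK].  */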
2123 compute_antic_aux (basic_block block
, bool block_has_abnormal_pred_edge
)
2125 bool changed
= false;
2126 bitmap_set_t S
, old
, ANTIC_OUT
;
2132 old
= ANTIC_OUT
= S
= NULL
;
2133 BB_VISITED (block
) = 1;
2135 /* If any edges from predecessors are abnormal, antic_in is empty,
2137 if (block_has_abnormal_pred_edge
)
2138 goto maybe_dump_sets
;
2140 old
= ANTIC_IN (block
);
2141 ANTIC_OUT
= bitmap_set_new ();
2143 /* If the block has no successors, ANTIC_OUT is empty. */
2144 if (EDGE_COUNT (block
->succs
) == 0)
2146 /* If we have one successor, we could have some phi nodes to
2147 translate through. */
2148 else if (single_succ_p (block
))
2150 basic_block succ_bb
= single_succ (block
);
2152 /* We trade iterations of the dataflow equations for having to
2153 phi translate the maximal set, which is incredibly slow
2154 (since the maximal set often has 300+ members, even when you
2155 have a small number of blocks).
Basically, we defer the computation of ANTIC for this block
until we have processed its successor, which will inevitably
2158 have a *much* smaller set of values to phi translate once
2159 clean has been run on it.
2160 The cost of doing this is that we technically perform more
2161 iterations, however, they are lower cost iterations.
2163 Timings for PRE on tramp3d-v4:
2164 without maximal set fix: 11 seconds
2165 with maximal set fix/without deferring: 26 seconds
2166 with maximal set fix/with deferring: 11 seconds
2169 if (!defer_or_phi_translate_block (ANTIC_OUT
, ANTIC_IN (succ_bb
),
2173 goto maybe_dump_sets
;
2176 /* If we have multiple successors, we take the intersection of all of
2177 them. Note that in the case of loop exit phi nodes, we may have
2178 phis to translate through. */
2181 vec
<basic_block
> worklist
;
2183 basic_block bprime
, first
= NULL
;
2185 worklist
.create (EDGE_COUNT (block
->succs
));
2186 FOR_EACH_EDGE (e
, ei
, block
->succs
)
2189 && BB_VISITED (e
->dest
))
2191 else if (BB_VISITED (e
->dest
))
2192 worklist
.quick_push (e
->dest
);
/* With multiple successors we must already have visited at least one.  */
2198 bitmap_set_bit (changed_blocks
, block
->index
);
2199 BB_VISITED (block
) = 0;
2200 BB_DEFERRED (block
) = 1;
2202 worklist
.release ();
2203 goto maybe_dump_sets
;
2206 if (!gimple_seq_empty_p (phi_nodes (first
)))
2207 phi_translate_set (ANTIC_OUT
, ANTIC_IN (first
), block
, first
);
2209 bitmap_set_copy (ANTIC_OUT
, ANTIC_IN (first
));
2211 FOR_EACH_VEC_ELT (worklist
, i
, bprime
)
2213 if (!gimple_seq_empty_p (phi_nodes (bprime
)))
2215 bitmap_set_t tmp
= bitmap_set_new ();
2216 phi_translate_set (tmp
, ANTIC_IN (bprime
), block
, bprime
);
2217 bitmap_set_and (ANTIC_OUT
, tmp
);
2218 bitmap_set_free (tmp
);
2221 bitmap_set_and (ANTIC_OUT
, ANTIC_IN (bprime
));
2223 worklist
.release ();
2226 /* Prune expressions that are clobbered in block and thus become
2227 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2228 prune_clobbered_mems (ANTIC_OUT
, block
);
2230 /* Generate ANTIC_OUT - TMP_GEN. */
2231 S
= bitmap_set_subtract (ANTIC_OUT
, TMP_GEN (block
));
2233 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2234 ANTIC_IN (block
) = bitmap_set_subtract (EXP_GEN (block
),
2237 /* Then union in the ANTIC_OUT - TMP_GEN values,
2238 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2239 FOR_EACH_EXPR_ID_IN_SET (S
, bii
, bi
)
2240 bitmap_value_insert_into_set (ANTIC_IN (block
),
2241 expression_for_id (bii
));
2243 clean (ANTIC_IN (block
), block
);
2245 if (!bitmap_set_equal (old
, ANTIC_IN (block
)))
2248 bitmap_set_bit (changed_blocks
, block
->index
);
2249 FOR_EACH_EDGE (e
, ei
, block
->preds
)
2250 bitmap_set_bit (changed_blocks
, e
->src
->index
);
2253 bitmap_clear_bit (changed_blocks
, block
->index
);
2256 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2258 if (!BB_DEFERRED (block
) || BB_VISITED (block
))
2261 print_bitmap_set (dump_file
, ANTIC_OUT
, "ANTIC_OUT", block
->index
);
2263 print_bitmap_set (dump_file
, ANTIC_IN (block
), "ANTIC_IN",
2267 print_bitmap_set (dump_file
, S
, "S", block
->index
);
2272 "Block %d was deferred for a future iteration.\n",
2277 bitmap_set_free (old
);
2279 bitmap_set_free (S
);
2281 bitmap_set_free (ANTIC_OUT
);
/* Compute PARTIAL_ANTIC for BLOCK.

   If succs(BLOCK) > 1 then
     PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
     in ANTIC_OUT for all succ(BLOCK)
   else if succs(BLOCK) == 1 then
     PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])

   PA_IN[BLOCK] = dependent_clean (PA_OUT[BLOCK] - TMP_GEN[BLOCK]
				   - ANTIC_IN[BLOCK])
*/
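/* The following is an illustrative, standalone sketch of the dataflow step
   described above, kept under #if 0 because it is not part of the pass.
   It uses plain word-sized bitsets instead of bitmap_set_t, so it only
   distinguishes value ids, not expressions; all names in it are
   hypothetical.  */
#if 0
typedef unsigned long sketch_valset;	/* One bit per value id; <= 64 values.  */

/* PA_OUT = union over every successor S of
   (PA_IN[S] | (ANTIC_IN[S] & ~ANTIC_OUT)), per the equations above.  */
static sketch_valset
sketch_pa_out (const sketch_valset *pa_in, const sketch_valset *antic_in,
	       sketch_valset antic_out, const int *succs, int nsuccs)
{
  sketch_valset out = 0;
  int i;
  for (i = 0; i < nsuccs; i++)
    {
      out |= pa_in[succs[i]];
      out |= antic_in[succs[i]] & ~antic_out;
    }
  return out;
}

/* PA_IN = (PA_OUT - TMP_GEN) - ANTIC_IN, before dependent_clean runs.  */
static sketch_valset
sketch_pa_in (sketch_valset pa_out, sketch_valset tmp_gen,
	      sketch_valset antic_in)
{
  return (pa_out & ~tmp_gen) & ~antic_in;
}
#endif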
static bool
compute_partial_antic_aux (basic_block block,
			   bool block_has_abnormal_pred_edge)
{
  bool changed = false;
  bitmap_set_t old_PA_IN;
  bitmap_set_t PA_OUT;
  edge e;
  edge_iterator ei;
  unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);

  old_PA_IN = PA_OUT = NULL;

  /* If any edges from predecessors are abnormal, antic_in is empty,
     so do nothing.  */
  if (block_has_abnormal_pred_edge)
    goto maybe_dump_sets;

  /* If there are too many partially anticipatable values in the
     block, phi_translate_set can take an exponential time: stop
     before the translation starts.  */
  if (max_pa
      && single_succ_p (block)
      && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
    goto maybe_dump_sets;

  old_PA_IN = PA_IN (block);
  PA_OUT = bitmap_set_new ();

  /* If the block has no successors, ANTIC_OUT is empty.  */
  if (EDGE_COUNT (block->succs) == 0)
    ;
  /* If we have one successor, we could have some phi nodes to
     translate through.  Note that we can't phi translate across DFS
     back edges in partial antic, because it uses a union operation on
     the successors.  For recurrences like IV's, we will end up
     generating a new value in the set on each go around (i + 3 (VH.1),
     VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever.  */
  else if (single_succ_p (block))
    {
      basic_block succ = single_succ (block);
      if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
	phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
    }
  /* If we have multiple successors, we take the union of all of
     them.  */
  else
    {
      vec<basic_block> worklist;
      size_t i;
      basic_block bprime;

      worklist.create (EDGE_COUNT (block->succs));
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    continue;
	  worklist.quick_push (e->dest);
	}
      if (worklist.length () > 0)
	{
	  FOR_EACH_VEC_ELT (worklist, i, bprime)
	    {
	      unsigned int i;
	      bitmap_iterator bi;

	      FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
		bitmap_value_insert_into_set (PA_OUT,
					      expression_for_id (i));
	      if (!gimple_seq_empty_p (phi_nodes (bprime)))
		{
		  bitmap_set_t pa_in = bitmap_set_new ();
		  phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
		  FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
		    bitmap_value_insert_into_set (PA_OUT,
						  expression_for_id (i));
		  bitmap_set_free (pa_in);
		}
	      else
		FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
		  bitmap_value_insert_into_set (PA_OUT,
						expression_for_id (i));
	    }
	}
      worklist.release ();
    }

  /* Prune expressions that are clobbered in block and thus become
     invalid if translated from PA_OUT to PA_IN.  */
  prune_clobbered_mems (PA_OUT, block);

  /* PA_IN starts with PA_OUT - TMP_GEN.
     Then we subtract things from ANTIC_IN.  */
  PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));

  /* For partial antic, we want to put back in the phi results, since
     we will properly avoid making them partially antic over backedges.  */
  bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
  bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);

  /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
  bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));

  dependent_clean (PA_IN (block), ANTIC_IN (block), block);

  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
    {
      changed = true;
      bitmap_set_bit (changed_blocks, block->index);
      FOR_EACH_EDGE (e, ei, block->preds)
	bitmap_set_bit (changed_blocks, e->src->index);
    }
  else
    bitmap_clear_bit (changed_blocks, block->index);

 maybe_dump_sets:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (PA_OUT)
	print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);

      print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
    }
  if (old_PA_IN)
    bitmap_set_free (old_PA_IN);
  if (PA_OUT)
    bitmap_set_free (PA_OUT);
  return changed;
}
/* Compute ANTIC and partial ANTIC sets.  */
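/* An illustrative, standalone sketch of the fixpoint driver used below,
   kept under #if 0 because it is not part of the pass.  The recompute
   callback stands in for compute_antic_aux / compute_partial_antic_aux:
   it is expected to clear the block's own changed bit and set the bits of
   its predecessors as a side effect.  All names here are hypothetical.  */
#if 0
#include <stdbool.h>

typedef bool (*sketch_recompute_fn) (int block);

static void
sketch_fixpoint (int nblocks, const int *postorder,
		 bool *changed_blocks, sketch_recompute_fn recompute)
{
  bool changed = true;
  while (changed)
    {
      int i;
      changed = false;
      /* Reverse postorder visits successors before predecessors, the
	 natural visit order for a backwards problem such as ANTIC.  */
      for (i = nblocks - 1; i >= 0; i--)
	if (changed_blocks[postorder[i]])
	  changed |= recompute (postorder[i]);
    }
}
#endif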
static void
compute_antic (void)
{
  bool changed = true;
  int num_iterations = 0;
  basic_block block;
  int i;

  /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
     We pre-build the map of blocks with incoming abnormal edges here.  */
  has_abnormal_preds = sbitmap_alloc (last_basic_block);
  bitmap_clear (has_abnormal_preds);

  FOR_ALL_BB (block)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, block->preds)
	{
	  e->flags &= ~EDGE_DFS_BACK;
	  if (e->flags & EDGE_ABNORMAL)
	    {
	      bitmap_set_bit (has_abnormal_preds, block->index);
	      break;
	    }
	}

      BB_VISITED (block) = 0;
      BB_DEFERRED (block) = 0;

      /* While we are here, give empty ANTIC_IN sets to each block.  */
      ANTIC_IN (block) = bitmap_set_new ();
      PA_IN (block) = bitmap_set_new ();
    }

  /* At the exit block we anticipate nothing.  */
  BB_VISITED (EXIT_BLOCK_PTR) = 1;

  changed_blocks = sbitmap_alloc (last_basic_block + 1);
  bitmap_ones (changed_blocks);
  while (changed)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", num_iterations);
      /* ???  We need to clear our PHI translation cache here as the
	 ANTIC sets shrink and we restrict valid translations to
	 those having operands with leaders in ANTIC.  Same below
	 for PA ANTIC computation.  */
      num_iterations++;
      changed = false;
      for (i = postorder_num - 1; i >= 0; i--)
	{
	  if (bitmap_bit_p (changed_blocks, postorder[i]))
	    {
	      basic_block block = BASIC_BLOCK (postorder[i]);
	      changed |= compute_antic_aux (block,
					    bitmap_bit_p (has_abnormal_preds,
							  block->index));
	    }
	}
      /* Theoretically possible, but *highly* unlikely.  */
      gcc_checking_assert (num_iterations < 500);
    }

  statistics_histogram_event (cfun, "compute_antic iterations",
			      num_iterations);

  if (do_partial_partial)
    {
      bitmap_ones (changed_blocks);
      mark_dfs_back_edges ();
      num_iterations = 0;
      changed = true;
      while (changed)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Starting iteration %d\n", num_iterations);
	  num_iterations++;
	  changed = false;
	  for (i = postorder_num - 1; i >= 0; i--)
	    {
	      if (bitmap_bit_p (changed_blocks, postorder[i]))
		{
		  basic_block block = BASIC_BLOCK (postorder[i]);
		  changed
		    |= compute_partial_antic_aux (block,
						  bitmap_bit_p (has_abnormal_preds,
								block->index));
		}
	    }
	  /* Theoretically possible, but *highly* unlikely.  */
	  gcc_checking_assert (num_iterations < 500);
	}
      statistics_histogram_event (cfun, "compute_partial_antic iterations",
				  num_iterations);
    }
  sbitmap_free (has_abnormal_preds);
  sbitmap_free (changed_blocks);
}
/* Inserted expressions are placed onto this worklist, which is used
   for performing quick dead code elimination of insertions we made
   that didn't turn out to be necessary.  */
static bitmap inserted_exprs;

/* The actual worker for create_component_ref_by_pieces.  */
2539 create_component_ref_by_pieces_1 (basic_block block
, vn_reference_t ref
,
2540 unsigned int *operand
, gimple_seq
*stmts
)
2542 vn_reference_op_t currop
= &ref
->operands
[*operand
];
2545 switch (currop
->opcode
)
2549 tree folded
, sc
= NULL_TREE
;
2550 unsigned int nargs
= 0;
2552 if (TREE_CODE (currop
->op0
) == FUNCTION_DECL
)
2555 fn
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2560 sc
= find_or_generate_expression (block
, currop
->op1
, stmts
);
2564 args
= XNEWVEC (tree
, ref
->operands
.length () - 1);
2565 while (*operand
< ref
->operands
.length ())
2567 args
[nargs
] = create_component_ref_by_pieces_1 (block
, ref
,
2573 folded
= build_call_array (currop
->type
,
2574 (TREE_CODE (fn
) == FUNCTION_DECL
2575 ? build_fold_addr_expr (fn
) : fn
),
2579 CALL_EXPR_STATIC_CHAIN (folded
) = sc
;
2585 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2589 tree offset
= currop
->op0
;
2590 if (TREE_CODE (baseop
) == ADDR_EXPR
2591 && handled_component_p (TREE_OPERAND (baseop
, 0)))
2595 base
= get_addr_base_and_unit_offset (TREE_OPERAND (baseop
, 0),
2598 offset
= int_const_binop (PLUS_EXPR
, offset
,
2599 build_int_cst (TREE_TYPE (offset
),
2601 baseop
= build_fold_addr_expr (base
);
2603 return fold_build2 (MEM_REF
, currop
->type
, baseop
, offset
);
2606 case TARGET_MEM_REF
:
2608 tree genop0
= NULL_TREE
, genop1
= NULL_TREE
;
2609 vn_reference_op_t nextop
= &ref
->operands
[++*operand
];
2610 tree baseop
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2616 genop0
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2622 genop1
= find_or_generate_expression (block
, nextop
->op0
, stmts
);
2626 return build5 (TARGET_MEM_REF
, currop
->type
,
2627 baseop
, currop
->op2
, genop0
, currop
->op1
, genop1
);
2633 gcc_assert (is_gimple_min_invariant (currop
->op0
));
2639 case VIEW_CONVERT_EXPR
:
2641 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2645 return fold_build1 (currop
->opcode
, currop
->type
, genop0
);
2648 case WITH_SIZE_EXPR
:
2650 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2654 tree genop1
= find_or_generate_expression (block
, currop
->op0
, stmts
);
2657 return fold_build2 (currop
->opcode
, currop
->type
, genop0
, genop1
);
2662 tree genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2666 tree op1
= currop
->op0
;
2667 tree op2
= currop
->op1
;
2668 return fold_build3 (BIT_FIELD_REF
, currop
->type
, genop0
, op1
, op2
);
2671 /* For array ref vn_reference_op's, operand 1 of the array ref
2672 is op0 of the reference op and operand 3 of the array ref is
2674 case ARRAY_RANGE_REF
:
2678 tree genop1
= currop
->op0
;
2679 tree genop2
= currop
->op1
;
2680 tree genop3
= currop
->op2
;
2681 genop0
= create_component_ref_by_pieces_1 (block
, ref
, operand
,
2685 genop1
= find_or_generate_expression (block
, genop1
, stmts
);
2690 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (genop0
));
2691 /* Drop zero minimum index if redundant. */
2692 if (integer_zerop (genop2
)
2694 || integer_zerop (TYPE_MIN_VALUE (domain_type
))))
2698 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2705 tree elmt_type
= TREE_TYPE (TREE_TYPE (genop0
));
2706 /* We can't always put a size in units of the element alignment
2707 here as the element alignment may be not visible. See
2708 PR43783. Simply drop the element size for constant
2710 if (tree_int_cst_equal (genop3
, TYPE_SIZE_UNIT (elmt_type
)))
2714 genop3
= size_binop (EXACT_DIV_EXPR
, genop3
,
2715 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
2716 genop3
= find_or_generate_expression (block
, genop3
, stmts
);
2721 return build4 (currop
->opcode
, currop
->type
, genop0
, genop1
,
2728 tree genop2
= currop
->op1
;
2729 op0
= create_component_ref_by_pieces_1 (block
, ref
, operand
, stmts
);
2732 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2736 genop2
= find_or_generate_expression (block
, genop2
, stmts
);
2740 return fold_build3 (COMPONENT_REF
, TREE_TYPE (op1
), op0
, op1
, genop2
);
2745 genop
= find_or_generate_expression (block
, currop
->op0
, stmts
);
/* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
   trying to rename aggregates into ssa form directly, which is a no-no.

   Thus, this routine doesn't create temporaries, it just builds a
   single access expression for the array, calling
   find_or_generate_expression to build the innermost pieces.

   This function is a subroutine of create_expression_by_pieces, and
   should not be called on its own unless you really know what you
   are doing.  */

static tree
create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
				gimple_seq *stmts)
{
  unsigned int op = 0;
  return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
}
/* Find a simple leader for an expression, or generate one using
   create_expression_by_pieces from a NARY expression for the value.
   BLOCK is the basic_block we are looking for leaders in.
   OP is the tree expression to find a leader for or generate.
   Returns the leader or NULL_TREE on failure.  */

static tree
find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
{
  pre_expr expr = get_or_alloc_expr_for (op);
  unsigned int lookfor = get_expr_value_id (expr);
  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
  if (leader)
    {
      if (leader->kind == NAME)
	return PRE_EXPR_NAME (leader);
      else if (leader->kind == CONSTANT)
	return PRE_EXPR_CONSTANT (leader);
    }

  /* It must be a complex expression, so generate it recursively.  Note
     that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
     where the insert algorithm fails to insert a required expression.  */
  bitmap exprset = value_expressions[lookfor];
  unsigned int i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr temp = expression_for_id (i);
      /* We cannot insert random REFERENCE expressions at arbitrary
	 places.  We can insert NARYs, which eventually re-materialize
	 their operand values.  */
      if (temp->kind == NARY)
	return create_expression_by_pieces (block, temp, stmts,
					    get_expr_type (expr));
    }

  return NULL_TREE;
}
#define NECESSARY GF_PLF_1

/* Create an expression in pieces, so that we can handle very complex
   expressions that may be ANTIC, but not necessarily GIMPLE.
   BLOCK is the basic block the expression will be inserted into,
   EXPR is the expression to insert (in value form),
   STMTS is a statement list to append the necessary insertions into.

   This function will die if we hit some value that shouldn't be
   ANTIC but is (i.e. there is no leader for it, or its components).
   The function returns NULL_TREE in case a different antic expression
   has to be inserted first.
   This function may also generate expressions that are themselves
   partially or fully redundant.  Those that are will be either made
   fully redundant during the next iteration of insert (for partially
   redundant ones), or eliminated by eliminate (for fully redundant
   ones).  */
2849 create_expression_by_pieces (basic_block block
, pre_expr expr
,
2850 gimple_seq
*stmts
, tree type
)
2854 gimple_seq forced_stmts
= NULL
;
2855 unsigned int value_id
;
2856 gimple_stmt_iterator gsi
;
2857 tree exprtype
= type
? type
: get_expr_type (expr
);
2863 /* We may hit the NAME/CONSTANT case if we have to convert types
2864 that value numbering saw through. */
2866 folded
= PRE_EXPR_NAME (expr
);
2869 folded
= PRE_EXPR_CONSTANT (expr
);
2873 vn_reference_t ref
= PRE_EXPR_REFERENCE (expr
);
2874 folded
= create_component_ref_by_pieces (block
, ref
, stmts
);
2881 vn_nary_op_t nary
= PRE_EXPR_NARY (expr
);
2882 tree
*genop
= XALLOCAVEC (tree
, nary
->length
);
2884 for (i
= 0; i
< nary
->length
; ++i
)
2886 genop
[i
] = find_or_generate_expression (block
, nary
->op
[i
], stmts
);
2889 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2890 may have conversions stripped. */
2891 if (nary
->opcode
== POINTER_PLUS_EXPR
)
2894 genop
[i
] = fold_convert (nary
->type
, genop
[i
]);
2896 genop
[i
] = convert_to_ptrofftype (genop
[i
]);
2899 genop
[i
] = fold_convert (TREE_TYPE (nary
->op
[i
]), genop
[i
]);
2901 if (nary
->opcode
== CONSTRUCTOR
)
2903 vec
<constructor_elt
, va_gc
> *elts
= NULL
;
2904 for (i
= 0; i
< nary
->length
; ++i
)
2905 CONSTRUCTOR_APPEND_ELT (elts
, NULL_TREE
, genop
[i
]);
2906 folded
= build_constructor (nary
->type
, elts
);
2910 switch (nary
->length
)
2913 folded
= fold_build1 (nary
->opcode
, nary
->type
,
2917 folded
= fold_build2 (nary
->opcode
, nary
->type
,
2918 genop
[0], genop
[1]);
2921 folded
= fold_build3 (nary
->opcode
, nary
->type
,
2922 genop
[0], genop
[1], genop
[2]);
2934 if (!useless_type_conversion_p (exprtype
, TREE_TYPE (folded
)))
2935 folded
= fold_convert (exprtype
, folded
);
2937 /* Force the generated expression to be a sequence of GIMPLE
2939 We have to call unshare_expr because force_gimple_operand may
2940 modify the tree we pass to it. */
2941 folded
= force_gimple_operand (unshare_expr (folded
), &forced_stmts
,
2944 /* If we have any intermediate expressions to the value sets, add them
2945 to the value sets and chain them in the instruction stream. */
2948 gsi
= gsi_start (forced_stmts
);
2949 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
2951 gimple stmt
= gsi_stmt (gsi
);
2952 tree forcedname
= gimple_get_lhs (stmt
);
2955 if (TREE_CODE (forcedname
) == SSA_NAME
)
2957 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (forcedname
));
2958 VN_INFO_GET (forcedname
)->valnum
= forcedname
;
2959 VN_INFO (forcedname
)->value_id
= get_next_value_id ();
2960 nameexpr
= get_or_alloc_expr_for_name (forcedname
);
2961 add_to_value (VN_INFO (forcedname
)->value_id
, nameexpr
);
2962 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2963 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2966 gimple_seq_add_seq (stmts
, forced_stmts
);
2969 name
= make_temp_ssa_name (exprtype
, NULL
, "pretmp");
2970 newstmt
= gimple_build_assign (name
, folded
);
2971 gimple_set_plf (newstmt
, NECESSARY
, false);
2973 gimple_seq_add_stmt (stmts
, newstmt
);
2974 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (name
));
2976 /* Fold the last statement. */
2977 gsi
= gsi_last (*stmts
);
2978 if (fold_stmt_inplace (&gsi
))
2979 update_stmt (gsi_stmt (gsi
));
2981 /* Add a value number to the temporary.
2982 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2983 we are creating the expression by pieces, and this particular piece of
2984 the expression may have been represented. There is no harm in replacing
2986 value_id
= get_expr_value_id (expr
);
2987 VN_INFO_GET (name
)->value_id
= value_id
;
2988 VN_INFO (name
)->valnum
= sccvn_valnum_from_value_id (value_id
);
2989 if (VN_INFO (name
)->valnum
== NULL_TREE
)
2990 VN_INFO (name
)->valnum
= name
;
2991 gcc_assert (VN_INFO (name
)->valnum
!= NULL_TREE
);
2992 nameexpr
= get_or_alloc_expr_for_name (name
);
2993 add_to_value (value_id
, nameexpr
);
2994 if (NEW_SETS (block
))
2995 bitmap_value_replace_in_set (NEW_SETS (block
), nameexpr
);
2996 bitmap_value_replace_in_set (AVAIL_OUT (block
), nameexpr
);
2998 pre_stats
.insertions
++;
2999 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3001 fprintf (dump_file
, "Inserted ");
3002 print_gimple_stmt (dump_file
, newstmt
, 0, 0);
3003 fprintf (dump_file
, " in predecessor %d (%04d)\n",
3004 block
->index
, value_id
);
/* Returns true if we want to inhibit the insertion of PHI nodes
   for the given EXPR for basic block BB (a member of a loop).
   We want to do this when we fear that the induction variable we
   create might inhibit vectorization.  */
3017 inhibit_phi_insertion (basic_block bb
, pre_expr expr
)
3019 vn_reference_t vr
= PRE_EXPR_REFERENCE (expr
);
3020 vec
<vn_reference_op_s
> ops
= vr
->operands
;
3021 vn_reference_op_t op
;
3024 /* If we aren't going to vectorize we don't inhibit anything. */
3025 if (!flag_tree_loop_vectorize
)
3028 /* Otherwise we inhibit the insertion when the address of the
3029 memory reference is a simple induction variable. In other
3030 cases the vectorizer won't do anything anyway (either it's
3031 loop invariant or a complicated expression). */
3032 FOR_EACH_VEC_ELT (ops
, i
, op
)
3037 /* Calls are not a problem. */
3041 case ARRAY_RANGE_REF
:
3042 if (TREE_CODE (op
->op0
) != SSA_NAME
)
3047 basic_block defbb
= gimple_bb (SSA_NAME_DEF_STMT (op
->op0
));
3049 /* Default defs are loop invariant. */
3052 /* Defined outside this loop, also loop invariant. */
3053 if (!flow_bb_inside_loop_p (bb
->loop_father
, defbb
))
3055 /* If it's a simple induction variable inhibit insertion,
3056 the vectorizer might be interested in this one. */
3057 if (simple_iv (bb
->loop_father
, bb
->loop_father
,
3058 op
->op0
, &iv
, true))
3060 /* No simple IV, vectorizer can't do anything, hence no
3061 reason to inhibit the transformation for this operand. */
/* Insert the to-be-made-available values of expression EXPRNUM for each
   predecessor, stored in AVAIL, into the predecessors of BLOCK, and
   merge the result with a phi node, given the same value number as
   NODE.  Return true if we have inserted new stuff.  */
3077 insert_into_preds_of_block (basic_block block
, unsigned int exprnum
,
3078 vec
<pre_expr
> avail
)
3080 pre_expr expr
= expression_for_id (exprnum
);
3082 unsigned int val
= get_expr_value_id (expr
);
3084 bool insertions
= false;
3089 tree type
= get_expr_type (expr
);
3093 /* Make sure we aren't creating an induction variable. */
3094 if (bb_loop_depth (block
) > 0 && EDGE_COUNT (block
->preds
) == 2)
3096 bool firstinsideloop
= false;
3097 bool secondinsideloop
= false;
3098 firstinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3099 EDGE_PRED (block
, 0)->src
);
3100 secondinsideloop
= flow_bb_inside_loop_p (block
->loop_father
,
3101 EDGE_PRED (block
, 1)->src
);
3102 /* Induction variables only have one edge inside the loop. */
3103 if ((firstinsideloop
^ secondinsideloop
)
3104 && (expr
->kind
!= REFERENCE
3105 || inhibit_phi_insertion (block
, expr
)))
3107 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3108 fprintf (dump_file
, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3113 /* Make the necessary insertions. */
3114 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3116 gimple_seq stmts
= NULL
;
3119 eprime
= avail
[pred
->dest_idx
];
3121 if (eprime
->kind
!= NAME
&& eprime
->kind
!= CONSTANT
)
3123 builtexpr
= create_expression_by_pieces (bprime
, eprime
,
3125 gcc_assert (!(pred
->flags
& EDGE_ABNORMAL
));
3126 gsi_insert_seq_on_edge (pred
, stmts
);
3129 /* We cannot insert a PHI node if we failed to insert
3134 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (builtexpr
);
3137 else if (eprime
->kind
== CONSTANT
)
3139 /* Constants may not have the right type, fold_convert
3140 should give us back a constant with the right type. */
3141 tree constant
= PRE_EXPR_CONSTANT (eprime
);
3142 if (!useless_type_conversion_p (type
, TREE_TYPE (constant
)))
3144 tree builtexpr
= fold_convert (type
, constant
);
3145 if (!is_gimple_min_invariant (builtexpr
))
3147 tree forcedexpr
= force_gimple_operand (builtexpr
,
3150 if (!is_gimple_min_invariant (forcedexpr
))
3152 if (forcedexpr
!= builtexpr
)
3154 VN_INFO_GET (forcedexpr
)->valnum
= PRE_EXPR_CONSTANT (eprime
);
3155 VN_INFO (forcedexpr
)->value_id
= get_expr_value_id (eprime
);
3159 gimple_stmt_iterator gsi
;
3160 gsi
= gsi_start (stmts
);
3161 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3163 gimple stmt
= gsi_stmt (gsi
);
3164 tree lhs
= gimple_get_lhs (stmt
);
3165 if (TREE_CODE (lhs
) == SSA_NAME
)
3166 bitmap_set_bit (inserted_exprs
,
3167 SSA_NAME_VERSION (lhs
));
3168 gimple_set_plf (stmt
, NECESSARY
, false);
3170 gsi_insert_seq_on_edge (pred
, stmts
);
3172 avail
[pred
->dest_idx
]
3173 = get_or_alloc_expr_for_name (forcedexpr
);
3177 avail
[pred
->dest_idx
]
3178 = get_or_alloc_expr_for_constant (builtexpr
);
3181 else if (eprime
->kind
== NAME
)
3183 /* We may have to do a conversion because our value
3184 numbering can look through types in certain cases, but
3185 our IL requires all operands of a phi node have the same
3187 tree name
= PRE_EXPR_NAME (eprime
);
3188 if (!useless_type_conversion_p (type
, TREE_TYPE (name
)))
3192 builtexpr
= fold_convert (type
, name
);
3193 forcedexpr
= force_gimple_operand (builtexpr
,
3197 if (forcedexpr
!= name
)
3199 VN_INFO_GET (forcedexpr
)->valnum
= VN_INFO (name
)->valnum
;
3200 VN_INFO (forcedexpr
)->value_id
= VN_INFO (name
)->value_id
;
3205 gimple_stmt_iterator gsi
;
3206 gsi
= gsi_start (stmts
);
3207 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3209 gimple stmt
= gsi_stmt (gsi
);
3210 tree lhs
= gimple_get_lhs (stmt
);
3211 if (TREE_CODE (lhs
) == SSA_NAME
)
3212 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (lhs
));
3213 gimple_set_plf (stmt
, NECESSARY
, false);
3215 gsi_insert_seq_on_edge (pred
, stmts
);
3217 avail
[pred
->dest_idx
] = get_or_alloc_expr_for_name (forcedexpr
);
3221 /* If we didn't want a phi node, and we made insertions, we still have
3222 inserted new stuff, and thus return true. If we didn't want a phi node,
3223 and didn't make insertions, we haven't added anything new, so return
3225 if (nophi
&& insertions
)
3227 else if (nophi
&& !insertions
)
3230 /* Now build a phi for the new variable. */
3231 temp
= make_temp_ssa_name (type
, NULL
, "prephitmp");
3232 phi
= create_phi_node (temp
, block
);
3234 gimple_set_plf (phi
, NECESSARY
, false);
3235 VN_INFO_GET (temp
)->value_id
= val
;
3236 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3237 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3238 VN_INFO (temp
)->valnum
= temp
;
3239 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3240 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3242 pre_expr ae
= avail
[pred
->dest_idx
];
3243 gcc_assert (get_expr_type (ae
) == type
3244 || useless_type_conversion_p (type
, get_expr_type (ae
)));
3245 if (ae
->kind
== CONSTANT
)
3246 add_phi_arg (phi
, unshare_expr (PRE_EXPR_CONSTANT (ae
)),
3247 pred
, UNKNOWN_LOCATION
);
3249 add_phi_arg (phi
, PRE_EXPR_NAME (ae
), pred
, UNKNOWN_LOCATION
);
3252 newphi
= get_or_alloc_expr_for_name (temp
);
3253 add_to_value (val
, newphi
);
3255 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3256 this insertion, since we test for the existence of this value in PHI_GEN
3257 before proceeding with the partial redundancy checks in insert_aux.
3259 The value may exist in AVAIL_OUT, in particular, it could be represented
3260 by the expression we are trying to eliminate, in which case we want the
3261 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3264 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of
3265 this block, because if it did, it would have existed in our dominator's
3266 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3269 bitmap_insert_into_set (PHI_GEN (block
), newphi
);
3270 bitmap_value_replace_in_set (AVAIL_OUT (block
),
3272 bitmap_insert_into_set (NEW_SETS (block
),
3275 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3277 fprintf (dump_file
, "Created phi ");
3278 print_gimple_stmt (dump_file
, phi
, 0, 0);
3279 fprintf (dump_file
, " in block %d (%04d)\n", block
->index
, val
);
/* Perform insertion of partially redundant values.
   For BLOCK, do the following:
   1.  Propagate the NEW_SETS of the dominator into the current block.
   If the block has multiple predecessors,
       2a. Iterate over the ANTIC expressions for the block to see if
	   any of them are partially redundant.
       2b. If so, insert them into the necessary predecessors to make
	   the expression fully redundant.
       2c. Insert a new PHI merging the values of the predecessors.
       2d. Insert the new PHI, and the new expressions, into the
	   NEW_SETS set.
   3. Recursively call ourselves on the dominator children of BLOCK.

   Steps 1, 2a, and 3 are done by insert_aux.  2b, 2c and 2d are done by
   do_regular_insertion and do_partial_insertion.  */
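/* An illustrative, standalone sketch of the per-expression test applied by
   do_regular_insertion below, kept under #if 0 because it is not part of
   the pass.  Plain integer arrays stand in for the per-predecessor AVAIL
   leaders; all names here are hypothetical.  The real code additionally
   requires that some predecessor edge lacking the value be optimized for
   speed before it actually inserts.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

/* leader[i] describes the leader found in predecessor i after phi
   translation: a positive value id if one is available, 0 if none, and
   -1 if the translation itself failed.  */
static bool
sketch_partially_redundant_p (const int *leader, size_t npreds)
{
  bool by_some = false, all_same = true;
  int first = 0;
  size_t i;

  for (i = 0; i < npreds; i++)
    {
      if (leader[i] < 0)
	return false;		/* cant_insert: translation failed.  */
      if (leader[i] == 0)
	all_same = false;	/* Missing in this predecessor.  */
      else
	{
	  by_some = true;
	  if (first == 0)
	    first = leader[i];
	  else if (leader[i] != first)
	    all_same = false;
	}
    }
  /* Available somewhere, but not everywhere with a single leader: worth
     making fully redundant by inserting into the predecessors that lack it.  */
  return by_some && !all_same;
}
#endif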
3306 do_regular_insertion (basic_block block
, basic_block dom
)
3308 bool new_stuff
= false;
3309 vec
<pre_expr
> exprs
;
3311 vec
<pre_expr
> avail
= vNULL
;
3314 exprs
= sorted_array_from_bitmap_set (ANTIC_IN (block
));
3315 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3317 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3319 if (expr
->kind
== NARY
3320 || expr
->kind
== REFERENCE
)
3323 bool by_some
= false;
3324 bool cant_insert
= false;
3325 bool all_same
= true;
3326 pre_expr first_s
= NULL
;
3329 pre_expr eprime
= NULL
;
3331 pre_expr edoubleprime
= NULL
;
3332 bool do_insertion
= false;
3334 val
= get_expr_value_id (expr
);
3335 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3337 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3339 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3341 fprintf (dump_file
, "Found fully redundant value: ");
3342 print_pre_expr (dump_file
, expr
);
3343 fprintf (dump_file
, "\n");
3348 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3350 unsigned int vprime
;
3352 /* We should never run insertion for the exit block
3353 and so not come across fake pred edges. */
3354 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3356 eprime
= phi_translate (expr
, ANTIC_IN (block
), NULL
,
3359 /* eprime will generally only be NULL if the
3360 value of the expression, translated
3361 through the PHI for this predecessor, is
3362 undefined. If that is the case, we can't
3363 make the expression fully redundant,
3364 because its value is undefined along a
3365 predecessor path. We can thus break out
3366 early because it doesn't matter what the
3367 rest of the results are. */
3370 avail
[pred
->dest_idx
] = NULL
;
3375 eprime
= fully_constant_expression (eprime
);
3376 vprime
= get_expr_value_id (eprime
);
3377 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
),
3379 if (edoubleprime
== NULL
)
3381 avail
[pred
->dest_idx
] = eprime
;
3386 avail
[pred
->dest_idx
] = edoubleprime
;
3388 /* We want to perform insertions to remove a redundancy on
3389 a path in the CFG we want to optimize for speed. */
3390 if (optimize_edge_for_speed_p (pred
))
3391 do_insertion
= true;
3392 if (first_s
== NULL
)
3393 first_s
= edoubleprime
;
3394 else if (!pre_expr_d::equal (first_s
, edoubleprime
))
3398 /* If we can insert it, it's not the same value
3399 already existing along every predecessor, and
3400 it's defined by some predecessor, it is
3401 partially redundant. */
3402 if (!cant_insert
&& !all_same
&& by_some
)
3406 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3408 fprintf (dump_file
, "Skipping partial redundancy for "
3410 print_pre_expr (dump_file
, expr
);
3411 fprintf (dump_file
, " (%04d), no redundancy on to be "
3412 "optimized for speed edge\n", val
);
3415 else if (dbg_cnt (treepre_insert
))
3417 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3419 fprintf (dump_file
, "Found partial redundancy for "
3421 print_pre_expr (dump_file
, expr
);
3422 fprintf (dump_file
, " (%04d)\n",
3423 get_expr_value_id (expr
));
3425 if (insert_into_preds_of_block (block
,
3426 get_expression_id (expr
),
3431 /* If all edges produce the same value and that value is
3432 an invariant, then the PHI has the same value on all
3433 edges. Note this. */
3434 else if (!cant_insert
&& all_same
)
3436 gcc_assert (edoubleprime
->kind
== CONSTANT
3437 || edoubleprime
->kind
== NAME
);
3439 tree temp
= make_temp_ssa_name (get_expr_type (expr
),
3441 gimple assign
= gimple_build_assign (temp
,
3442 edoubleprime
->kind
== CONSTANT
? PRE_EXPR_CONSTANT (edoubleprime
) : PRE_EXPR_NAME (edoubleprime
));
3443 gimple_stmt_iterator gsi
= gsi_after_labels (block
);
3444 gsi_insert_before (&gsi
, assign
, GSI_NEW_STMT
);
3446 gimple_set_plf (assign
, NECESSARY
, false);
3447 VN_INFO_GET (temp
)->value_id
= val
;
3448 VN_INFO (temp
)->valnum
= sccvn_valnum_from_value_id (val
);
3449 if (VN_INFO (temp
)->valnum
== NULL_TREE
)
3450 VN_INFO (temp
)->valnum
= temp
;
3451 bitmap_set_bit (inserted_exprs
, SSA_NAME_VERSION (temp
));
3452 pre_expr newe
= get_or_alloc_expr_for_name (temp
);
3453 add_to_value (val
, newe
);
3454 bitmap_value_replace_in_set (AVAIL_OUT (block
), newe
);
3455 bitmap_insert_into_set (NEW_SETS (block
), newe
);
/* Perform insertion for partially anticipatable expressions.  There
   is only one case we will perform insertion for these.  This case is
   if the expression is partially anticipatable, and fully available.
   In this case, we know that putting it earlier will enable us to
   remove the later computation.  */
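/* An illustrative, standalone sketch of that gate, kept under #if 0
   because it is not part of the pass.  Plain boolean arrays stand in for
   the AVAIL_OUT / PA_IN / ANTIC_IN queries and the profile test; all names
   here are hypothetical.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

static bool
sketch_do_pa_insertion_p (const bool *avail_in_pred, size_t npreds,
			  const bool *anticipated_in_succ,
			  const bool *succ_on_speed_path, size_t nsuccs)
{
  size_t i;
  for (i = 0; i < npreds; i++)
    if (!avail_in_pred[i])
      return false;	/* Not fully available: hoisting gains nothing.  */
  for (i = 0; i < nsuccs; i++)
    if (anticipated_in_succ[i] && succ_on_speed_path[i])
      return true;	/* Removes a later computation on a speed path.  */
  return false;
}
#endif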
3474 do_partial_partial_insertion (basic_block block
, basic_block dom
)
3476 bool new_stuff
= false;
3477 vec
<pre_expr
> exprs
;
3479 vec
<pre_expr
> avail
= vNULL
;
3482 exprs
= sorted_array_from_bitmap_set (PA_IN (block
));
3483 avail
.safe_grow (EDGE_COUNT (block
->preds
));
3485 FOR_EACH_VEC_ELT (exprs
, i
, expr
)
3487 if (expr
->kind
== NARY
3488 || expr
->kind
== REFERENCE
)
3492 bool cant_insert
= false;
3495 pre_expr eprime
= NULL
;
3498 val
= get_expr_value_id (expr
);
3499 if (bitmap_set_contains_value (PHI_GEN (block
), val
))
3501 if (bitmap_set_contains_value (AVAIL_OUT (dom
), val
))
3504 FOR_EACH_EDGE (pred
, ei
, block
->preds
)
3506 unsigned int vprime
;
3507 pre_expr edoubleprime
;
3509 /* We should never run insertion for the exit block
3510 and so not come across fake pred edges. */
3511 gcc_assert (!(pred
->flags
& EDGE_FAKE
));
3513 eprime
= phi_translate (expr
, ANTIC_IN (block
),
3517 /* eprime will generally only be NULL if the
3518 value of the expression, translated
3519 through the PHI for this predecessor, is
3520 undefined. If that is the case, we can't
3521 make the expression fully redundant,
3522 because its value is undefined along a
3523 predecessor path. We can thus break out
3524 early because it doesn't matter what the
3525 rest of the results are. */
3528 avail
[pred
->dest_idx
] = NULL
;
3533 eprime
= fully_constant_expression (eprime
);
3534 vprime
= get_expr_value_id (eprime
);
3535 edoubleprime
= bitmap_find_leader (AVAIL_OUT (bprime
), vprime
);
3536 avail
[pred
->dest_idx
] = edoubleprime
;
3537 if (edoubleprime
== NULL
)
3544 /* If we can insert it, it's not the same value
3545 already existing along every predecessor, and
3546 it's defined by some predecessor, it is
3547 partially redundant. */
3548 if (!cant_insert
&& by_all
)
3551 bool do_insertion
= false;
3553 /* Insert only if we can remove a later expression on a path
3554 that we want to optimize for speed.
3555 The phi node that we will be inserting in BLOCK is not free,
3556 and inserting it for the sake of !optimize_for_speed successor
3557 may cause regressions on the speed path. */
3558 FOR_EACH_EDGE (succ
, ei
, block
->succs
)
3560 if (bitmap_set_contains_value (PA_IN (succ
->dest
), val
)
3561 || bitmap_set_contains_value (ANTIC_IN (succ
->dest
), val
))
3563 if (optimize_edge_for_speed_p (succ
))
3564 do_insertion
= true;
3570 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3572 fprintf (dump_file
, "Skipping partial partial redundancy "
3574 print_pre_expr (dump_file
, expr
);
3575 fprintf (dump_file
, " (%04d), not (partially) anticipated "
3576 "on any to be optimized for speed edges\n", val
);
3579 else if (dbg_cnt (treepre_insert
))
3581 pre_stats
.pa_insert
++;
3582 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3584 fprintf (dump_file
, "Found partial partial redundancy "
3586 print_pre_expr (dump_file
, expr
);
3587 fprintf (dump_file
, " (%04d)\n",
3588 get_expr_value_id (expr
));
3590 if (insert_into_preds_of_block (block
,
3591 get_expression_id (expr
),
3605 insert_aux (basic_block block
)
3608 bool new_stuff
= false;
3613 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3618 bitmap_set_t newset
= NEW_SETS (dom
);
3621 /* Note that we need to value_replace both NEW_SETS, and
3622 AVAIL_OUT. For both the case of NEW_SETS, the value may be
3623 represented by some non-simple expression here that we want
3624 to replace it with. */
3625 FOR_EACH_EXPR_ID_IN_SET (newset
, i
, bi
)
3627 pre_expr expr
= expression_for_id (i
);
3628 bitmap_value_replace_in_set (NEW_SETS (block
), expr
);
3629 bitmap_value_replace_in_set (AVAIL_OUT (block
), expr
);
3632 if (!single_pred_p (block
))
3634 new_stuff
|= do_regular_insertion (block
, dom
);
3635 if (do_partial_partial
)
3636 new_stuff
|= do_partial_partial_insertion (block
, dom
);
3640 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3642 son
= next_dom_son (CDI_DOMINATORS
, son
))
3644 new_stuff
|= insert_aux (son
);
3650 /* Perform insertion of partially redundant values. */
3655 bool new_stuff
= true;
3657 int num_iterations
= 0;
3660 NEW_SETS (bb
) = bitmap_set_new ();
3665 if (dump_file
&& dump_flags
& TDF_DETAILS
)
3666 fprintf (dump_file
, "Starting insert iteration %d\n", num_iterations
);
3667 new_stuff
= insert_aux (ENTRY_BLOCK_PTR
);
3669 /* Clear the NEW sets before the next iteration. We have already
3670 fully propagated its contents. */
3673 bitmap_set_free (NEW_SETS (bb
));
3675 statistics_histogram_event (cfun
, "insert iterations", num_iterations
);
/* Compute the AVAIL set for all basic blocks.

   This function performs value numbering of the statements in each basic
   block.  The AVAIL sets are built from information we glean while doing
   this value numbering, since the AVAIL sets contain only one entry per
   value.

   AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
   AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK].  */
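/* An illustrative, standalone sketch of the propagation step in those
   equations, kept under #if 0 because it is not part of the pass.  Plain
   bitsets and an explicit immediate-dominator array stand in for
   bitmap_set_t and the dominance machinery; all names here are
   hypothetical.  */
#if 0
typedef unsigned long sketch_availset;	/* One bit per value id; <= 64 values.  */

/* avail_out[b] must start out as the local generation of b
   (PHI_GEN[b] U TMP_GEN[b]); dom_order lists every block after its
   immediate dominator, and idom[b] is -1 for the root.  One pass then
   realizes AVAIL_IN[b] = AVAIL_OUT[dom(b)].  */
static void
sketch_propagate_avail (sketch_availset *avail_out, const int *idom,
			const int *dom_order, int nblocks)
{
  int i;
  for (i = 0; i < nblocks; i++)
    {
      int b = dom_order[i];
      if (idom[b] >= 0)
	avail_out[b] |= avail_out[idom[b]];
    }
}
#endif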
3690 compute_avail (void)
3693 basic_block block
, son
;
3694 basic_block
*worklist
;
3698 /* We pretend that default definitions are defined in the entry block.
3699 This includes function arguments and the static chain decl. */
3700 for (i
= 1; i
< num_ssa_names
; ++i
)
3702 tree name
= ssa_name (i
);
3705 || !SSA_NAME_IS_DEFAULT_DEF (name
)
3706 || has_zero_uses (name
)
3707 || virtual_operand_p (name
))
3710 e
= get_or_alloc_expr_for_name (name
);
3711 add_to_value (get_expr_value_id (e
), e
);
3712 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR
), e
);
3713 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR
), e
);
3716 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3718 print_bitmap_set (dump_file
, TMP_GEN (ENTRY_BLOCK_PTR
),
3719 "tmp_gen", ENTRY_BLOCK
);
3720 print_bitmap_set (dump_file
, AVAIL_OUT (ENTRY_BLOCK_PTR
),
3721 "avail_out", ENTRY_BLOCK
);
3724 /* Allocate the worklist. */
3725 worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
3727 /* Seed the algorithm by putting the dominator children of the entry
3728 block on the worklist. */
3729 for (son
= first_dom_son (CDI_DOMINATORS
, ENTRY_BLOCK_PTR
);
3731 son
= next_dom_son (CDI_DOMINATORS
, son
))
3732 worklist
[sp
++] = son
;
3734 /* Loop until the worklist is empty. */
3737 gimple_stmt_iterator gsi
;
3741 /* Pick a block from the worklist. */
3742 block
= worklist
[--sp
];
3744 /* Initially, the set of available values in BLOCK is that of
3745 its immediate dominator. */
3746 dom
= get_immediate_dominator (CDI_DOMINATORS
, block
);
3748 bitmap_set_copy (AVAIL_OUT (block
), AVAIL_OUT (dom
));
3750 /* Generate values for PHI nodes. */
3751 for (gsi
= gsi_start_phis (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3753 tree result
= gimple_phi_result (gsi_stmt (gsi
));
3755 /* We have no need for virtual phis, as they don't represent
3756 actual computations. */
3757 if (virtual_operand_p (result
))
3760 pre_expr e
= get_or_alloc_expr_for_name (result
);
3761 add_to_value (get_expr_value_id (e
), e
);
3762 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3763 bitmap_insert_into_set (PHI_GEN (block
), e
);
3766 BB_MAY_NOTRETURN (block
) = 0;
3768 /* Now compute value numbers and populate value sets with all
3769 the expressions computed in BLOCK. */
3770 for (gsi
= gsi_start_bb (block
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3775 stmt
= gsi_stmt (gsi
);
3777 /* Cache whether the basic-block has any non-visible side-effect
3779 If this isn't a call or it is the last stmt in the
3780 basic-block then the CFG represents things correctly. */
3781 if (is_gimple_call (stmt
) && !stmt_ends_bb_p (stmt
))
3783 /* Non-looping const functions always return normally.
3784 Otherwise the call might not return or have side-effects
3785 that forbids hoisting possibly trapping expressions
3787 int flags
= gimple_call_flags (stmt
);
3788 if (!(flags
& ECF_CONST
)
3789 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
3790 BB_MAY_NOTRETURN (block
) = 1;
3793 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_DEF
)
3795 pre_expr e
= get_or_alloc_expr_for_name (op
);
3797 add_to_value (get_expr_value_id (e
), e
);
3798 bitmap_insert_into_set (TMP_GEN (block
), e
);
3799 bitmap_value_insert_into_set (AVAIL_OUT (block
), e
);
3802 if (gimple_has_side_effects (stmt
)
3803 || stmt_could_throw_p (stmt
)
3804 || is_gimple_debug (stmt
))
3807 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3809 if (ssa_undefined_value_p (op
))
3811 pre_expr e
= get_or_alloc_expr_for_name (op
);
3812 bitmap_value_insert_into_set (EXP_GEN (block
), e
);
3815 switch (gimple_code (stmt
))
3823 pre_expr result
= NULL
;
3824 vec
<vn_reference_op_s
> ops
= vNULL
;
3826 /* We can value number only calls to real functions. */
3827 if (gimple_call_internal_p (stmt
))
3830 copy_reference_ops_from_call (stmt
, &ops
);
3831 vn_reference_lookup_pieces (gimple_vuse (stmt
), 0,
3832 gimple_expr_type (stmt
),
3833 ops
, &ref
, VN_NOWALK
);
3838 /* If the value of the call is not invalidated in
3839 this block until it is computed, add the expression
3841 if (!gimple_vuse (stmt
)
3843 (SSA_NAME_DEF_STMT (gimple_vuse (stmt
))) == GIMPLE_PHI
3844 || gimple_bb (SSA_NAME_DEF_STMT
3845 (gimple_vuse (stmt
))) != block
)
3847 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3848 result
->kind
= REFERENCE
;
3850 PRE_EXPR_REFERENCE (result
) = ref
;
3852 get_or_alloc_expression_id (result
);
3853 add_to_value (get_expr_value_id (result
), result
);
3854 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3861 pre_expr result
= NULL
;
3862 switch (vn_get_stmt_kind (stmt
))
3866 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3869 /* COND_EXPR and VEC_COND_EXPR are awkward in
3870 that they contain an embedded complex expression.
3871 Don't even try to shove those through PRE. */
3872 if (code
== COND_EXPR
3873 || code
== VEC_COND_EXPR
)
3876 vn_nary_op_lookup_stmt (stmt
, &nary
);
3880 /* If the NARY traps and there was a preceding
3881 point in the block that might not return avoid
3882 adding the nary to EXP_GEN. */
3883 if (BB_MAY_NOTRETURN (block
)
3884 && vn_nary_may_trap (nary
))
3887 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3888 result
->kind
= NARY
;
3890 PRE_EXPR_NARY (result
) = nary
;
3897 vn_reference_lookup (gimple_assign_rhs1 (stmt
),
3903 /* If the value of the reference is not invalidated in
3904 this block until it is computed, add the expression
3906 if (gimple_vuse (stmt
))
3910 def_stmt
= SSA_NAME_DEF_STMT (gimple_vuse (stmt
));
3911 while (!gimple_nop_p (def_stmt
)
3912 && gimple_code (def_stmt
) != GIMPLE_PHI
3913 && gimple_bb (def_stmt
) == block
)
3915 if (stmt_may_clobber_ref_p
3916 (def_stmt
, gimple_assign_rhs1 (stmt
)))
3922 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt
));
3928 result
= (pre_expr
) pool_alloc (pre_expr_pool
);
3929 result
->kind
= REFERENCE
;
3931 PRE_EXPR_REFERENCE (result
) = ref
;
3939 get_or_alloc_expression_id (result
);
3940 add_to_value (get_expr_value_id (result
), result
);
3941 bitmap_value_insert_into_set (EXP_GEN (block
), result
);
3949 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3951 print_bitmap_set (dump_file
, EXP_GEN (block
),
3952 "exp_gen", block
->index
);
3953 print_bitmap_set (dump_file
, PHI_GEN (block
),
3954 "phi_gen", block
->index
);
3955 print_bitmap_set (dump_file
, TMP_GEN (block
),
3956 "tmp_gen", block
->index
);
3957 print_bitmap_set (dump_file
, AVAIL_OUT (block
),
3958 "avail_out", block
->index
);
3961 /* Put the dominator children of BLOCK on the worklist of blocks
3962 to compute available sets for. */
3963 for (son
= first_dom_son (CDI_DOMINATORS
, block
);
3965 son
= next_dom_son (CDI_DOMINATORS
, son
))
3966 worklist
[sp
++] = son
;
/* Local state for the eliminate domwalk.  */
static vec<gimple> el_to_remove;
static vec<gimple> el_to_update;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

static tree
eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (el_avail.length () > SSA_NAME_VERSION (valnum))
	return el_avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

static void
eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (el_avail.length () <= SSA_NAME_VERSION (valnum))
	el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      el_avail[SSA_NAME_VERSION (valnum)] = op;
      el_avail_stack.safe_push (op);
    }
}
4014 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4015 the leader for the expression if insertion was successful. */
4018 eliminate_insert (gimple_stmt_iterator
*gsi
, tree val
)
4020 tree expr
= vn_get_expr_for (val
);
4021 if (!CONVERT_EXPR_P (expr
)
4022 && TREE_CODE (expr
) != VIEW_CONVERT_EXPR
)
4025 tree op
= TREE_OPERAND (expr
, 0);
4026 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (op
) : op
;
4030 tree res
= make_temp_ssa_name (TREE_TYPE (val
), NULL
, "pretmp");
4031 gimple tem
= gimple_build_assign (res
,
4032 fold_build1 (TREE_CODE (expr
),
4033 TREE_TYPE (expr
), leader
));
4034 gsi_insert_before (gsi
, tem
, GSI_SAME_STMT
);
4035 VN_INFO_GET (res
)->valnum
= val
;
4037 if (TREE_CODE (leader
) == SSA_NAME
)
4038 gimple_set_plf (SSA_NAME_DEF_STMT (leader
), NECESSARY
, true);
4040 pre_stats
.insertions
++;
4041 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4043 fprintf (dump_file
, "Inserted ");
4044 print_gimple_stmt (dump_file
, tem
, 0, 0);
4050 class eliminate_dom_walker
: public dom_walker
4053 eliminate_dom_walker (cdi_direction direction
) : dom_walker (direction
) {}
4055 virtual void before_dom_children (basic_block
);
4056 virtual void after_dom_children (basic_block
);
4059 /* Perform elimination for the basic-block B during the domwalk. */
4062 eliminate_dom_walker::before_dom_children (basic_block b
)
4064 gimple_stmt_iterator gsi
;
4068 el_avail_stack
.safe_push (NULL_TREE
);
4070 for (gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
4072 gimple stmt
, phi
= gsi_stmt (gsi
);
4073 tree sprime
= NULL_TREE
, res
= PHI_RESULT (phi
);
4074 gimple_stmt_iterator gsi2
;
4076 /* We want to perform redundant PHI elimination. Do so by
4077 replacing the PHI with a single copy if possible.
4078 Do not touch inserted, single-argument or virtual PHIs. */
4079 if (gimple_phi_num_args (phi
) == 1
4080 || virtual_operand_p (res
))
4086 sprime
= eliminate_avail (res
);
4090 eliminate_push_avail (res
);
4094 else if (is_gimple_min_invariant (sprime
))
4096 if (!useless_type_conversion_p (TREE_TYPE (res
),
4097 TREE_TYPE (sprime
)))
4098 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4101 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4103 fprintf (dump_file
, "Replaced redundant PHI node defining ");
4104 print_generic_expr (dump_file
, res
, 0);
4105 fprintf (dump_file
, " with ");
4106 print_generic_expr (dump_file
, sprime
, 0);
4107 fprintf (dump_file
, "\n");
4110 remove_phi_node (&gsi
, false);
4113 && !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
))
4114 && TREE_CODE (sprime
) == SSA_NAME
)
4115 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
), NECESSARY
, true);
4117 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
4118 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
4119 stmt
= gimple_build_assign (res
, sprime
);
4120 gimple_set_plf (stmt
, NECESSARY
, gimple_plf (phi
, NECESSARY
));
4122 gsi2
= gsi_after_labels (b
);
4123 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
4124 /* Queue the copy for eventual removal. */
4125 el_to_remove
.safe_push (stmt
);
4126 /* If we inserted this PHI node ourself, it's not an elimination. */
4128 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
4131 pre_stats
.eliminations
++;
4134 for (gsi
= gsi_start_bb (b
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4136 tree lhs
= NULL_TREE
;
4137 tree rhs
= NULL_TREE
;
4139 stmt
= gsi_stmt (gsi
);
4141 if (gimple_has_lhs (stmt
))
4142 lhs
= gimple_get_lhs (stmt
);
4144 if (gimple_assign_single_p (stmt
))
4145 rhs
= gimple_assign_rhs1 (stmt
);
4147 /* Lookup the RHS of the expression, see if we have an
4148 available computation for it. If so, replace the RHS with
4149 the available computation. */
4150 if (gimple_has_lhs (stmt
)
4151 && TREE_CODE (lhs
) == SSA_NAME
4152 && !gimple_has_volatile_ops (stmt
))
4155 gimple orig_stmt
= stmt
;
4157 sprime
= eliminate_avail (lhs
);
4158 /* If there is no usable leader mark lhs as leader for its value. */
4160 eliminate_push_avail (lhs
);
4162 /* See PR43491. Do not replace a global register variable when
4163 it is a the RHS of an assignment. Do replace local register
4164 variables since gcc does not guarantee a local variable will
4165 be allocated in register.
4166 Do not perform copy propagation or undo constant propagation. */
4167 if (gimple_assign_single_p (stmt
)
4168 && (TREE_CODE (rhs
) == SSA_NAME
4169 || is_gimple_min_invariant (rhs
)
4170 || (TREE_CODE (rhs
) == VAR_DECL
4171 && is_global_var (rhs
)
4172 && DECL_HARD_REGISTER (rhs
))))
4177 /* If there is no existing usable leader but SCCVN thinks
4178 it has an expression it wants to use as replacement,
4180 tree val
= VN_INFO (lhs
)->valnum
;
4182 && TREE_CODE (val
) == SSA_NAME
4183 && VN_INFO (val
)->needs_insertion
4184 && VN_INFO (val
)->expr
!= NULL_TREE
4185 && (sprime
= eliminate_insert (&gsi
, val
)) != NULL_TREE
)
4186 eliminate_push_avail (sprime
);
4188 else if (is_gimple_min_invariant (sprime
))
4190 /* If there is no existing leader but SCCVN knows this
4191 value is constant, use that constant. */
4192 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
4193 TREE_TYPE (sprime
)))
4194 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
4196 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4198 fprintf (dump_file
, "Replaced ");
4199 print_gimple_expr (dump_file
, stmt
, 0, 0);
4200 fprintf (dump_file
, " with ");
4201 print_generic_expr (dump_file
, sprime
, 0);
4202 fprintf (dump_file
, " in ");
4203 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4205 pre_stats
.eliminations
++;
4206 propagate_tree_value_into_stmt (&gsi
, sprime
);
4207 stmt
= gsi_stmt (gsi
);
4210 /* If we removed EH side-effects from the statement, clean
4211 its EH information. */
4212 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4214 bitmap_set_bit (need_eh_cleanup
,
4215 gimple_bb (stmt
)->index
);
4216 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4217 fprintf (dump_file
, " Removed EH side-effects.\n");
4224 && (rhs
== NULL_TREE
4225 || TREE_CODE (rhs
) != SSA_NAME
4226 || may_propagate_copy (rhs
, sprime
)))
4228 bool can_make_abnormal_goto
4229 = is_gimple_call (stmt
)
4230 && stmt_can_make_abnormal_goto (stmt
);
4232 gcc_assert (sprime
!= rhs
);
4234 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4236 fprintf (dump_file
, "Replaced ");
4237 print_gimple_expr (dump_file
, stmt
, 0, 0);
4238 fprintf (dump_file
, " with ");
4239 print_generic_expr (dump_file
, sprime
, 0);
4240 fprintf (dump_file
, " in ");
4241 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4244 if (TREE_CODE (sprime
) == SSA_NAME
)
4245 gimple_set_plf (SSA_NAME_DEF_STMT (sprime
),
4247 /* We need to make sure the new and old types actually match,
4248 which may require adding a simple cast, which fold_convert
4250 if ((!rhs
|| TREE_CODE (rhs
) != SSA_NAME
)
4251 && !useless_type_conversion_p (gimple_expr_type (stmt
),
4252 TREE_TYPE (sprime
)))
4253 sprime
= fold_convert (gimple_expr_type (stmt
), sprime
);
4255 pre_stats
.eliminations
++;
4256 propagate_tree_value_into_stmt (&gsi
, sprime
);
4257 stmt
= gsi_stmt (gsi
);
4260 /* If we removed EH side-effects from the statement, clean
4261 its EH information. */
4262 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
4264 bitmap_set_bit (need_eh_cleanup
,
4265 gimple_bb (stmt
)->index
);
4266 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4267 fprintf (dump_file
, " Removed EH side-effects.\n");
4270 /* Likewise for AB side-effects. */
4271 if (can_make_abnormal_goto
4272 && !stmt_can_make_abnormal_goto (stmt
))
4274 bitmap_set_bit (need_ab_cleanup
,
4275 gimple_bb (stmt
)->index
);
4276 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4277 fprintf (dump_file
, " Removed AB side-effects.\n");
4281 /* If the statement is a scalar store, see if the expression
4282 has the same value number as its rhs. If so, the store is
4284 else if (gimple_assign_single_p (stmt
)
4285 && !gimple_has_volatile_ops (stmt
)
4286 && !is_gimple_reg (gimple_assign_lhs (stmt
))
4287 && (TREE_CODE (rhs
) == SSA_NAME
4288 || is_gimple_min_invariant (rhs
)))
4291 val
= vn_reference_lookup (gimple_assign_lhs (stmt
),
4292 gimple_vuse (stmt
), VN_WALK
, NULL
);
4293 if (TREE_CODE (rhs
) == SSA_NAME
)
4294 rhs
= VN_INFO (rhs
)->valnum
;
4296 && operand_equal_p (val
, rhs
, 0))
4298 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4300 fprintf (dump_file
, "Deleted redundant store ");
4301 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4304 /* Queue stmt for removal. */
4305 el_to_remove
.safe_push (stmt
);
4308 /* Visit COND_EXPRs and fold the comparison with the
4309 available value-numbers. */
4310 else if (gimple_code (stmt
) == GIMPLE_COND
)
4312 tree op0
= gimple_cond_lhs (stmt
);
4313 tree op1
= gimple_cond_rhs (stmt
);
4316 if (TREE_CODE (op0
) == SSA_NAME
)
4317 op0
= VN_INFO (op0
)->valnum
;
4318 if (TREE_CODE (op1
) == SSA_NAME
)
4319 op1
= VN_INFO (op1
)->valnum
;
4320 result
= fold_binary (gimple_cond_code (stmt
), boolean_type_node
,
4322 if (result
&& TREE_CODE (result
) == INTEGER_CST
)
4324 if (integer_zerop (result
))
4325 gimple_cond_make_false (stmt
);
4327 gimple_cond_make_true (stmt
);
4329 el_todo
= TODO_cleanup_cfg
;
4332 /* Visit indirect calls and turn them into direct calls if
4334 if (is_gimple_call (stmt
))
4336 tree orig_fn
= gimple_call_fn (stmt
);
4340 if (TREE_CODE (orig_fn
) == SSA_NAME
)
4341 fn
= VN_INFO (orig_fn
)->valnum
;
4342 else if (TREE_CODE (orig_fn
) == OBJ_TYPE_REF
4343 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn
)) == SSA_NAME
)
4345 fn
= VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn
))->valnum
;
4346 if (!gimple_call_addr_fndecl (fn
))
4348 fn
= ipa_intraprocedural_devirtualization (stmt
);
4350 fn
= build_fold_addr_expr (fn
);
4355 if (gimple_call_addr_fndecl (fn
) != NULL_TREE
4356 && useless_type_conversion_p (TREE_TYPE (orig_fn
),
4359 bool can_make_abnormal_goto
4360 = stmt_can_make_abnormal_goto (stmt
);
4361 bool was_noreturn
= gimple_call_noreturn_p (stmt
);
4363 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4365 fprintf (dump_file
, "Replacing call target with ");
4366 print_generic_expr (dump_file
, fn
, 0);
4367 fprintf (dump_file
, " in ");
4368 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4371 gimple_call_set_fn (stmt
, fn
);
4372 el_to_update
.safe_push (stmt
);
4374 /* When changing a call into a noreturn call, cfg cleanup
4375 is needed to fix up the noreturn call. */
4376 if (!was_noreturn
&& gimple_call_noreturn_p (stmt
))
4377 el_todo
|= TODO_cleanup_cfg
;
4379 /* If we removed EH side-effects from the statement, clean
4380 its EH information. */
4381 if (maybe_clean_or_replace_eh_stmt (stmt
, stmt
))
4383 bitmap_set_bit (need_eh_cleanup
,
4384 gimple_bb (stmt
)->index
);
4385 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4386 fprintf (dump_file
, " Removed EH side-effects.\n");
4389 /* Likewise for AB side-effects. */
4390 if (can_make_abnormal_goto
4391 && !stmt_can_make_abnormal_goto (stmt
))
4393 bitmap_set_bit (need_ab_cleanup
,
4394 gimple_bb (stmt
)->index
);
4395 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4396 fprintf (dump_file
, " Removed AB side-effects.\n");
4399 /* Changing an indirect call to a direct call may
4400 have exposed different semantics. This may
4401 require an SSA update. */
4402 el_todo
|= TODO_update_ssa_only_virtuals
;
4408 /* Make no longer available leaders no longer available. */
4411 eliminate_dom_walker::after_dom_children (basic_block
)
4414 while ((entry
= el_avail_stack
.pop ()) != NULL_TREE
)
4415 el_avail
[SSA_NAME_VERSION (VN_INFO (entry
)->valnum
)] = NULL_TREE
;
/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);

  el_to_remove.create (0);
  el_to_update.create (0);
  el_todo = 0;
  el_avail.create (0);
  el_avail_stack.create (0);

  eliminate_dom_walker (CDI_DOMINATORS).walk (cfun->cfg->x_entry_block_ptr);

  el_avail.release ();
  el_avail_stack.release ();

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in el_to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (el_to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
         instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (rhs) == SSA_NAME
          && single_imm_use (lhs, &use_p, &use_stmt)
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
        {
          SET_USE (use_p, rhs);
          update_stmt (use_stmt);
          if (inserted_exprs
              && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
              && TREE_CODE (rhs) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
        }

      /* If this is a store or a now unused copy, remove it.  */
      if (TREE_CODE (lhs) != SSA_NAME
          || has_zero_uses (lhs))
        {
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (inserted_exprs
              && TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
        }
    }
  el_to_remove.release ();

  /* We cannot update call statements with virtual operands during
     SSA walk.  This might remove them which in turn makes our
     VN lattice invalid.  */
  FOR_EACH_VEC_ELT (el_to_update, i, stmt)
    update_stmt (stmt);
  el_to_update.release ();

  return el_todo;
}
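
/* Worked example for the el_to_remove handling above (hypothetical SSA
   names): if elimination turned a redundant computation into the copy
       y_2 = x_1;
   and y_2 has a single remaining use, that use is rewritten to x_1
   directly; y_2 is then left with zero uses, so the store/unused-copy
   check in the same loop unlinks the statement, removes it and releases
   its defs.  */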
/* Perform CFG cleanups made necessary by elimination.  */

static unsigned
fini_eliminate (void)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    return TODO_cleanup_cfg;
  return 0;
}
/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt, if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}
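
/* Typical use of mark_operand_necessary (see remove_dead_inserted_code
   below): for each SSA operand USE of a statement already known to be
   necessary,

       gimple n = mark_operand_necessary (use);
       if (n)
         bitmap_set_bit (worklist, SSA_NAME_VERSION (use));

   i.e. a non-NULL return value means the defining statement was newly
   marked and its own operands still have to be visited.  */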
/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}
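
/* Illustrative run of the pass above (hypothetical SSA names): if insertion
   created
       pretmp_5 = a_1 + b_2;
   but elimination later rewrote every use of pretmp_5 to an existing
   leader, the defining statement never receives the NECESSARY flag, never
   reaches the worklist, and is deleted by the final loop.  */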
/* Initialize data structures used by PRE.  */

static void
init_pre (void)
{
  basic_block bb;

  next_expression_id = 1;
  expressions.create (0);
  expressions.safe_push (NULL);
  value_expressions.create (get_max_value_id () + 1);
  value_expressions.safe_grow_cleared (get_max_value_id () + 1);
  name_to_id.create (0);

  inserted_exprs = BITMAP_ALLOC (NULL);

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks);
  postorder_num = inverted_post_order_compute (postorder);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table.create (5110);
  expression_to_id.create (num_ssa_names * 3);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }
}
/* Deallocate data structures used by PRE.  */

static void
fini_pre ()
{
  free (postorder);
  value_expressions.release ();
  BITMAP_FREE (inserted_exprs);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  phi_translate_table.dispose ();
  expression_to_id.dispose ();
  name_to_id.release ();

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  unsigned int todo = 0;

  do_partial_partial =
    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (VN_WALK))
    {
      loop_optimizer_finalize ();
      return 0;
    }

  init_pre ();
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  remove_dead_inserted_code ();
  todo |= TODO_verify_flow;

  scev_finalize ();
  fini_pre ();
  todo |= fini_eliminate ();
  loop_optimizer_finalize ();

  /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
     case we can merge the block with the remaining predecessor of the block.
     It should either:
     - call merge_blocks after each tail merge iteration
     - call merge_blocks after all tail merge iterations
     - mark TODO_cleanup_cfg when necessary
     - share the cfg cleanup with fini_pre.  */
  todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}
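
/* Informal summary of the phase ordering in do_pre above:

     loop_optimizer_init -> run_scc_vn -> init_pre / scev_initialize
       -> compute_avail -> [compute_antic -> insert]  (skipped for huge CFGs)
       -> eliminate -> remove_dead_inserted_code
       -> fini_pre / fini_eliminate -> tail_merge_optimize -> update_ssa  */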
namespace {

const pass_data pass_data_pre =
{
  GIMPLE_PASS, /* type */
  "pre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_PRE, /* tv_id */
  ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_rebuild_alias, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_pre : public gimple_opt_pass
{
public:
  pass_pre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_pre, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_pre (); }
  unsigned int execute () { return do_pre (); }

}; // class pass_pre

} // anon namespace

gimple_opt_pass *
make_pass_pre (gcc::context *ctxt)
{
  return new pass_pre (ctxt);
}
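
/* make_pass_pre is the factory the pass manager calls; the pass itself is
   placed in the pipeline via passes.def (NEXT_PASS (pass_pre)) and the
   factory is declared in tree-pass.h, which is why only this function is
   exported here.  */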
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  unsigned int todo = 0;

  if (!run_scc_vn (VN_WALKREWRITE))
    return 0;

  memset (&pre_stats, 0, sizeof (pre_stats));

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  todo |= fini_eliminate ();

  free_scc_vn ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  return todo;
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}
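
/* Unlike do_pre, execute_fre performs no insertion: it only runs value
   numbering (VN_WALKREWRITE) and then removes fully redundant computations
   via eliminate ().  E.g. (hypothetical GIMPLE)
       x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;
   has the uses of y_4 replaced by x_1; partial redundancies are left for
   PRE proper.  */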
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  bool gate () { return gate_fre (); }
  unsigned int execute () { return execute_fre (); }

}; // class pass_fre

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}