1 /* SSA-PRE for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "predict.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "rtl.h"
30 #include "ssa.h"
31 #include "alias.h"
32 #include "fold-const.h"
33 #include "cfganal.h"
34 #include "gimple-pretty-print.h"
35 #include "tree-inline.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "tree-cfg.h"
43 #include "tree-ssa-loop.h"
44 #include "tree-into-ssa.h"
45 #include "flags.h"
46 #include "insn-config.h"
47 #include "expmed.h"
48 #include "dojump.h"
49 #include "explow.h"
50 #include "calls.h"
51 #include "emit-rtl.h"
52 #include "varasm.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "tree-dfa.h"
56 #include "tree-ssa.h"
57 #include "tree-iterator.h"
58 #include "alloc-pool.h"
59 #include "tree-pass.h"
60 #include "langhooks.h"
61 #include "cfgloop.h"
62 #include "tree-ssa-sccvn.h"
63 #include "tree-scalar-evolution.h"
64 #include "params.h"
65 #include "dbgcnt.h"
66 #include "domwalk.h"
67 #include "cgraph.h"
68 #include "symbol-summary.h"
69 #include "ipa-prop.h"
70 #include "tree-ssa-propagate.h"
71 #include "ipa-utils.h"
72 #include "tree-cfgcleanup.h"
73
74 /* TODO:
75
76 1. Avail sets can be shared by making an avail_find_leader that
77 walks up the dominator tree and looks in those avail sets.
78 This might affect code optimality; it's unclear right now.
79 2. Strength reduction can be performed by anticipating expressions
80 we can repair later on.
81 3. We can do back-substitution or smarter value numbering to catch
82 commutative expressions split up over multiple statements.
83 */
84
85 /* For ease of terminology, "expression node" below refers to
86 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
87 represent the actual statement containing the expressions we care about,
88 and we cache the value number by putting it in the expression. */
89
90 /* Basic algorithm
91
92 First we walk the statements to generate the AVAIL sets, the
93 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
94 generation of values/expressions by a given block. We use them
95 when computing the ANTIC sets. The AVAIL sets consist of
96 SSA_NAME's that represent values, so we know what values are
97 available in what blocks. AVAIL is a forward dataflow problem. In
98 SSA, values are never killed, so we don't need a kill set, or a
99 fixpoint iteration, in order to calculate the AVAIL sets. In
100 traditional parlance, AVAIL sets tell us the downsafety of the
101 expressions/values.
102
103 Next, we generate the ANTIC sets. These sets represent the
104 anticipatable expressions. ANTIC is a backwards dataflow
105 problem. An expression is anticipatable in a given block if it could
106 be generated in that block. This means that if we had to perform
107 an insertion in that block, of the value of that expression, we
108 could. Calculating the ANTIC sets requires phi translation of
109 expressions, because the flow goes backwards through phis. We must
110 iterate to a fixpoint of the ANTIC sets, because we have a kill
111 set. Even in SSA form, values are not live over the entire
112 function, only from their definition point onwards. So we have to
113 remove values from the ANTIC set once we go past the definition
114 point of the leaders that make them up.
115 compute_antic/compute_antic_aux performs this computation.
116
117 Third, we perform insertions to make partially redundant
118 expressions fully redundant.
119
120 An expression is partially redundant (excluding partial
121 anticipation) if:
122
123 1. It is AVAIL in some, but not all, of the predecessors of a
124 given block.
125 2. It is ANTIC in all the predecessors.
126
127 In order to make it fully redundant, we insert the expression into
128 the predecessors where it is not available, but is ANTIC.
129
130 For the partial anticipation case, we only perform insertion if it
131 is partially anticipated in some block, and fully available in all
132 of the predecessors.
133
134 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
135 performs these steps.
136
137 Fourth, we eliminate fully redundant expressions.
138 This is a simple statement walk that replaces redundant
139 calculations with the now available values. */
140
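/* As an illustration (a sketch; the SSA names here are invented):

     if (c_1)                     if (c_1)
       x_2 = a_3 + b_4;             x_2 = a_3 + b_4;
     else                  =>     else
       ;                            pretmp_8 = a_3 + b_4;
                                  # prephitmp_9 = PHI <x_2, pretmp_8>
     y_5 = a_3 + b_4;             y_5 = prephitmp_9;

   a_3 + b_4 is AVAIL on one predecessor of the join and ANTIC in
   both, so inserting it into the other predecessor makes it fully
   redundant; elimination then replaces the second computation with
   the PHI result.  */
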
141 /* Representations of value numbers:
142
143 Value numbers are represented by a representative SSA_NAME. We
144 will create fake SSA_NAME's in situations where we need a
145 representative but do not have one (because it is a complex
146 expression). In order to facilitate storing the value numbers in
147 bitmaps, and keep the number of wasted SSA_NAME's down, we also
148 associate a value_id with each value number, and create full blown
149 ssa_name's only where we actually need them (IE in operands of
150 existing expressions).
151
152 Theoretically you could replace all the value_id's with
153 SSA_NAME_VERSION, but this would allocate a large number of
154 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
155 It would also require an additional indirection at each point we
156 use the value id. */
157
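/* For example (value ids and names invented): if x_4 is defined as
   x_4 = a_2 + b_3, both the NAME x_4 and the NARY a_2 + b_3 share
   one value_id, say 7, with x_4 serving as the representative
   SSA_NAME.  A value that only ever occurs as a complex expression
   gets a fake representative created on demand (see
   get_representative_for below).  */
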
158 /* Representation of expressions on value numbers:
159
160 Expressions consisting of value numbers are represented the same
161 way as our VN internally represents them, with an additional
162 "pre_expr" wrapping around them in order to facilitate storing all
163 of the expressions in the same sets. */
164
165 /* Representation of sets:
166
167 The dataflow sets do not need to be sorted in any particular order
168 for the majority of their lifetime, and are simply represented as two
169 bitmaps, one that keeps track of values present in the set, and one
170 that keeps track of expressions present in the set.
171
172 When we need them in topological order, we produce it on demand by
173 transforming the bitmap into an array and sorting it into topo
174 order. */
175
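/* For example (ids invented): a set holding x_4 (expression id 3)
   and a_2 + b_3 (expression id 5), both of value id 7, is stored as
   expressions = {3, 5} and values = {7}, so value membership and
   expression membership can be tested independently.  */
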
176 /* Type of expression, used to know which member of the PRE_EXPR union
177 is valid. */
178
179 enum pre_expr_kind
180 {
181 NAME,
182 NARY,
183 REFERENCE,
184 CONSTANT
185 };
186
187 union pre_expr_union
188 {
189 tree name;
190 tree constant;
191 vn_nary_op_t nary;
192 vn_reference_t reference;
193 };
194
195 typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
196 {
197 enum pre_expr_kind kind;
198 unsigned int id;
199 pre_expr_union u;
200
201 /* hash_table support. */
202 static inline hashval_t hash (const pre_expr_d *);
203 static inline int equal (const pre_expr_d *, const pre_expr_d *);
204 } *pre_expr;
205
206 #define PRE_EXPR_NAME(e) (e)->u.name
207 #define PRE_EXPR_NARY(e) (e)->u.nary
208 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
209 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
210
211 /* Compare E1 and E2 for equality. */
212
213 inline int
214 pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
215 {
216 if (e1->kind != e2->kind)
217 return false;
218
219 switch (e1->kind)
220 {
221 case CONSTANT:
222 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
223 PRE_EXPR_CONSTANT (e2));
224 case NAME:
225 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
226 case NARY:
227 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
228 case REFERENCE:
229 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
230 PRE_EXPR_REFERENCE (e2));
231 default:
232 gcc_unreachable ();
233 }
234 }
235
236 /* Hash E. */
237
238 inline hashval_t
239 pre_expr_d::hash (const pre_expr_d *e)
240 {
241 switch (e->kind)
242 {
243 case CONSTANT:
244 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
245 case NAME:
246 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
247 case NARY:
248 return PRE_EXPR_NARY (e)->hashcode;
249 case REFERENCE:
250 return PRE_EXPR_REFERENCE (e)->hashcode;
251 default:
252 gcc_unreachable ();
253 }
254 }
255
256 /* Next global expression id number. */
257 static unsigned int next_expression_id;
258
259 /* Mapping from expression to id number we can use in bitmap sets. */
260 static vec<pre_expr> expressions;
261 static hash_table<pre_expr_d> *expression_to_id;
262 static vec<unsigned> name_to_id;
263
264 /* Allocate an expression id for EXPR. */
265
266 static inline unsigned int
267 alloc_expression_id (pre_expr expr)
268 {
269 struct pre_expr_d **slot;
270 /* Make sure we won't overflow. */
271 gcc_assert (next_expression_id + 1 > next_expression_id);
272 expr->id = next_expression_id++;
273 expressions.safe_push (expr);
274 if (expr->kind == NAME)
275 {
276 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
277 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
278 re-allocations by using vec::reserve upfront. */
279 unsigned old_len = name_to_id.length ();
280 name_to_id.reserve (num_ssa_names - old_len);
281 name_to_id.quick_grow_cleared (num_ssa_names);
282 gcc_assert (name_to_id[version] == 0);
283 name_to_id[version] = expr->id;
284 }
285 else
286 {
287 slot = expression_to_id->find_slot (expr, INSERT);
288 gcc_assert (!*slot);
289 *slot = expr;
290 }
291 return next_expression_id - 1;
292 }
293
294 /* Return the expression id for tree EXPR. */
295
296 static inline unsigned int
297 get_expression_id (const pre_expr expr)
298 {
299 return expr->id;
300 }
301
302 static inline unsigned int
303 lookup_expression_id (const pre_expr expr)
304 {
305 struct pre_expr_d **slot;
306
307 if (expr->kind == NAME)
308 {
309 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
310 if (name_to_id.length () <= version)
311 return 0;
312 return name_to_id[version];
313 }
314 else
315 {
316 slot = expression_to_id->find_slot (expr, NO_INSERT);
317 if (!slot)
318 return 0;
319 return ((pre_expr)*slot)->id;
320 }
321 }
322
323 /* Return the existing expression id for EXPR, or create one if one
324 does not exist yet. */
325
326 static inline unsigned int
327 get_or_alloc_expression_id (pre_expr expr)
328 {
329 unsigned int id = lookup_expression_id (expr);
330 if (id == 0)
331 return alloc_expression_id (expr);
332 return expr->id = id;
333 }
334
335 /* Return the expression that has expression id ID. */
336
337 static inline pre_expr
338 expression_for_id (unsigned int id)
339 {
340 return expressions[id];
341 }
342
343 /* Destroy the expressions array, invalidating the id -> expression
344 mapping. */
345
346 static void
347 clear_expression_ids (void)
348 {
349 expressions.release ();
350 }
351
352 static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");
353
354 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
355
356 static pre_expr
357 get_or_alloc_expr_for_name (tree name)
358 {
359 struct pre_expr_d expr;
360 pre_expr result;
361 unsigned int result_id;
362
363 expr.kind = NAME;
364 expr.id = 0;
365 PRE_EXPR_NAME (&expr) = name;
366 result_id = lookup_expression_id (&expr);
367 if (result_id != 0)
368 return expression_for_id (result_id);
369
370 result = pre_expr_pool.allocate ();
371 result->kind = NAME;
372 PRE_EXPR_NAME (result) = name;
373 alloc_expression_id (result);
374 return result;
375 }
376
377 /* An unordered bitmap set. One bitmap tracks values, the other,
378 expressions. */
379 typedef struct bitmap_set
380 {
381 bitmap_head expressions;
382 bitmap_head values;
383 } *bitmap_set_t;
384
385 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
386 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
387
388 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
389 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
390
391 /* Mapping from value id to expressions with that value_id. */
392 static vec<bitmap> value_expressions;
393
394 /* Sets that we need to keep track of. */
395 typedef struct bb_bitmap_sets
396 {
397 /* The EXP_GEN set, which represents expressions/values generated in
398 a basic block. */
399 bitmap_set_t exp_gen;
400
401 /* The PHI_GEN set, which represents PHI results generated in a
402 basic block. */
403 bitmap_set_t phi_gen;
404
405 /* The TMP_GEN set, which represents results/temporaries generated
406 in a basic block. IE the LHS of an expression. */
407 bitmap_set_t tmp_gen;
408
409 /* The AVAIL_OUT set, which represents which values are available in
410 a given basic block. */
411 bitmap_set_t avail_out;
412
413 /* The ANTIC_IN set, which represents which values are anticipatable
414 in a given basic block. */
415 bitmap_set_t antic_in;
416
417 /* The PA_IN set, which represents which values are
418 partially anticipatable in a given basic block. */
419 bitmap_set_t pa_in;
420
421 /* The NEW_SETS set, which is used during insertion to augment the
422 AVAIL_OUT set of blocks with the new insertions performed during
423 the current iteration. */
424 bitmap_set_t new_sets;
425
426 /* A cache for value_dies_in_block_x. */
427 bitmap expr_dies;
428
429 /* The live virtual operand on successor edges. */
430 tree vop_on_exit;
431
432 /* True if we have visited this block during ANTIC calculation. */
433 unsigned int visited : 1;
434
435 /* True when the block contains a call that might not return. */
436 unsigned int contains_may_not_return_call : 1;
437 } *bb_value_sets_t;
438
439 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
440 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
441 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
442 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
443 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
444 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
445 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
446 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
447 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
448 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
449 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
450
451
452 /* Basic block list in postorder. */
453 static int *postorder;
454 static int postorder_num;
455
456 /* This structure is used to keep track of statistics on what
457 optimization PRE was able to perform. */
458 static struct
459 {
460 /* The number of RHS computations eliminated by PRE. */
461 int eliminations;
462
463 /* The number of new expressions/temporaries generated by PRE. */
464 int insertions;
465
466 /* The number of inserts found due to partial anticipation. */
467 int pa_insert;
468
469 /* The number of new PHI nodes added by PRE. */
470 int phis;
471 } pre_stats;
472
473 static bool do_partial_partial;
474 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
475 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
476 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
477 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
478 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
479 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
480 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
481 unsigned int, bool);
482 static bitmap_set_t bitmap_set_new (void);
483 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
484 tree);
485 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
486 static unsigned int get_expr_value_id (pre_expr);
487
488 /* We can add and remove elements and entries to and from sets
489 and hash tables, so we use alloc pools for them. */
490
491 static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
492 static bitmap_obstack grand_bitmap_obstack;
493
494 /* Set of blocks with statements that have had their EH properties changed. */
495 static bitmap need_eh_cleanup;
496
497 /* Set of blocks with statements that have had their AB properties changed. */
498 static bitmap need_ab_cleanup;
499
500 /* A three-tuple {e, pred, v} used to cache phi translations in the
501 phi_translate_table. */
502
503 typedef struct expr_pred_trans_d : free_ptr_hash<expr_pred_trans_d>
504 {
505 /* The expression. */
506 pre_expr e;
507
508 /* The predecessor block along which we translated the expression. */
509 basic_block pred;
510
511 /* The value that resulted from the translation. */
512 pre_expr v;
513
514 /* The hashcode for the expression, pred pair. This is cached for
515 speed reasons. */
516 hashval_t hashcode;
517
518 /* hash_table support. */
519 static inline hashval_t hash (const expr_pred_trans_d *);
520 static inline int equal (const expr_pred_trans_d *, const expr_pred_trans_d *);
521 } *expr_pred_trans_t;
522 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
523
524 inline hashval_t
525 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
526 {
527 return e->hashcode;
528 }
529
530 inline int
531 expr_pred_trans_d::equal (const expr_pred_trans_d *ve1,
532 const expr_pred_trans_d *ve2)
533 {
534 basic_block b1 = ve1->pred;
535 basic_block b2 = ve2->pred;
536
537 /* If they are not translations for the same basic block, they can't
538 be equal. */
539 if (b1 != b2)
540 return false;
541 return pre_expr_d::equal (ve1->e, ve2->e);
542 }
543
544 /* The phi_translate_table caches phi translations for a given
545 expression and predecessor. */
546 static hash_table<expr_pred_trans_d> *phi_translate_table;
547
548 /* Add the tuple mapping from {expression E, basic block PRED} to
549 the phi translation table and return whether it pre-existed. */
550
551 static inline bool
552 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
553 {
554 expr_pred_trans_t *slot;
555 expr_pred_trans_d tem;
556 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
557 pred->index);
558 tem.e = e;
559 tem.pred = pred;
560 tem.hashcode = hash;
561 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
562 if (*slot)
563 {
564 *entry = *slot;
565 return true;
566 }
567
568 *entry = *slot = XNEW (struct expr_pred_trans_d);
569 (*entry)->e = e;
570 (*entry)->pred = pred;
571 (*entry)->hashcode = hash;
572 return false;
573 }
574
575
576 /* Add expression E to the expression set of value id V. */
577
578 static void
579 add_to_value (unsigned int v, pre_expr e)
580 {
581 bitmap set;
582
583 gcc_checking_assert (get_expr_value_id (e) == v);
584
585 if (v >= value_expressions.length ())
586 {
587 value_expressions.safe_grow_cleared (v + 1);
588 }
589
590 set = value_expressions[v];
591 if (!set)
592 {
593 set = BITMAP_ALLOC (&grand_bitmap_obstack);
594 value_expressions[v] = set;
595 }
596
597 bitmap_set_bit (set, get_or_alloc_expression_id (e));
598 }
599
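/* For example (ids invented): after calling add_to_value (7, e) for
   expressions with ids 3 and 5, value_expressions[7] is the bitmap
   {3, 5}.  This reverse mapping lets bitmap_find_leader and friends
   walk only the expressions of a given value instead of scanning a
   whole set.  */
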
600 /* Create a new bitmap set and return it. */
601
602 static bitmap_set_t
603 bitmap_set_new (void)
604 {
605 bitmap_set_t ret = bitmap_set_pool.allocate ();
606 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
607 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
608 return ret;
609 }
610
611 /* Return the value id for a PRE expression EXPR. */
612
613 static unsigned int
614 get_expr_value_id (pre_expr expr)
615 {
616 unsigned int id;
617 switch (expr->kind)
618 {
619 case CONSTANT:
620 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
621 break;
622 case NAME:
623 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
624 break;
625 case NARY:
626 id = PRE_EXPR_NARY (expr)->value_id;
627 break;
628 case REFERENCE:
629 id = PRE_EXPR_REFERENCE (expr)->value_id;
630 break;
631 default:
632 gcc_unreachable ();
633 }
634 /* ??? We cannot assert that expr has a value-id (it can be 0), because
635 we assign value-ids only to expressions that have a result
636 in set_hashtable_value_ids. */
637 return id;
638 }
639
640 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
641
642 static tree
643 sccvn_valnum_from_value_id (unsigned int val)
644 {
645 bitmap_iterator bi;
646 unsigned int i;
647 bitmap exprset = value_expressions[val];
648 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
649 {
650 pre_expr vexpr = expression_for_id (i);
651 if (vexpr->kind == NAME)
652 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
653 else if (vexpr->kind == CONSTANT)
654 return PRE_EXPR_CONSTANT (vexpr);
655 }
656 return NULL_TREE;
657 }
658
659 /* Remove an expression EXPR from a bitmapped set. */
660
661 static void
662 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
663 {
664 unsigned int val = get_expr_value_id (expr);
665 if (!value_id_constant_p (val))
666 {
667 bitmap_clear_bit (&set->values, val);
668 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
669 }
670 }
671
672 static void
673 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
674 unsigned int val, bool allow_constants)
675 {
676 if (allow_constants || !value_id_constant_p (val))
677 {
678 /* We specifically expect this and only this function to be able to
679 insert constants into a set. */
680 bitmap_set_bit (&set->values, val);
681 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
682 }
683 }
684
685 /* Insert an expression EXPR into a bitmapped set. */
686
687 static void
688 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
689 {
690 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
691 }
692
693 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
694
695 static void
696 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
697 {
698 bitmap_copy (&dest->expressions, &orig->expressions);
699 bitmap_copy (&dest->values, &orig->values);
700 }
701
702
703 /* Free memory used up by SET. */
704 static void
705 bitmap_set_free (bitmap_set_t set)
706 {
707 bitmap_clear (&set->expressions);
708 bitmap_clear (&set->values);
709 }
710
711
712 /* Generate a topologically ordered array of bitmap set SET. */
713
714 static vec<pre_expr>
715 sorted_array_from_bitmap_set (bitmap_set_t set)
716 {
717 unsigned int i, j;
718 bitmap_iterator bi, bj;
719 vec<pre_expr> result;
720
721 /* Pre-allocate enough space for the array. */
722 result.create (bitmap_count_bits (&set->expressions));
723
724 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
725 {
726 /* The number of expressions having a given value is usually
727 relatively small. Thus, rather than making a vector of all
728 the expressions and sorting it by value-id, we walk the values
729 and check in the reverse mapping that tells us what expressions
730 have a given value, to filter those in our set. As a result,
731 the expressions are inserted in value-id order, which means
732 topological order.
733
734 If this is somehow a significant loss for some cases, we can
735 choose which set to walk based on the set size. */
736 bitmap exprset = value_expressions[i];
737 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
738 {
739 if (bitmap_bit_p (&set->expressions, j))
740 result.quick_push (expression_for_id (j));
741 }
742 }
743
744 return result;
745 }
746
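/* For example (value ids invented): operands are value-numbered
   before the expressions that use them, so the NARY a_2 + b_3 with
   value id 9 has operands with smaller value ids, say 4 and 5.
   Walking values in increasing value-id order thus yields leaders
   before the expressions consuming them.  */
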
747 /* Perform bitmapped set operation DEST &= ORIG. */
748
749 static void
750 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
751 {
752 bitmap_iterator bi;
753 unsigned int i;
754
755 if (dest != orig)
756 {
757 bitmap_head temp;
758 bitmap_initialize (&temp, &grand_bitmap_obstack);
759
760 bitmap_and_into (&dest->values, &orig->values);
761 bitmap_copy (&temp, &dest->expressions);
762 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
763 {
764 pre_expr expr = expression_for_id (i);
765 unsigned int value_id = get_expr_value_id (expr);
766 if (!bitmap_bit_p (&dest->values, value_id))
767 bitmap_clear_bit (&dest->expressions, i);
768 }
769 bitmap_clear (&temp);
770 }
771 }
772
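/* For example (ids invented): if DEST holds expressions e1 (value 4)
   and e2 (value 9) and ORIG's value set is {4}, intersecting the
   values leaves {4}, and the subsequent walk clears e2 from DEST's
   expressions because its value is no longer present.  */
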
773 /* Subtract all values and expressions contained in ORIG from DEST. */
774
775 static bitmap_set_t
776 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
777 {
778 bitmap_set_t result = bitmap_set_new ();
779 bitmap_iterator bi;
780 unsigned int i;
781
782 bitmap_and_compl (&result->expressions, &dest->expressions,
783 &orig->expressions);
784
785 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
786 {
787 pre_expr expr = expression_for_id (i);
788 unsigned int value_id = get_expr_value_id (expr);
789 bitmap_set_bit (&result->values, value_id);
790 }
791
792 return result;
793 }
794
795 /* Subtract all the values in bitmap set B from bitmap set A. */
796
797 static void
798 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
799 {
800 unsigned int i;
801 bitmap_iterator bi;
802 bitmap_head temp;
803
804 bitmap_initialize (&temp, &grand_bitmap_obstack);
805
806 bitmap_copy (&temp, &a->expressions);
807 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
808 {
809 pre_expr expr = expression_for_id (i);
810 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
811 bitmap_remove_from_set (a, expr);
812 }
813 bitmap_clear (&temp);
814 }
815
816
817 /* Return true if bitmapped set SET contains the value VALUE_ID. */
818
819 static bool
820 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
821 {
822 if (value_id_constant_p (value_id))
823 return true;
824
825 if (!set || bitmap_empty_p (&set->expressions))
826 return false;
827
828 return bitmap_bit_p (&set->values, value_id);
829 }
830
831 static inline bool
832 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
833 {
834 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
835 }
836
837 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
838
839 static void
840 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
841 const pre_expr expr)
842 {
843 bitmap exprset;
844 unsigned int i;
845 bitmap_iterator bi;
846
847 if (value_id_constant_p (lookfor))
848 return;
849
850 if (!bitmap_set_contains_value (set, lookfor))
851 return;
852
853 /* The number of expressions having a given value is usually
854 significantly less than the total number of expressions in SET.
855 Thus, rather than check, for each expression in SET, whether it
856 has the value LOOKFOR, we walk the reverse mapping that tells us
857 what expressions have a given value, and see if any of those
858 expressions are in our set. For large testcases, this is about
859 5-10x faster than walking the bitmap. If this is somehow a
860 significant loss for some cases, we can choose which set to walk
861 based on the set size. */
862 exprset = value_expressions[lookfor];
863 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
864 {
865 if (bitmap_clear_bit (&set->expressions, i))
866 {
867 bitmap_set_bit (&set->expressions, get_expression_id (expr));
868 return;
869 }
870 }
871
872 gcc_unreachable ();
873 }
874
875 /* Return true if two bitmap sets are equal. */
876
877 static bool
878 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
879 {
880 return bitmap_equal_p (&a->values, &b->values);
881 }
882
883 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
884 and add it otherwise. */
885
886 static void
887 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
888 {
889 unsigned int val = get_expr_value_id (expr);
890
891 if (bitmap_set_contains_value (set, val))
892 bitmap_set_replace_value (set, val, expr);
893 else
894 bitmap_insert_into_set (set, expr);
895 }
896
897 /* Insert EXPR into SET if EXPR's value is not already present in
898 SET. */
899
900 static void
901 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
902 {
903 unsigned int val = get_expr_value_id (expr);
904
905 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
906
907 /* Constant values are always considered to be part of the set. */
908 if (value_id_constant_p (val))
909 return;
910
911 /* If the value membership changed, add the expression. */
912 if (bitmap_set_bit (&set->values, val))
913 bitmap_set_bit (&set->expressions, expr->id);
914 }
915
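/* Note the reliance on bitmap_set_bit returning true only if the bit
   actually changed: when SET already holds some expression with
   EXPR's value, the expression bitmap is left alone, so each value
   keeps a single representative expression in the set.  */
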
916 /* Print out EXPR to outfile. */
917
918 static void
919 print_pre_expr (FILE *outfile, const pre_expr expr)
920 {
921 switch (expr->kind)
922 {
923 case CONSTANT:
924 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
925 break;
926 case NAME:
927 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
928 break;
929 case NARY:
930 {
931 unsigned int i;
932 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
933 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
934 for (i = 0; i < nary->length; i++)
935 {
936 print_generic_expr (outfile, nary->op[i], 0);
937 if (i != (unsigned) nary->length - 1)
938 fprintf (outfile, ",");
939 }
940 fprintf (outfile, "}");
941 }
942 break;
943
944 case REFERENCE:
945 {
946 vn_reference_op_t vro;
947 unsigned int i;
948 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
949 fprintf (outfile, "{");
950 for (i = 0;
951 ref->operands.iterate (i, &vro);
952 i++)
953 {
954 bool closebrace = false;
955 if (vro->opcode != SSA_NAME
956 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
957 {
958 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
959 if (vro->op0)
960 {
961 fprintf (outfile, "<");
962 closebrace = true;
963 }
964 }
965 if (vro->op0)
966 {
967 print_generic_expr (outfile, vro->op0, 0);
968 if (vro->op1)
969 {
970 fprintf (outfile, ",");
971 print_generic_expr (outfile, vro->op1, 0);
972 }
973 if (vro->op2)
974 {
975 fprintf (outfile, ",");
976 print_generic_expr (outfile, vro->op2, 0);
977 }
978 }
979 if (closebrace)
980 fprintf (outfile, ">");
981 if (i != ref->operands.length () - 1)
982 fprintf (outfile, ",");
983 }
984 fprintf (outfile, "}");
985 if (ref->vuse)
986 {
987 fprintf (outfile, "@");
988 print_generic_expr (outfile, ref->vuse, 0);
989 }
990 }
991 break;
992 }
993 }
994 void debug_pre_expr (pre_expr);
995
996 /* Like print_pre_expr but always prints to stderr. */
997 DEBUG_FUNCTION void
998 debug_pre_expr (pre_expr e)
999 {
1000 print_pre_expr (stderr, e);
1001 fprintf (stderr, "\n");
1002 }
1003
1004 /* Print out SET to OUTFILE. */
1005
1006 static void
1007 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1008 const char *setname, int blockindex)
1009 {
1010 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1011 if (set)
1012 {
1013 bool first = true;
1014 unsigned i;
1015 bitmap_iterator bi;
1016
1017 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1018 {
1019 const pre_expr expr = expression_for_id (i);
1020
1021 if (!first)
1022 fprintf (outfile, ", ");
1023 first = false;
1024 print_pre_expr (outfile, expr);
1025
1026 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1027 }
1028 }
1029 fprintf (outfile, " }\n");
1030 }
1031
1032 void debug_bitmap_set (bitmap_set_t);
1033
1034 DEBUG_FUNCTION void
1035 debug_bitmap_set (bitmap_set_t set)
1036 {
1037 print_bitmap_set (stderr, set, "debug", 0);
1038 }
1039
1040 void debug_bitmap_sets_for (basic_block);
1041
1042 DEBUG_FUNCTION void
1043 debug_bitmap_sets_for (basic_block bb)
1044 {
1045 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1046 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1047 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1048 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1049 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1050 if (do_partial_partial)
1051 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1052 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1053 }
1054
1055 /* Print out the expressions that have VAL to OUTFILE. */
1056
1057 static void
1058 print_value_expressions (FILE *outfile, unsigned int val)
1059 {
1060 bitmap set = value_expressions[val];
1061 if (set)
1062 {
1063 bitmap_set x;
1064 char s[10];
1065 sprintf (s, "%04d", val);
1066 x.expressions = *set;
1067 print_bitmap_set (outfile, &x, s, 0);
1068 }
1069 }
1070
1071
1072 DEBUG_FUNCTION void
1073 debug_value_expressions (unsigned int val)
1074 {
1075 print_value_expressions (stderr, val);
1076 }
1077
1078 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1079 represent it. */
1080
1081 static pre_expr
1082 get_or_alloc_expr_for_constant (tree constant)
1083 {
1084 unsigned int result_id;
1085 unsigned int value_id;
1086 struct pre_expr_d expr;
1087 pre_expr newexpr;
1088
1089 expr.kind = CONSTANT;
1090 PRE_EXPR_CONSTANT (&expr) = constant;
1091 result_id = lookup_expression_id (&expr);
1092 if (result_id != 0)
1093 return expression_for_id (result_id);
1094
1095 newexpr = pre_expr_pool.allocate ();
1096 newexpr->kind = CONSTANT;
1097 PRE_EXPR_CONSTANT (newexpr) = constant;
1098 alloc_expression_id (newexpr);
1099 value_id = get_or_alloc_constant_value_id (constant);
1100 add_to_value (value_id, newexpr);
1101 return newexpr;
1102 }
1103
1104 /* Given a value id V, find the actual tree representing the constant
1105 value if there is one, and return it. Return NULL if we can't find
1106 a constant. */
1107
1108 static tree
1109 get_constant_for_value_id (unsigned int v)
1110 {
1111 if (value_id_constant_p (v))
1112 {
1113 unsigned int i;
1114 bitmap_iterator bi;
1115 bitmap exprset = value_expressions[v];
1116
1117 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1118 {
1119 pre_expr expr = expression_for_id (i);
1120 if (expr->kind == CONSTANT)
1121 return PRE_EXPR_CONSTANT (expr);
1122 }
1123 }
1124 return NULL;
1125 }
1126
1127 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1128 Currently only supports constants and SSA_NAMES. */
1129 static pre_expr
1130 get_or_alloc_expr_for (tree t)
1131 {
1132 if (TREE_CODE (t) == SSA_NAME)
1133 return get_or_alloc_expr_for_name (t);
1134 else if (is_gimple_min_invariant (t))
1135 return get_or_alloc_expr_for_constant (t);
1136 else
1137 {
1138 /* More complex expressions can result from SCCVN expression
1139 simplification that inserts values for them. As none of them
1140 have VOPs, they get handled by the nary ops struct. */
1141 vn_nary_op_t result;
1142 unsigned int result_id;
1143 vn_nary_op_lookup (t, &result);
1144 if (result != NULL)
1145 {
1146 pre_expr e = pre_expr_pool.allocate ();
1147 e->kind = NARY;
1148 PRE_EXPR_NARY (e) = result;
1149 result_id = lookup_expression_id (e);
1150 if (result_id != 0)
1151 {
1152 pre_expr_pool.remove (e);
1153 e = expression_for_id (result_id);
1154 return e;
1155 }
1156 alloc_expression_id (e);
1157 return e;
1158 }
1159 }
1160 return NULL;
1161 }
1162
1163 /* Return the folded version of E if E, when folded, is a gimple
1164 min_invariant. Otherwise, return E. */
1165
1166 static pre_expr
1167 fully_constant_expression (pre_expr e)
1168 {
1169 switch (e->kind)
1170 {
1171 case CONSTANT:
1172 return e;
1173 case NARY:
1174 {
1175 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1176 switch (TREE_CODE_CLASS (nary->opcode))
1177 {
1178 case tcc_binary:
1179 case tcc_comparison:
1180 {
1181 /* We have to go from trees to pre exprs to value ids to
1182 constants. */
1183 tree naryop0 = nary->op[0];
1184 tree naryop1 = nary->op[1];
1185 tree result;
1186 if (!is_gimple_min_invariant (naryop0))
1187 {
1188 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1189 unsigned int vrep0 = get_expr_value_id (rep0);
1190 tree const0 = get_constant_for_value_id (vrep0);
1191 if (const0)
1192 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1193 }
1194 if (!is_gimple_min_invariant (naryop1))
1195 {
1196 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1197 unsigned int vrep1 = get_expr_value_id (rep1);
1198 tree const1 = get_constant_for_value_id (vrep1);
1199 if (const1)
1200 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1201 }
1202 result = fold_binary (nary->opcode, nary->type,
1203 naryop0, naryop1);
1204 if (result && is_gimple_min_invariant (result))
1205 return get_or_alloc_expr_for_constant (result);
1206 /* We might have simplified the expression to a
1207 SSA_NAME for example from x_1 * 1. But we cannot
1208 insert a PHI for x_1 unconditionally as x_1 might
1209 not be available readily. */
1210 return e;
1211 }
1212 case tcc_reference:
1213 if (nary->opcode != REALPART_EXPR
1214 && nary->opcode != IMAGPART_EXPR
1215 && nary->opcode != VIEW_CONVERT_EXPR)
1216 return e;
1217 /* Fallthrough. */
1218 case tcc_unary:
1219 {
1220 /* We have to go from trees to pre exprs to value ids to
1221 constants. */
1222 tree naryop0 = nary->op[0];
1223 tree const0, result;
1224 if (is_gimple_min_invariant (naryop0))
1225 const0 = naryop0;
1226 else
1227 {
1228 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1229 unsigned int vrep0 = get_expr_value_id (rep0);
1230 const0 = get_constant_for_value_id (vrep0);
1231 }
1232 result = NULL;
1233 if (const0)
1234 {
1235 tree type1 = TREE_TYPE (nary->op[0]);
1236 const0 = fold_convert (type1, const0);
1237 result = fold_unary (nary->opcode, nary->type, const0);
1238 }
1239 if (result && is_gimple_min_invariant (result))
1240 return get_or_alloc_expr_for_constant (result);
1241 return e;
1242 }
1243 default:
1244 return e;
1245 }
1246 }
1247 case REFERENCE:
1248 {
1249 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1250 tree folded;
1251 if ((folded = fully_constant_vn_reference_p (ref)))
1252 return get_or_alloc_expr_for_constant (folded);
1253 return e;
1254 }
1255 default:
1256 return e;
1257 }
1258 return e;
1259 }
1260
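/* For example (names invented): for the NARY {mult_expr, x_1, y_2}
   where y_2's value id maps to the constant 0, the operand is
   replaced and fold_binary yields 0, so a CONSTANT pre_expr is
   returned.  If folding instead produces an SSA_NAME, as for
   x_1 * 1 -> x_1, the expression is returned unchanged because x_1
   might not be readily available for insertion.  */
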
1261 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1262 it has the value it would have in BLOCK. Set *SAME_VALID to true
1263 in case the new vuse doesn't change the value id of the OPERANDS. */
1264
1265 static tree
1266 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1267 alias_set_type set, tree type, tree vuse,
1268 basic_block phiblock,
1269 basic_block block, bool *same_valid)
1270 {
1271 gimple *phi = SSA_NAME_DEF_STMT (vuse);
1272 ao_ref ref;
1273 edge e = NULL;
1274 bool use_oracle;
1275
1276 *same_valid = true;
1277
1278 if (gimple_bb (phi) != phiblock)
1279 return vuse;
1280
1281 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1282
1283 /* Use the alias-oracle to find either the PHI node in this block,
1284 the first VUSE used in this block that is equivalent to vuse or
1285 the first VUSE whose definition in this block kills the value. */
1286 if (gimple_code (phi) == GIMPLE_PHI)
1287 e = find_edge (block, phiblock);
1288 else if (use_oracle)
1289 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1290 {
1291 vuse = gimple_vuse (phi);
1292 phi = SSA_NAME_DEF_STMT (vuse);
1293 if (gimple_bb (phi) != phiblock)
1294 return vuse;
1295 if (gimple_code (phi) == GIMPLE_PHI)
1296 {
1297 e = find_edge (block, phiblock);
1298 break;
1299 }
1300 }
1301 else
1302 return NULL_TREE;
1303
1304 if (e)
1305 {
1306 if (use_oracle)
1307 {
1308 bitmap visited = NULL;
1309 unsigned int cnt;
1310 /* Try to find a vuse that dominates this phi node by skipping
1311 non-clobbering statements. */
1312 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1313 NULL, NULL);
1314 if (visited)
1315 BITMAP_FREE (visited);
1316 }
1317 else
1318 vuse = NULL_TREE;
1319 if (!vuse)
1320 {
1321 /* If we didn't find any, the value ID can't stay the same,
1322 but return the translated vuse. */
1323 *same_valid = false;
1324 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1325 }
1326 /* ??? We would like to return vuse here as this is the canonical
1327 upmost vdef that this reference is associated with. But during
1328 insertion of the references into the hash tables we only ever
1329 directly insert with their direct gimple_vuse, hence returning
1330 something else would make us not find the other expression. */
1331 return PHI_ARG_DEF (phi, e->dest_idx);
1332 }
1333
1334 return NULL_TREE;
1335 }
1336
1337 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1338 SET2. This is used to avoid making a set consisting of the union
1339 of PA_IN and ANTIC_IN during insert. */
1340
1341 static inline pre_expr
1342 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1343 {
1344 pre_expr result;
1345
1346 result = bitmap_find_leader (set1, val);
1347 if (!result && set2)
1348 result = bitmap_find_leader (set2, val);
1349 return result;
1350 }
1351
1352 /* Get the tree type for our PRE expression E. */
1353
1354 static tree
1355 get_expr_type (const pre_expr e)
1356 {
1357 switch (e->kind)
1358 {
1359 case NAME:
1360 return TREE_TYPE (PRE_EXPR_NAME (e));
1361 case CONSTANT:
1362 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1363 case REFERENCE:
1364 return PRE_EXPR_REFERENCE (e)->type;
1365 case NARY:
1366 return PRE_EXPR_NARY (e)->type;
1367 }
1368 gcc_unreachable ();
1369 }
1370
1371 /* Get a representative SSA_NAME for a given expression.
1372 Since all of our sub-expressions are treated as values, we require
1373 them to be SSA_NAME's for simplicity.
1374 Prior versions of GVNPRE used to use "value handles" here, so that
1375 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1376 either case, the operands are really values (IE we do not expect
1377 them to be usable without finding leaders). */
1378
1379 static tree
1380 get_representative_for (const pre_expr e)
1381 {
1382 tree name;
1383 unsigned int value_id = get_expr_value_id (e);
1384
1385 switch (e->kind)
1386 {
1387 case NAME:
1388 return PRE_EXPR_NAME (e);
1389 case CONSTANT:
1390 return PRE_EXPR_CONSTANT (e);
1391 case NARY:
1392 case REFERENCE:
1393 {
1394 /* Go through all of the expressions representing this value
1395 and pick out an SSA_NAME. */
1396 unsigned int i;
1397 bitmap_iterator bi;
1398 bitmap exprs = value_expressions[value_id];
1399 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1400 {
1401 pre_expr rep = expression_for_id (i);
1402 if (rep->kind == NAME)
1403 return PRE_EXPR_NAME (rep);
1404 else if (rep->kind == CONSTANT)
1405 return PRE_EXPR_CONSTANT (rep);
1406 }
1407 }
1408 break;
1409 }
1410
1411 /* If we reached here we couldn't find an SSA_NAME. This can
1412 happen when we've discovered a value that has never appeared in
1413 the program as set to an SSA_NAME, as the result of phi translation.
1414 Create one here.
1415 ??? We should be able to re-use this when we insert the statement
1416 to compute it. */
1417 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1418 VN_INFO_GET (name)->value_id = value_id;
1419 VN_INFO (name)->valnum = name;
1420 /* ??? For now mark this SSA name for release by SCCVN. */
1421 VN_INFO (name)->needs_insertion = true;
1422 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1423 if (dump_file && (dump_flags & TDF_DETAILS))
1424 {
1425 fprintf (dump_file, "Created SSA_NAME representative ");
1426 print_generic_expr (dump_file, name, 0);
1427 fprintf (dump_file, " for expression:");
1428 print_pre_expr (dump_file, e);
1429 fprintf (dump_file, " (%04d)\n", value_id);
1430 }
1431
1432 return name;
1433 }
1434
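/* For example (names invented): if phi translation produces the
   value {a_2 + b_3} and no SSA_NAME in the program ever carried that
   value, get_representative_for mints pretmp_7, registers it in the
   VN tables under the value's id, and returns it so the NARY can
   appear as an operand of a further translated expression.  */
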
1435
1436
1437 static pre_expr
1438 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1439 basic_block pred, basic_block phiblock);
1440
1441 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1442 the phis in PRED. Return NULL if we can't find a leader for each part
1443 of the translated expression. */
1444
1445 static pre_expr
1446 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1447 basic_block pred, basic_block phiblock)
1448 {
1449 switch (expr->kind)
1450 {
1451 case NARY:
1452 {
1453 unsigned int i;
1454 bool changed = false;
1455 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1456 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1457 sizeof_vn_nary_op (nary->length));
1458 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1459
1460 for (i = 0; i < newnary->length; i++)
1461 {
1462 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1463 continue;
1464 else
1465 {
1466 pre_expr leader, result;
1467 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1468 leader = find_leader_in_sets (op_val_id, set1, set2);
1469 result = phi_translate (leader, set1, set2, pred, phiblock);
1470 if (result && result != leader)
1471 {
1472 tree name = get_representative_for (result);
1473 if (!name)
1474 return NULL;
1475 newnary->op[i] = name;
1476 }
1477 else if (!result)
1478 return NULL;
1479
1480 changed |= newnary->op[i] != nary->op[i];
1481 }
1482 }
1483 if (changed)
1484 {
1485 pre_expr constant;
1486 unsigned int new_val_id;
1487
1488 tree result = vn_nary_op_lookup_pieces (newnary->length,
1489 newnary->opcode,
1490 newnary->type,
1491 &newnary->op[0],
1492 &nary);
1493 if (result && is_gimple_min_invariant (result))
1494 return get_or_alloc_expr_for_constant (result);
1495
1496 expr = pre_expr_pool.allocate ();
1497 expr->kind = NARY;
1498 expr->id = 0;
1499 if (nary)
1500 {
1501 PRE_EXPR_NARY (expr) = nary;
1502 constant = fully_constant_expression (expr);
1503 if (constant != expr)
1504 return constant;
1505
1506 new_val_id = nary->value_id;
1507 get_or_alloc_expression_id (expr);
1508 }
1509 else
1510 {
1511 new_val_id = get_next_value_id ();
1512 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1513 nary = vn_nary_op_insert_pieces (newnary->length,
1514 newnary->opcode,
1515 newnary->type,
1516 &newnary->op[0],
1517 result, new_val_id);
1518 PRE_EXPR_NARY (expr) = nary;
1519 constant = fully_constant_expression (expr);
1520 if (constant != expr)
1521 return constant;
1522 get_or_alloc_expression_id (expr);
1523 }
1524 add_to_value (new_val_id, expr);
1525 }
1526 return expr;
1527 }
1528 break;
1529
1530 case REFERENCE:
1531 {
1532 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1533 vec<vn_reference_op_s> operands = ref->operands;
1534 tree vuse = ref->vuse;
1535 tree newvuse = vuse;
1536 vec<vn_reference_op_s> newoperands = vNULL;
1537 bool changed = false, same_valid = true;
1538 unsigned int i, n;
1539 vn_reference_op_t operand;
1540 vn_reference_t newref;
1541
1542 for (i = 0; operands.iterate (i, &operand); i++)
1543 {
1544 pre_expr opresult;
1545 pre_expr leader;
1546 tree op[3];
1547 tree type = operand->type;
1548 vn_reference_op_s newop = *operand;
1549 op[0] = operand->op0;
1550 op[1] = operand->op1;
1551 op[2] = operand->op2;
1552 for (n = 0; n < 3; ++n)
1553 {
1554 unsigned int op_val_id;
1555 if (!op[n])
1556 continue;
1557 if (TREE_CODE (op[n]) != SSA_NAME)
1558 {
1559 /* We can't possibly insert these. */
1560 if (n != 0
1561 && !is_gimple_min_invariant (op[n]))
1562 break;
1563 continue;
1564 }
1565 op_val_id = VN_INFO (op[n])->value_id;
1566 leader = find_leader_in_sets (op_val_id, set1, set2);
1567 if (!leader)
1568 break;
1569 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1570 if (!opresult)
1571 break;
1572 if (opresult != leader)
1573 {
1574 tree name = get_representative_for (opresult);
1575 if (!name)
1576 break;
1577 changed |= name != op[n];
1578 op[n] = name;
1579 }
1580 }
1581 if (n != 3)
1582 {
1583 newoperands.release ();
1584 return NULL;
1585 }
1586 if (!changed)
1587 continue;
1588 if (!newoperands.exists ())
1589 newoperands = operands.copy ();
1590 /* We may have changed from an SSA_NAME to a constant. */
1591 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1592 newop.opcode = TREE_CODE (op[0]);
1593 newop.type = type;
1594 newop.op0 = op[0];
1595 newop.op1 = op[1];
1596 newop.op2 = op[2];
1597 newoperands[i] = newop;
1598 }
1599 gcc_checking_assert (i == operands.length ());
1600
1601 if (vuse)
1602 {
1603 newvuse = translate_vuse_through_block (newoperands.exists ()
1604 ? newoperands : operands,
1605 ref->set, ref->type,
1606 vuse, phiblock, pred,
1607 &same_valid);
1608 if (newvuse == NULL_TREE)
1609 {
1610 newoperands.release ();
1611 return NULL;
1612 }
1613 }
1614
1615 if (changed || newvuse != vuse)
1616 {
1617 unsigned int new_val_id;
1618 pre_expr constant;
1619
1620 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1621 ref->type,
1622 newoperands.exists ()
1623 ? newoperands : operands,
1624 &newref, VN_WALK);
1625 if (result)
1626 newoperands.release ();
1627
1628 /* We can always insert constants, so if we have a partial
1629 redundant constant load of another type try to translate it
1630 to a constant of appropriate type. */
1631 if (result && is_gimple_min_invariant (result))
1632 {
1633 tree tem = result;
1634 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1635 {
1636 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1637 if (tem && !is_gimple_min_invariant (tem))
1638 tem = NULL_TREE;
1639 }
1640 if (tem)
1641 return get_or_alloc_expr_for_constant (tem);
1642 }
1643
1644 /* If we'd have to convert things we would need to validate
1645 if we can insert the translated expression. So fail
1646 here for now - we cannot insert an alias with a different
1647 type in the VN tables either, as that would assert. */
1648 if (result
1649 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1650 return NULL;
1651 else if (!result && newref
1652 && !useless_type_conversion_p (ref->type, newref->type))
1653 {
1654 newoperands.release ();
1655 return NULL;
1656 }
1657
1658 expr = pre_expr_pool.allocate ();
1659 expr->kind = REFERENCE;
1660 expr->id = 0;
1661
1662 if (newref)
1663 {
1664 PRE_EXPR_REFERENCE (expr) = newref;
1665 constant = fully_constant_expression (expr);
1666 if (constant != expr)
1667 return constant;
1668
1669 new_val_id = newref->value_id;
1670 get_or_alloc_expression_id (expr);
1671 }
1672 else
1673 {
1674 if (changed || !same_valid)
1675 {
1676 new_val_id = get_next_value_id ();
1677 value_expressions.safe_grow_cleared
1678 (get_max_value_id () + 1);
1679 }
1680 else
1681 new_val_id = ref->value_id;
1682 if (!newoperands.exists ())
1683 newoperands = operands.copy ();
1684 newref = vn_reference_insert_pieces (newvuse, ref->set,
1685 ref->type,
1686 newoperands,
1687 result, new_val_id);
1688 newoperands = vNULL;
1689 PRE_EXPR_REFERENCE (expr) = newref;
1690 constant = fully_constant_expression (expr);
1691 if (constant != expr)
1692 return constant;
1693 get_or_alloc_expression_id (expr);
1694 }
1695 add_to_value (new_val_id, expr);
1696 }
1697 newoperands.release ();
1698 return expr;
1699 }
1700 break;
1701
1702 case NAME:
1703 {
1704 tree name = PRE_EXPR_NAME (expr);
1705 gimple *def_stmt = SSA_NAME_DEF_STMT (name);
1706 /* If the SSA name is defined by a PHI node in this block,
1707 translate it. */
1708 if (gimple_code (def_stmt) == GIMPLE_PHI
1709 && gimple_bb (def_stmt) == phiblock)
1710 {
1711 edge e = find_edge (pred, gimple_bb (def_stmt));
1712 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1713
1714 /* Handle constant. */
1715 if (is_gimple_min_invariant (def))
1716 return get_or_alloc_expr_for_constant (def);
1717
1718 return get_or_alloc_expr_for_name (def);
1719 }
1720 /* Otherwise return it unchanged - it will get removed if its
1721 value is not available in PRED's AVAIL_OUT set of expressions
1722 by the subtraction of TMP_GEN. */
1723 return expr;
1724 }
1725
1726 default:
1727 gcc_unreachable ();
1728 }
1729 }
1730
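/* For example (names invented): given b_2 = PHI <b_4(pred1),
   b_6(pred2)> in PHIBLOCK, translating the NARY a_1 + b_2 along the
   edge from pred1 substitutes b_4 for b_2, yielding a_1 + b_4, which
   is looked up in or newly entered into the VN tables.  A NAME
   defined by the PHI itself simply translates to the corresponding
   PHI argument.  */
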
1731 /* Wrapper around phi_translate_1 providing caching functionality. */
1732
1733 static pre_expr
1734 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1735 basic_block pred, basic_block phiblock)
1736 {
1737 expr_pred_trans_t slot = NULL;
1738 pre_expr phitrans;
1739
1740 if (!expr)
1741 return NULL;
1742
1743 /* Constants contain no values that need translation. */
1744 if (expr->kind == CONSTANT)
1745 return expr;
1746
1747 if (value_id_constant_p (get_expr_value_id (expr)))
1748 return expr;
1749
1750 /* Don't add translations of NAMEs as those are cheap to translate. */
1751 if (expr->kind != NAME)
1752 {
1753 if (phi_trans_add (&slot, expr, pred))
1754 return slot->v;
1755 /* Store NULL for the value we want to return in the case of
1756 recursing. */
1757 slot->v = NULL;
1758 }
1759
1760 /* Translate. */
1761 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1762
1763 if (slot)
1764 {
1765 if (phitrans)
1766 slot->v = phitrans;
1767 else
1768 /* Remove failed translations again, they cause insert
1769 iteration to not pick up new opportunities reliably. */
1770 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1771 }
1772
1773 return phitrans;
1774 }
1775
1776
1777 /* For each expression in SET, translate the values through phi nodes
1778 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1779 expressions in DEST. */
1780
1781 static void
1782 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1783 basic_block phiblock)
1784 {
1785 vec<pre_expr> exprs;
1786 pre_expr expr;
1787 int i;
1788
1789 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1790 {
1791 bitmap_set_copy (dest, set);
1792 return;
1793 }
1794
1795 exprs = sorted_array_from_bitmap_set (set);
1796 FOR_EACH_VEC_ELT (exprs, i, expr)
1797 {
1798 pre_expr translated;
1799 translated = phi_translate (expr, set, NULL, pred, phiblock);
1800 if (!translated)
1801 continue;
1802
1803 /* We might end up with multiple expressions from SET being
1804 translated to the same value. In this case we do not want
1805 to retain the NARY or REFERENCE expression but prefer a NAME
1806 which would be the leader. */
1807 if (translated->kind == NAME)
1808 bitmap_value_replace_in_set (dest, translated);
1809 else
1810 bitmap_value_insert_into_set (dest, translated);
1811 }
1812 exprs.release ();
1813 }
1814
1815 /* Find the leader for a value (i.e., the name representing that
1816 value) in a given set, and return it. Return NULL if no leader
1817 is found. */
1818
1819 static pre_expr
1820 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1821 {
1822 if (value_id_constant_p (val))
1823 {
1824 unsigned int i;
1825 bitmap_iterator bi;
1826 bitmap exprset = value_expressions[val];
1827
1828 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1829 {
1830 pre_expr expr = expression_for_id (i);
1831 if (expr->kind == CONSTANT)
1832 return expr;
1833 }
1834 }
1835 if (bitmap_set_contains_value (set, val))
1836 {
1837 /* Rather than walk the entire bitmap of expressions, and see
1838 whether any of them has the value we are looking for, we look
1839 at the reverse mapping, which tells us the set of expressions
1840 that have a given value (IE value->expressions with that
1841 value) and see if any of those expressions are in our set.
1842 The number of expressions per value is usually significantly
1843 less than the number of expressions in the set. In fact, for
1844 large testcases, doing it this way is roughly 5-10x faster
1845 than walking the bitmap.
1846 If this is somehow a significant loss for some cases, we can
1847 choose which set to walk based on which set is smaller. */
1848 unsigned int i;
1849 bitmap_iterator bi;
1850 bitmap exprset = value_expressions[val];
1851
1852 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1853 return expression_for_id (i);
1854 }
1855 return NULL;
1856 }
1857
1858 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1859 BLOCK by seeing if it is not killed in the block. Note that we are
1860 only determining whether there is a store that kills it. Because
1861 of the order in which clean iterates over values, we are guaranteed
1862 that altered operands will have caused us to be eliminated from the
1863 ANTIC_IN set already. */
1864
1865 static bool
1866 value_dies_in_block_x (pre_expr expr, basic_block block)
1867 {
1868 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1869 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1870 gimple *def;
1871 gimple_stmt_iterator gsi;
1872 unsigned id = get_expression_id (expr);
1873 bool res = false;
1874 ao_ref ref;
1875
1876 if (!vuse)
1877 return false;
1878
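/* EXPR_DIES packs two bits per expression id: bit 2*ID records
   whether the answer for this expression has been computed, and
   bit 2*ID + 1 holds the cached answer itself.  */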
1879 /* Lookup a previously calculated result. */
1880 if (EXPR_DIES (block)
1881 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1882 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1883
1884 /* A memory expression {e, VUSE} dies in the block if there is a
1885 statement that may clobber e. If, starting the statement walk from the
1886 top of the basic block, a statement uses VUSE, there can be no kill
1887 in between that use and the original statement that loaded {e, VUSE},
1888 so we can stop walking. */
1889 ref.base = NULL_TREE;
1890 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1891 {
1892 tree def_vuse, def_vdef;
1893 def = gsi_stmt (gsi);
1894 def_vuse = gimple_vuse (def);
1895 def_vdef = gimple_vdef (def);
1896
1897 /* Not a memory statement. */
1898 if (!def_vuse)
1899 continue;
1900
1901 /* Not a may-def. */
1902 if (!def_vdef)
1903 {
1904 /* A load with the same VUSE, we're done. */
1905 if (def_vuse == vuse)
1906 break;
1907
1908 continue;
1909 }
1910
1911 /* Init ref only if we really need it. */
1912 if (ref.base == NULL_TREE
1913 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1914 refx->operands))
1915 {
1916 res = true;
1917 break;
1918 }
1919 /* If the statement may clobber expr, it dies. */
1920 if (stmt_may_clobber_ref_p_1 (def, &ref))
1921 {
1922 res = true;
1923 break;
1924 }
1925 }
1926
1927 /* Remember the result. */
1928 if (!EXPR_DIES (block))
1929 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1930 bitmap_set_bit (EXPR_DIES (block), id * 2);
1931 if (res)
1932 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1933
1934 return res;
1935 }
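/* A sketch of the EXPR_DIES cache layout used above, for a
   hypothetical expression id of 5: bit 10 (5 * 2) records that the
   question was answered for this block and bit 11 (5 * 2 + 1)
   records the answer itself, so a repeated query costs two bitmap
   tests instead of another statement walk.  */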
1936
1937
1938 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1939 contains its value-id. */
1940
1941 static bool
1942 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1943 {
1944 if (op && TREE_CODE (op) == SSA_NAME)
1945 {
1946 unsigned int value_id = VN_INFO (op)->value_id;
1947 if (!(bitmap_set_contains_value (set1, value_id)
1948 || (set2 && bitmap_set_contains_value (set2, value_id))))
1949 return false;
1950 }
1951 return true;
1952 }
1953
1954 /* Determine if the expression EXPR is valid in SET1 U SET2.
1955 ONLY SET2 CAN BE NULL.
1956 This means that we have a leader for each part of the expression
1957 (if it consists of values), or the expression is an SSA_NAME.
1958 For loads/calls, we also see if the vuse is killed in this block. */
1959
1960 static bool
1961 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1962 {
1963 switch (expr->kind)
1964 {
1965 case NAME:
1966 /* By construction all NAMEs are available. Non-available
1967 NAMEs are removed by subtracting TMP_GEN from the sets. */
1968 return true;
1969 case NARY:
1970 {
1971 unsigned int i;
1972 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1973 for (i = 0; i < nary->length; i++)
1974 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1975 return false;
1976 return true;
1977 }
1978 break;
1979 case REFERENCE:
1980 {
1981 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1982 vn_reference_op_t vro;
1983 unsigned int i;
1984
1985 FOR_EACH_VEC_ELT (ref->operands, i, vro)
1986 {
1987 if (!op_valid_in_sets (set1, set2, vro->op0)
1988 || !op_valid_in_sets (set1, set2, vro->op1)
1989 || !op_valid_in_sets (set1, set2, vro->op2))
1990 return false;
1991 }
1992 return true;
1993 }
1994 default:
1995 gcc_unreachable ();
1996 }
1997 }
1998
1999 /* Clean the set of expressions that are no longer valid in SET1 or
2000 SET2. This means expressions that are made up of values we have no
2001 leaders for in SET1 or SET2. This version is used for partial
2002 anticipation, which means it is not valid in either ANTIC_IN or
2003 PA_IN. */
2004
2005 static void
2006 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
2007 {
2008 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2009 pre_expr expr;
2010 int i;
2011
2012 FOR_EACH_VEC_ELT (exprs, i, expr)
2013 {
2014 if (!valid_in_sets (set1, set2, expr))
2015 bitmap_remove_from_set (set1, expr);
2016 }
2017 exprs.release ();
2018 }
2019
2020 /* Clean the set of expressions that are no longer valid in SET. This
2021 means expressions that are made up of values we have no leaders for
2022 in SET. */
2023
2024 static void
2025 clean (bitmap_set_t set)
2026 {
2027 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2028 pre_expr expr;
2029 int i;
2030
2031 FOR_EACH_VEC_ELT (exprs, i, expr)
2032 {
2033 if (!valid_in_sets (set, NULL, expr))
2034 bitmap_remove_from_set (set, expr);
2035 }
2036 exprs.release ();
2037 }
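/* For example (names hypothetical): if the NARY a_1 + b_2 is in SET
   but b_2's value no longer has a leader there, valid_in_sets fails
   for the NARY and clean () removes it, keeping SET closed under
   availability of operands.  */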
2038
2039 /* Clean the set of expressions that are no longer valid in SET because
2040 they are clobbered in BLOCK or because they trap and may not be executed. */
2041
2042 static void
2043 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2044 {
2045 bitmap_iterator bi;
2046 unsigned i;
2047
2048 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2049 {
2050 pre_expr expr = expression_for_id (i);
2051 if (expr->kind == REFERENCE)
2052 {
2053 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2054 if (ref->vuse)
2055 {
2056 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2057 if (!gimple_nop_p (def_stmt)
2058 && ((gimple_bb (def_stmt) != block
2059 && !dominated_by_p (CDI_DOMINATORS,
2060 block, gimple_bb (def_stmt)))
2061 || (gimple_bb (def_stmt) == block
2062 && value_dies_in_block_x (expr, block))))
2063 bitmap_remove_from_set (set, expr);
2064 }
2065 }
2066 else if (expr->kind == NARY)
2067 {
2068 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2069 /* If the NARY may trap, make sure the block does not contain
2070 a possible exit point.
2071 ??? This is overly conservative if we translate AVAIL_OUT
2072 as the available expression might be after the exit point. */
2073 if (BB_MAY_NOTRETURN (block)
2074 && vn_nary_may_trap (nary))
2075 bitmap_remove_from_set (set, expr);
2076 }
2077 }
2078 }
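/* As an illustration (names hypothetical), for a block

       *p_1 = x_2;     <- may clobber the reference below
       ... = a.b;      <- REFERENCE {a.b, VUSE} in SET

   the load of a.b cannot be anticipated at the top of the block when
   *p_1 may alias a.b, so the REFERENCE is removed from SET above.  */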
2079
2080 static sbitmap has_abnormal_preds;
2081
2082 /* List of blocks that may have changed during ANTIC computation and
2083 thus need to be iterated over. */
2084
2085 static sbitmap changed_blocks;
2086
2087 /* Compute the ANTIC set for BLOCK.
2088
2089 If succs(BLOCK) > 1 then
2090 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2091 else if succs(BLOCK) == 1 then
2092 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2093
2094 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2095 */
2096
2097 static bool
2098 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2099 {
2100 bool changed = false;
2101 bitmap_set_t S, old, ANTIC_OUT;
2102 bitmap_iterator bi;
2103 unsigned int bii;
2104 edge e;
2105 edge_iterator ei;
2106
2107 old = ANTIC_OUT = S = NULL;
2108 BB_VISITED (block) = 1;
2109
2110 /* If any edges from predecessors are abnormal, antic_in is empty,
2111 so do nothing. */
2112 if (block_has_abnormal_pred_edge)
2113 goto maybe_dump_sets;
2114
2115 old = ANTIC_IN (block);
2116 ANTIC_OUT = bitmap_set_new ();
2117
2118 /* If the block has no successors, ANTIC_OUT is empty. */
2119 if (EDGE_COUNT (block->succs) == 0)
2120 ;
2121 /* If we have one successor, we could have some phi nodes to
2122 translate through. */
2123 else if (single_succ_p (block))
2124 {
2125 basic_block succ_bb = single_succ (block);
2126 gcc_assert (BB_VISITED (succ_bb));
2127 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2128 }
2129 /* If we have multiple successors, we take the intersection of all of
2130 them. Note that in the case of loop exit phi nodes, we may have
2131 phis to translate through. */
2132 else
2133 {
2134 size_t i;
2135 basic_block bprime, first = NULL;
2136
2137 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2138 FOR_EACH_EDGE (e, ei, block->succs)
2139 {
2140 if (!first
2141 && BB_VISITED (e->dest))
2142 first = e->dest;
2143 else if (BB_VISITED (e->dest))
2144 worklist.quick_push (e->dest);
2145 }
2146
2147 /* With multiple successors we must already have visited at
2148 least one, which is guaranteed by the iteration order. */
2149 gcc_assert (first != NULL);
2150
2151 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2152
2153 FOR_EACH_VEC_ELT (worklist, i, bprime)
2154 {
2155 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2156 {
2157 bitmap_set_t tmp = bitmap_set_new ();
2158 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2159 bitmap_set_and (ANTIC_OUT, tmp);
2160 bitmap_set_free (tmp);
2161 }
2162 else
2163 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2164 }
2165 }
2166
2167 /* Prune expressions that are clobbered in block and thus become
2168 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2169 prune_clobbered_mems (ANTIC_OUT, block);
2170
2171 /* Generate ANTIC_OUT - TMP_GEN. */
2172 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2173
2174 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2175 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2176 TMP_GEN (block));
2177
2178 /* Then union in the ANTIC_OUT - TMP_GEN values,
2179 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2180 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2181 bitmap_value_insert_into_set (ANTIC_IN (block),
2182 expression_for_id (bii));
2183
2184 clean (ANTIC_IN (block));
2185
2186 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2187 {
2188 changed = true;
2189 bitmap_set_bit (changed_blocks, block->index);
2190 FOR_EACH_EDGE (e, ei, block->preds)
2191 bitmap_set_bit (changed_blocks, e->src->index);
2192 }
2193 else
2194 bitmap_clear_bit (changed_blocks, block->index);
2195
2196 maybe_dump_sets:
2197 if (dump_file && (dump_flags & TDF_DETAILS))
2198 {
2199 if (ANTIC_OUT)
2200 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2201
2202 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2203 block->index);
2204
2205 if (S)
2206 print_bitmap_set (dump_file, S, "S", block->index);
2207 }
2208 if (old)
2209 bitmap_set_free (old);
2210 if (S)
2211 bitmap_set_free (S);
2212 if (ANTIC_OUT)
2213 bitmap_set_free (ANTIC_OUT);
2214 return changed;
2215 }
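/* A small worked instance of the ANTIC_IN equation above, with
   hypothetical blocks and names.  For a diamond

              bb2
             /   \
           bb3   bb4
             \   /
              bb5:  ... = a_1 + b_2;

   a_1 + b_2 enters ANTIC_IN (bb5) via EXP_GEN, flows into
   ANTIC_OUT (bb3) and ANTIC_OUT (bb4) through their single successor,
   and because both of bb2's successors anticipate the same value the
   intersection puts the expression into ANTIC_IN (bb2), making it a
   candidate for hoisting above the diamond.  */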
2216
2217 /* Compute PARTIAL_ANTIC for BLOCK.
2218
2219 If succs(BLOCK) > 1 then
2220 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2221 in ANTIC_OUT for all succ(BLOCK)
2222 else if succs(BLOCK) == 1 then
2223 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2224
2225 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2226 - ANTIC_IN[BLOCK])
2227
2228 */
2229 static bool
2230 compute_partial_antic_aux (basic_block block,
2231 bool block_has_abnormal_pred_edge)
2232 {
2233 bool changed = false;
2234 bitmap_set_t old_PA_IN;
2235 bitmap_set_t PA_OUT;
2236 edge e;
2237 edge_iterator ei;
2238 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2239
2240 old_PA_IN = PA_OUT = NULL;
2241
2242 /* If any edges from predecessors are abnormal, antic_in is empty,
2243 so do nothing. */
2244 if (block_has_abnormal_pred_edge)
2245 goto maybe_dump_sets;
2246
2247 /* If there are too many partially anticipatable values in the
2248 block, phi_translate_set can take exponential time: stop
2249 before the translation starts. */
2250 if (max_pa
2251 && single_succ_p (block)
2252 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2253 goto maybe_dump_sets;
2254
2255 old_PA_IN = PA_IN (block);
2256 PA_OUT = bitmap_set_new ();
2257
2258 /* If the block has no successors, ANTIC_OUT is empty. */
2259 if (EDGE_COUNT (block->succs) == 0)
2260 ;
2261 /* If we have one successor, we could have some phi nodes to
2262 translate through. Note that we can't phi translate across DFS
2263 back edges in partial antic, because it uses a union operation on
2264 the successors. For recurrences like IVs, we will end up
2265 generating a new value in the set on each go around (i + 3 (VH.1),
2266 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2267 else if (single_succ_p (block))
2268 {
2269 basic_block succ = single_succ (block);
2270 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2271 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2272 }
2273 /* If we have multiple successors, we take the union of all of
2274 them. */
2275 else
2276 {
2277 size_t i;
2278 basic_block bprime;
2279
2280 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2281 FOR_EACH_EDGE (e, ei, block->succs)
2282 {
2283 if (e->flags & EDGE_DFS_BACK)
2284 continue;
2285 worklist.quick_push (e->dest);
2286 }
2287 if (worklist.length () > 0)
2288 {
2289 FOR_EACH_VEC_ELT (worklist, i, bprime)
2290 {
2291 unsigned int i;
2292 bitmap_iterator bi;
2293
2294 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2295 bitmap_value_insert_into_set (PA_OUT,
2296 expression_for_id (i));
2297 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2298 {
2299 bitmap_set_t pa_in = bitmap_set_new ();
2300 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2301 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2302 bitmap_value_insert_into_set (PA_OUT,
2303 expression_for_id (i));
2304 bitmap_set_free (pa_in);
2305 }
2306 else
2307 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2308 bitmap_value_insert_into_set (PA_OUT,
2309 expression_for_id (i));
2310 }
2311 }
2312 }
2313
2314 /* Prune expressions that are clobbered in block and thus become
2315 invalid if translated from PA_OUT to PA_IN. */
2316 prune_clobbered_mems (PA_OUT, block);
2317
2318 /* PA_IN starts with PA_OUT - TMP_GEN.
2319 Then we subtract things from ANTIC_IN. */
2320 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2321
2322 /* For partial antic, we want to put back in the phi results, since
2323 we will properly avoid making them partially antic over backedges. */
2324 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2325 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2326
2327 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2328 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2329
2330 dependent_clean (PA_IN (block), ANTIC_IN (block));
2331
2332 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2333 {
2334 changed = true;
2335 bitmap_set_bit (changed_blocks, block->index);
2336 FOR_EACH_EDGE (e, ei, block->preds)
2337 bitmap_set_bit (changed_blocks, e->src->index);
2338 }
2339 else
2340 bitmap_clear_bit (changed_blocks, block->index);
2341
2342 maybe_dump_sets:
2343 if (dump_file && (dump_flags & TDF_DETAILS))
2344 {
2345 if (PA_OUT)
2346 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2347
2348 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2349 }
2350 if (old_PA_IN)
2351 bitmap_set_free (old_PA_IN);
2352 if (PA_OUT)
2353 bitmap_set_free (PA_OUT);
2354 return changed;
2355 }
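/* Contrast with full ANTIC (hypothetical example): if bb2's
   successors are bb3, which computes a_1 + b_2, and bb4, which does
   not, the expression is anticipated along only one path.  The
   intersection in compute_antic_aux drops it from ANTIC_OUT (bb2),
   while the union above keeps it in PA_OUT (bb2), so only the
   partial-partial insertion performed when do_partial_partial is set
   will consider it.  */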
2356
2357 /* Compute ANTIC and partial ANTIC sets. */
2358
2359 static void
2360 compute_antic (void)
2361 {
2362 bool changed = true;
2363 int num_iterations = 0;
2364 basic_block block;
2365 int i;
2366
2367 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2368 We pre-build the map of blocks with incoming abnormal edges here. */
2369 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2370 bitmap_clear (has_abnormal_preds);
2371
2372 FOR_ALL_BB_FN (block, cfun)
2373 {
2374 edge_iterator ei;
2375 edge e;
2376
2377 FOR_EACH_EDGE (e, ei, block->preds)
2378 {
2379 e->flags &= ~EDGE_DFS_BACK;
2380 if (e->flags & EDGE_ABNORMAL)
2381 {
2382 bitmap_set_bit (has_abnormal_preds, block->index);
2383 break;
2384 }
2385 }
2386
2387 BB_VISITED (block) = 0;
2388
2389 /* While we are here, give empty ANTIC_IN sets to each block. */
2390 ANTIC_IN (block) = bitmap_set_new ();
2391 PA_IN (block) = bitmap_set_new ();
2392 }
2393
2394 /* At the exit block we anticipate nothing. */
2395 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2396
2397 changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2398 bitmap_ones (changed_blocks);
2399 while (changed)
2400 {
2401 if (dump_file && (dump_flags & TDF_DETAILS))
2402 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2403 /* ??? We need to clear our PHI translation cache here as the
2404 ANTIC sets shrink and we restrict valid translations to
2405 those having operands with leaders in ANTIC. Same below
2406 for PA ANTIC computation. */
2407 num_iterations++;
2408 changed = false;
2409 for (i = postorder_num - 1; i >= 0; i--)
2410 {
2411 if (bitmap_bit_p (changed_blocks, postorder[i]))
2412 {
2413 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2414 changed |= compute_antic_aux (block,
2415 bitmap_bit_p (has_abnormal_preds,
2416 block->index));
2417 }
2418 }
2419 /* Theoretically possible, but *highly* unlikely. */
2420 gcc_checking_assert (num_iterations < 500);
2421 }
2422
2423 statistics_histogram_event (cfun, "compute_antic iterations",
2424 num_iterations);
2425
2426 if (do_partial_partial)
2427 {
2428 bitmap_ones (changed_blocks);
2429 mark_dfs_back_edges ();
2430 num_iterations = 0;
2431 changed = true;
2432 while (changed)
2433 {
2434 if (dump_file && (dump_flags & TDF_DETAILS))
2435 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2436 num_iterations++;
2437 changed = false;
2438 for (i = postorder_num - 1; i >= 0; i--)
2439 {
2440 if (bitmap_bit_p (changed_blocks, postorder[i]))
2441 {
2442 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2443 changed
2444 |= compute_partial_antic_aux (block,
2445 bitmap_bit_p (has_abnormal_preds,
2446 block->index));
2447 }
2448 }
2449 /* Theoretically possible, but *highly* unlikely. */
2450 gcc_checking_assert (num_iterations < 500);
2451 }
2452 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2453 num_iterations);
2454 }
2455 sbitmap_free (has_abnormal_preds);
2456 sbitmap_free (changed_blocks);
2457 }
2458
2459
2460 /* Inserted expressions are placed onto this worklist, which is used
2461 for performing quick dead code elimination of insertions we made
2462 that didn't turn out to be necessary. */
2463 static bitmap inserted_exprs;
2464
2465 /* The actual worker for create_component_ref_by_pieces. */
2466
2467 static tree
2468 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2469 unsigned int *operand, gimple_seq *stmts)
2470 {
2471 vn_reference_op_t currop = &ref->operands[*operand];
2472 tree genop;
2473 ++*operand;
2474 switch (currop->opcode)
2475 {
2476 case CALL_EXPR:
2477 gcc_unreachable ();
2478
2479 case MEM_REF:
2480 {
2481 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2482 stmts);
2483 if (!baseop)
2484 return NULL_TREE;
2485 tree offset = currop->op0;
2486 if (TREE_CODE (baseop) == ADDR_EXPR
2487 && handled_component_p (TREE_OPERAND (baseop, 0)))
2488 {
2489 HOST_WIDE_INT off;
2490 tree base;
2491 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2492 &off);
2493 gcc_assert (base);
2494 offset = int_const_binop (PLUS_EXPR, offset,
2495 build_int_cst (TREE_TYPE (offset),
2496 off));
2497 baseop = build_fold_addr_expr (base);
2498 }
2499 genop = build2 (MEM_REF, currop->type, baseop, offset);
2500 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2501 MR_DEPENDENCE_BASE (genop) = currop->base;
2502 return genop;
2503 }
2504
2505 case TARGET_MEM_REF:
2506 {
2507 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2508 vn_reference_op_t nextop = &ref->operands[++*operand];
2509 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2510 stmts);
2511 if (!baseop)
2512 return NULL_TREE;
2513 if (currop->op0)
2514 {
2515 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2516 if (!genop0)
2517 return NULL_TREE;
2518 }
2519 if (nextop->op0)
2520 {
2521 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2522 if (!genop1)
2523 return NULL_TREE;
2524 }
2525 genop = build5 (TARGET_MEM_REF, currop->type,
2526 baseop, currop->op2, genop0, currop->op1, genop1);
2527
2528 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2529 MR_DEPENDENCE_BASE (genop) = currop->base;
2530 return genop;
2531 }
2532
2533 case ADDR_EXPR:
2534 if (currop->op0)
2535 {
2536 gcc_assert (is_gimple_min_invariant (currop->op0));
2537 return currop->op0;
2538 }
2539 /* Fallthrough. */
2540 case REALPART_EXPR:
2541 case IMAGPART_EXPR:
2542 case VIEW_CONVERT_EXPR:
2543 {
2544 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2545 stmts);
2546 if (!genop0)
2547 return NULL_TREE;
2548 return fold_build1 (currop->opcode, currop->type, genop0);
2549 }
2550
2551 case WITH_SIZE_EXPR:
2552 {
2553 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2554 stmts);
2555 if (!genop0)
2556 return NULL_TREE;
2557 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2558 if (!genop1)
2559 return NULL_TREE;
2560 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2561 }
2562
2563 case BIT_FIELD_REF:
2564 {
2565 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2566 stmts);
2567 if (!genop0)
2568 return NULL_TREE;
2569 tree op1 = currop->op0;
2570 tree op2 = currop->op1;
2571 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2572 }
2573
2574 /* For array ref vn_reference_op's, operand 1 of the array ref
2575 is op0 of the reference op and operand 3 of the array ref is
2576 op1. */
2577 case ARRAY_RANGE_REF:
2578 case ARRAY_REF:
2579 {
2580 tree genop0;
2581 tree genop1 = currop->op0;
2582 tree genop2 = currop->op1;
2583 tree genop3 = currop->op2;
2584 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2585 stmts);
2586 if (!genop0)
2587 return NULL_TREE;
2588 genop1 = find_or_generate_expression (block, genop1, stmts);
2589 if (!genop1)
2590 return NULL_TREE;
2591 if (genop2)
2592 {
2593 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2594 /* Drop zero minimum index if redundant. */
2595 if (integer_zerop (genop2)
2596 && (!domain_type
2597 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2598 genop2 = NULL_TREE;
2599 else
2600 {
2601 genop2 = find_or_generate_expression (block, genop2, stmts);
2602 if (!genop2)
2603 return NULL_TREE;
2604 }
2605 }
2606 if (genop3)
2607 {
2608 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2609 /* We can't always put a size in units of the element alignment
2610 here as the element alignment may not be visible. See
2611 PR43783. Simply drop the element size for constant
2612 sizes. */
2613 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2614 genop3 = NULL_TREE;
2615 else
2616 {
2617 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2618 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2619 genop3 = find_or_generate_expression (block, genop3, stmts);
2620 if (!genop3)
2621 return NULL_TREE;
2622 }
2623 }
2624 return build4 (currop->opcode, currop->type, genop0, genop1,
2625 genop2, genop3);
2626 }
2627 case COMPONENT_REF:
2628 {
2629 tree op0;
2630 tree op1;
2631 tree genop2 = currop->op1;
2632 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2633 if (!op0)
2634 return NULL_TREE;
2635 /* op1 should be a FIELD_DECL, which are represented by themselves. */
2636 op1 = currop->op0;
2637 if (genop2)
2638 {
2639 genop2 = find_or_generate_expression (block, genop2, stmts);
2640 if (!genop2)
2641 return NULL_TREE;
2642 }
2643 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2644 }
2645
2646 case SSA_NAME:
2647 {
2648 genop = find_or_generate_expression (block, currop->op0, stmts);
2649 return genop;
2650 }
2651 case STRING_CST:
2652 case INTEGER_CST:
2653 case COMPLEX_CST:
2654 case VECTOR_CST:
2655 case REAL_CST:
2656 case CONSTRUCTOR:
2657 case VAR_DECL:
2658 case PARM_DECL:
2659 case CONST_DECL:
2660 case RESULT_DECL:
2661 case FUNCTION_DECL:
2662 return currop->op0;
2663
2664 default:
2665 gcc_unreachable ();
2666 }
2667 }
2668
2669 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2670 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2671 trying to rename aggregates into SSA form directly, which is a no-no.
2672
2673 Thus, this routine doesn't create temporaries, it just builds a
2674 single access expression for the array, calling
2675 find_or_generate_expression to build the innermost pieces.
2676
2677 This function is a subroutine of create_expression_by_pieces, and
2678 should not be called on its own unless you really know what you
2679 are doing. */
2680
2681 static tree
2682 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2683 gimple_seq *stmts)
2684 {
2685 unsigned int op = 0;
2686 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2687 }
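/* For example (operands hypothetical), a reference a.b[i_1] is
   rebuilt from the inside out: the deepest recursive call returns
   the base a, the next level wraps it into the COMPONENT_REF a.b,
   and the outermost level builds the ARRAY_REF a.b[i_1], with
   find_or_generate_expression supplying a leader for i_1.  Only the
   scalar pieces become SSA temporaries, never the aggregate.  */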
2688
2689 /* Find a simple leader for an expression, or generate one using
2690 create_expression_by_pieces from a NARY expression for the value.
2691 BLOCK is the basic_block we are looking for leaders in.
2692 OP is the tree expression to find a leader for or generate.
2693 Returns the leader or NULL_TREE on failure. */
2694
2695 static tree
2696 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2697 {
2698 pre_expr expr = get_or_alloc_expr_for (op);
2699 unsigned int lookfor = get_expr_value_id (expr);
2700 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2701 if (leader)
2702 {
2703 if (leader->kind == NAME)
2704 return PRE_EXPR_NAME (leader);
2705 else if (leader->kind == CONSTANT)
2706 return PRE_EXPR_CONSTANT (leader);
2707
2708 /* Defer. */
2709 return NULL_TREE;
2710 }
2711
2712 /* It must be a complex expression, so generate it recursively. Note
2713 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2714 where the insert algorithm fails to insert a required expression. */
2715 bitmap exprset = value_expressions[lookfor];
2716 bitmap_iterator bi;
2717 unsigned int i;
2718 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2719 {
2720 pre_expr temp = expression_for_id (i);
2721 /* We cannot insert random REFERENCE expressions at arbitrary
2722 places. We can insert NARYs, which eventually re-materialize
2723 their operand values. */
2724 if (temp->kind == NARY)
2725 return create_expression_by_pieces (block, temp, stmts,
2726 get_expr_type (expr));
2727 }
2728
2729 /* Defer. */
2730 return NULL_TREE;
2731 }
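/* Sketch of the two outcomes above (names hypothetical): if
   AVAIL_OUT (BLOCK) has the leader t_3 for OP's value, t_3 is
   returned directly; otherwise, if the value is also represented by
   the NARY x_1 + 4, create_expression_by_pieces inserts a statement
   computing it and its new LHS is returned.  */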
2732
2733 #define NECESSARY GF_PLF_1
2734
2735 /* Create an expression in pieces, so that we can handle very complex
2736 expressions that may be ANTIC, but not necessarily GIMPLE.
2737 BLOCK is the basic block the expression will be inserted into,
2738 EXPR is the expression to insert (in value form)
2739 STMTS is a statement list to append the necessary insertions into.
2740
2741 This function will die if we hit some value that shouldn't be
2742 ANTIC but is (i.e., there is no leader for it or its components).
2743 The function returns NULL_TREE in case a different antic expression
2744 has to be inserted first.
2745 This function may also generate expressions that are themselves
2746 partially or fully redundant. Those that are will be either made
2747 fully redundant during the next iteration of insert (for partially
2748 redundant ones), or eliminated by eliminate (for fully redundant
2749 ones). */
2750
2751 static tree
2752 create_expression_by_pieces (basic_block block, pre_expr expr,
2753 gimple_seq *stmts, tree type)
2754 {
2755 tree name;
2756 tree folded;
2757 gimple_seq forced_stmts = NULL;
2758 unsigned int value_id;
2759 gimple_stmt_iterator gsi;
2760 tree exprtype = type ? type : get_expr_type (expr);
2761 pre_expr nameexpr;
2762 gassign *newstmt;
2763
2764 switch (expr->kind)
2765 {
2766 /* We may hit the NAME/CONSTANT case if we have to convert types
2767 that value numbering saw through. */
2768 case NAME:
2769 folded = PRE_EXPR_NAME (expr);
2770 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2771 return folded;
2772 break;
2773 case CONSTANT:
2774 {
2775 folded = PRE_EXPR_CONSTANT (expr);
2776 tree tem = fold_convert (exprtype, folded);
2777 if (is_gimple_min_invariant (tem))
2778 return tem;
2779 break;
2780 }
2781 case REFERENCE:
2782 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2783 {
2784 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2785 unsigned int operand = 1;
2786 vn_reference_op_t currop = &ref->operands[0];
2787 tree sc = NULL_TREE;
2788 tree fn;
2789 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2790 fn = currop->op0;
2791 else
2792 fn = find_or_generate_expression (block, currop->op0, stmts);
2793 if (!fn)
2794 return NULL_TREE;
2795 if (currop->op1)
2796 {
2797 sc = find_or_generate_expression (block, currop->op1, stmts);
2798 if (!sc)
2799 return NULL_TREE;
2800 }
2801 auto_vec<tree> args (ref->operands.length () - 1);
2802 while (operand < ref->operands.length ())
2803 {
2804 tree arg = create_component_ref_by_pieces_1 (block, ref,
2805 &operand, stmts);
2806 if (!arg)
2807 return NULL_TREE;
2808 args.quick_push (arg);
2809 }
2810 gcall *call
2811 = gimple_build_call_vec ((TREE_CODE (fn) == FUNCTION_DECL
2812 ? build_fold_addr_expr (fn) : fn), args);
2813 gimple_call_set_with_bounds (call, currop->with_bounds);
2814 if (sc)
2815 gimple_call_set_chain (call, sc);
2816 tree forcedname = make_ssa_name (currop->type);
2817 gimple_call_set_lhs (call, forcedname);
2818 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2819 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2820 folded = forcedname;
2821 }
2822 else
2823 {
2824 folded = create_component_ref_by_pieces (block,
2825 PRE_EXPR_REFERENCE (expr),
2826 stmts);
2827 if (!folded)
2828 return NULL_TREE;
2829 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2830 newstmt = gimple_build_assign (name, folded);
2831 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2832 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2833 folded = name;
2834 }
2835 break;
2836 case NARY:
2837 {
2838 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2839 tree *genop = XALLOCAVEC (tree, nary->length);
2840 unsigned i;
2841 for (i = 0; i < nary->length; ++i)
2842 {
2843 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2844 if (!genop[i])
2845 return NULL_TREE;
2846 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2847 may have conversions stripped. */
2848 if (nary->opcode == POINTER_PLUS_EXPR)
2849 {
2850 if (i == 0)
2851 genop[i] = gimple_convert (&forced_stmts,
2852 nary->type, genop[i]);
2853 else if (i == 1)
2854 genop[i] = gimple_convert (&forced_stmts,
2855 sizetype, genop[i]);
2856 }
2857 else
2858 genop[i] = gimple_convert (&forced_stmts,
2859 TREE_TYPE (nary->op[i]), genop[i]);
2860 }
2861 if (nary->opcode == CONSTRUCTOR)
2862 {
2863 vec<constructor_elt, va_gc> *elts = NULL;
2864 for (i = 0; i < nary->length; ++i)
2865 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2866 folded = build_constructor (nary->type, elts);
2867 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2868 newstmt = gimple_build_assign (name, folded);
2869 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2870 folded = name;
2871 }
2872 else
2873 {
2874 switch (nary->length)
2875 {
2876 case 1:
2877 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2878 genop[0]);
2879 break;
2880 case 2:
2881 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2882 genop[0], genop[1]);
2883 break;
2884 case 3:
2885 folded = gimple_build (&forced_stmts, nary->opcode, nary->type,
2886 genop[0], genop[1], genop[2]);
2887 break;
2888 default:
2889 gcc_unreachable ();
2890 }
2891 }
2892 }
2893 break;
2894 default:
2895 gcc_unreachable ();
2896 }
2897
2898 folded = gimple_convert (&forced_stmts, exprtype, folded);
2899
2900 /* If there is nothing to insert, return the simplified result. */
2901 if (gimple_seq_empty_p (forced_stmts))
2902 return folded;
2903 /* If we simplified to a constant, return it and discard any
2904 stmts built along the way. */
2905 if (is_gimple_min_invariant (folded))
2906 {
2907 gimple_seq_discard (forced_stmts);
2908 return folded;
2909 }
2910
2911 gcc_assert (TREE_CODE (folded) == SSA_NAME);
2912
2913 /* If we generated any intermediate expressions, add them to the
2914 value sets and chain them into the instruction stream. */
2915 if (forced_stmts)
2916 {
2917 gsi = gsi_start (forced_stmts);
2918 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2919 {
2920 gimple *stmt = gsi_stmt (gsi);
2921 tree forcedname = gimple_get_lhs (stmt);
2922 pre_expr nameexpr;
2923
2924 if (forcedname != folded)
2925 {
2926 VN_INFO_GET (forcedname)->valnum = forcedname;
2927 VN_INFO (forcedname)->value_id = get_next_value_id ();
2928 nameexpr = get_or_alloc_expr_for_name (forcedname);
2929 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2930 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2931 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2932 }
2933
2934 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2935 gimple_set_plf (stmt, NECESSARY, false);
2936 }
2937 gimple_seq_add_seq (stmts, forced_stmts);
2938 }
2939
2940 name = folded;
2941
2942 /* Fold the last statement. */
2943 gsi = gsi_last (*stmts);
2944 if (fold_stmt_inplace (&gsi))
2945 update_stmt (gsi_stmt (gsi));
2946
2947 /* Add a value number to the temporary.
2948 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
2949 we are creating the expression by pieces, and this particular piece of
2950 the expression may already have been represented. There is no harm in
2951 replacing it here. */
2952 value_id = get_expr_value_id (expr);
2953 VN_INFO_GET (name)->value_id = value_id;
2954 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2955 if (VN_INFO (name)->valnum == NULL_TREE)
2956 VN_INFO (name)->valnum = name;
2957 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2958 nameexpr = get_or_alloc_expr_for_name (name);
2959 add_to_value (value_id, nameexpr);
2960 if (NEW_SETS (block))
2961 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2962 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2963
2964 pre_stats.insertions++;
2965 if (dump_file && (dump_flags & TDF_DETAILS))
2966 {
2967 fprintf (dump_file, "Inserted ");
2968 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0, 0);
2969 fprintf (dump_file, " in predecessor %d (%04d)\n",
2970 block->index, value_id);
2971 }
2972
2973 return name;
2974 }
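/* For instance (names hypothetical), asked to materialize the value
   of a_1 + b_2 where b_2 first needs widening, the function may
   append

       _5 = (long) b_2;
       _6 = a_1 + _5;

   to STMTS, give _6 the value-id of the original expression, enter
   it into NEW_SETS and AVAIL_OUT of BLOCK, and return _6.  */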
2975
2976
2977 /* Insert the to-be-made-available values of expression EXPRNUM for each
2978 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2979 merge the result with a phi node, given the same value number as
2980 NODE. Return true if we have inserted new stuff. */
2981
2982 static bool
2983 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2984 vec<pre_expr> avail)
2985 {
2986 pre_expr expr = expression_for_id (exprnum);
2987 pre_expr newphi;
2988 unsigned int val = get_expr_value_id (expr);
2989 edge pred;
2990 bool insertions = false;
2991 bool nophi = false;
2992 basic_block bprime;
2993 pre_expr eprime;
2994 edge_iterator ei;
2995 tree type = get_expr_type (expr);
2996 tree temp;
2997 gphi *phi;
2998
2999 /* Make sure we aren't creating an induction variable. */
3000 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3001 {
3002 bool firstinsideloop = false;
3003 bool secondinsideloop = false;
3004 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3005 EDGE_PRED (block, 0)->src);
3006 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3007 EDGE_PRED (block, 1)->src);
3008 /* Induction variables only have one edge inside the loop. */
3009 if ((firstinsideloop ^ secondinsideloop)
3010 && expr->kind != REFERENCE)
3011 {
3012 if (dump_file && (dump_flags & TDF_DETAILS))
3013 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3014 nophi = true;
3015 }
3016 }
3017
3018 /* Make the necessary insertions. */
3019 FOR_EACH_EDGE (pred, ei, block->preds)
3020 {
3021 gimple_seq stmts = NULL;
3022 tree builtexpr;
3023 bprime = pred->src;
3024 eprime = avail[pred->dest_idx];
3025 builtexpr = create_expression_by_pieces (bprime, eprime,
3026 &stmts, type);
3027 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3028 if (!gimple_seq_empty_p (stmts))
3029 {
3030 gsi_insert_seq_on_edge (pred, stmts);
3031 insertions = true;
3032 }
3033 if (!builtexpr)
3034 {
3035 /* We cannot insert a PHI node if we failed to insert
3036 on one edge. */
3037 nophi = true;
3038 continue;
3039 }
3040 if (is_gimple_min_invariant (builtexpr))
3041 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3042 else
3043 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3044 }
3045 /* If we didn't want a phi node, and we made insertions, we still have
3046 inserted new stuff, and thus return true. If we didn't want a phi node,
3047 and didn't make insertions, we haven't added anything new, so return
3048 false. */
3049 if (nophi && insertions)
3050 return true;
3051 else if (nophi && !insertions)
3052 return false;
3053
3054 /* Now build a phi for the new variable. */
3055 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3056 phi = create_phi_node (temp, block);
3057
3058 gimple_set_plf (phi, NECESSARY, false);
3059 VN_INFO_GET (temp)->value_id = val;
3060 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3061 if (VN_INFO (temp)->valnum == NULL_TREE)
3062 VN_INFO (temp)->valnum = temp;
3063 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3064 FOR_EACH_EDGE (pred, ei, block->preds)
3065 {
3066 pre_expr ae = avail[pred->dest_idx];
3067 gcc_assert (get_expr_type (ae) == type
3068 || useless_type_conversion_p (type, get_expr_type (ae)));
3069 if (ae->kind == CONSTANT)
3070 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3071 pred, UNKNOWN_LOCATION);
3072 else
3073 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3074 }
3075
3076 newphi = get_or_alloc_expr_for_name (temp);
3077 add_to_value (val, newphi);
3078
3079 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3080 this insertion, since we test for the existence of this value in PHI_GEN
3081 before proceeding with the partial redundancy checks in insert_aux.
3082
3083 The value may exist in AVAIL_OUT, in particular, it could be represented
3084 by the expression we are trying to eliminate, in which case we want the
3085 replacement to occur. If it does not yet exist in AVAIL_OUT, we want it
3086 inserted there.
3087
3088 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3089 this block, because if it did, it would have existed in our dominator's
3090 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3091 */
3092
3093 bitmap_insert_into_set (PHI_GEN (block), newphi);
3094 bitmap_value_replace_in_set (AVAIL_OUT (block),
3095 newphi);
3096 bitmap_insert_into_set (NEW_SETS (block),
3097 newphi);
3098
3099 /* If we insert a PHI node for a conversion of another PHI node
3100 in the same basic-block try to preserve range information.
3101 This is important so that follow-up loop passes receive good
3102 number-of-iterations analysis results. See PR61743. */
3103 if (expr->kind == NARY
3104 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3105 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3106 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3107 && INTEGRAL_TYPE_P (type)
3108 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3109 && (TYPE_PRECISION (type)
3110 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3111 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3112 {
3113 wide_int min, max;
3114 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3115 && !wi::neg_p (min, SIGNED)
3116 && !wi::neg_p (max, SIGNED))
3117 /* Just handle extension and sign-changes of all-positive ranges. */
3118 set_range_info (temp,
3119 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3120 wide_int_storage::from (min, TYPE_PRECISION (type),
3121 TYPE_SIGN (type)),
3122 wide_int_storage::from (max, TYPE_PRECISION (type),
3123 TYPE_SIGN (type)));
3124 }
3125
3126 if (dump_file && (dump_flags & TDF_DETAILS))
3127 {
3128 fprintf (dump_file, "Created phi ");
3129 print_gimple_stmt (dump_file, phi, 0, 0);
3130 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3131 }
3132 pre_stats.phis++;
3133 return true;
3134 }
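/* The overall shape of the transformation (all names hypothetical).
   Before:

       if (c_1)
         x_2 = a_3 + 1;
       y_4 = a_3 + 1;

   After insertion:

       if (c_1)
         x_2 = a_3 + 1;
       else
         pretmp_5 = a_3 + 1;
       prephitmp_6 = PHI <x_2, pretmp_5>
       y_4 = prephitmp_6;

   Inserting into the predecessor that did not compute a_3 + 1 and
   merging with a PHI makes the second computation fully redundant;
   elimination then rewrites the use.  */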
3135
3136
3137
3138 /* Perform insertion of partially redundant values.
3139 For BLOCK, do the following:
3140 1. Propagate the NEW_SETS of the dominator into the current block.
3141 If the block has multiple predecessors,
3142 2a. Iterate over the ANTIC expressions for the block to see if
3143 any of them are partially redundant.
3144 2b. If so, insert them into the necessary predecessors to make
3145 the expression fully redundant.
3146 2c. Insert a new PHI merging the values of the predecessors.
3147 2d. Insert the new PHI, and the new expressions, into the
3148 NEW_SETS set.
3149 3. Recursively call ourselves on the dominator children of BLOCK.
3150
3151 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3152 do_regular_insertion and do_partial_partial_insertion.
3153
3154 */
3155
3156 static bool
3157 do_regular_insertion (basic_block block, basic_block dom)
3158 {
3159 bool new_stuff = false;
3160 vec<pre_expr> exprs;
3161 pre_expr expr;
3162 auto_vec<pre_expr> avail;
3163 int i;
3164
3165 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3166 avail.safe_grow (EDGE_COUNT (block->preds));
3167
3168 FOR_EACH_VEC_ELT (exprs, i, expr)
3169 {
3170 if (expr->kind == NARY
3171 || expr->kind == REFERENCE)
3172 {
3173 unsigned int val;
3174 bool by_some = false;
3175 bool cant_insert = false;
3176 bool all_same = true;
3177 pre_expr first_s = NULL;
3178 edge pred;
3179 basic_block bprime;
3180 pre_expr eprime = NULL;
3181 edge_iterator ei;
3182 pre_expr edoubleprime = NULL;
3183 bool do_insertion = false;
3184
3185 val = get_expr_value_id (expr);
3186 if (bitmap_set_contains_value (PHI_GEN (block), val))
3187 continue;
3188 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3189 {
3190 if (dump_file && (dump_flags & TDF_DETAILS))
3191 {
3192 fprintf (dump_file, "Found fully redundant value: ");
3193 print_pre_expr (dump_file, expr);
3194 fprintf (dump_file, "\n");
3195 }
3196 continue;
3197 }
3198
3199 FOR_EACH_EDGE (pred, ei, block->preds)
3200 {
3201 unsigned int vprime;
3202
3203 /* We should never run insertion for the exit block
3204 and so not come across fake pred edges. */
3205 gcc_assert (!(pred->flags & EDGE_FAKE));
3206 bprime = pred->src;
3207 /* We are looking at ANTIC_OUT of bprime. */
3208 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3209 bprime, block);
3210
3211 /* eprime will generally only be NULL if the
3212 value of the expression, translated
3213 through the PHI for this predecessor, is
3214 undefined. If that is the case, we can't
3215 make the expression fully redundant,
3216 because its value is undefined along a
3217 predecessor path. We can thus break out
3218 early because it doesn't matter what the
3219 rest of the results are. */
3220 if (eprime == NULL)
3221 {
3222 avail[pred->dest_idx] = NULL;
3223 cant_insert = true;
3224 break;
3225 }
3226
3227 eprime = fully_constant_expression (eprime);
3228 vprime = get_expr_value_id (eprime);
3229 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3230 vprime);
3231 if (edoubleprime == NULL)
3232 {
3233 avail[pred->dest_idx] = eprime;
3234 all_same = false;
3235 }
3236 else
3237 {
3238 avail[pred->dest_idx] = edoubleprime;
3239 by_some = true;
3240 /* We want to perform insertions to remove a redundancy on
3241 a path in the CFG we want to optimize for speed. */
3242 if (optimize_edge_for_speed_p (pred))
3243 do_insertion = true;
3244 if (first_s == NULL)
3245 first_s = edoubleprime;
3246 else if (!pre_expr_d::equal (first_s, edoubleprime))
3247 all_same = false;
3248 }
3249 }
3250 /* If we can insert it, the value is not already
3251 the same along every predecessor, and it is
3252 defined by some predecessor, then the expression
3253 is partially redundant. */
3254 if (!cant_insert && !all_same && by_some)
3255 {
3256 if (!do_insertion)
3257 {
3258 if (dump_file && (dump_flags & TDF_DETAILS))
3259 {
3260 fprintf (dump_file, "Skipping partial redundancy for "
3261 "expression ");
3262 print_pre_expr (dump_file, expr);
3263 fprintf (dump_file, " (%04d), no redundancy on to be "
3264 "optimized for speed edge\n", val);
3265 }
3266 }
3267 else if (dbg_cnt (treepre_insert))
3268 {
3269 if (dump_file && (dump_flags & TDF_DETAILS))
3270 {
3271 fprintf (dump_file, "Found partial redundancy for "
3272 "expression ");
3273 print_pre_expr (dump_file, expr);
3274 fprintf (dump_file, " (%04d)\n",
3275 get_expr_value_id (expr));
3276 }
3277 if (insert_into_preds_of_block (block,
3278 get_expression_id (expr),
3279 avail))
3280 new_stuff = true;
3281 }
3282 }
3283 /* If all edges produce the same value and that value is
3284 an invariant, then the PHI has the same value on all
3285 edges. Note this. */
3286 else if (!cant_insert && all_same)
3287 {
3288 gcc_assert (edoubleprime->kind == CONSTANT
3289 || edoubleprime->kind == NAME);
3290
3291 tree temp = make_temp_ssa_name (get_expr_type (expr),
3292 NULL, "pretmp");
3293 gassign *assign
3294 = gimple_build_assign (temp,
3295 edoubleprime->kind == CONSTANT ?
3296 PRE_EXPR_CONSTANT (edoubleprime) :
3297 PRE_EXPR_NAME (edoubleprime));
3298 gimple_stmt_iterator gsi = gsi_after_labels (block);
3299 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3300
3301 gimple_set_plf (assign, NECESSARY, false);
3302 VN_INFO_GET (temp)->value_id = val;
3303 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3304 if (VN_INFO (temp)->valnum == NULL_TREE)
3305 VN_INFO (temp)->valnum = temp;
3306 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3307 pre_expr newe = get_or_alloc_expr_for_name (temp);
3308 add_to_value (val, newe);
3309 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3310 bitmap_insert_into_set (NEW_SETS (block), newe);
3311 }
3312 }
3313 }
3314
3315 exprs.release ();
3316 return new_stuff;
3317 }
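/* An example of the all_same case above (hypothetical): if phi
   translation yields the constant 42 along every predecessor, no PHI
   is needed; a single

       pretmp_7 = 42;

   at the start of BLOCK gives the value a leader in AVAIL_OUT so
   later full computations of it can be eliminated.  */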
3318
3319
3320 /* Perform insertion for partially anticipatable expressions. There
3321 is only one case we will perform insertion for these. This case is
3322 if the expression is partially anticipatable, and fully available.
3323 In this case, we know that putting it earlier will enable us to
3324 remove the later computation. */
3325
3326
3327 static bool
3328 do_partial_partial_insertion (basic_block block, basic_block dom)
3329 {
3330 bool new_stuff = false;
3331 vec<pre_expr> exprs;
3332 pre_expr expr;
3333 auto_vec<pre_expr> avail;
3334 int i;
3335
3336 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3337 avail.safe_grow (EDGE_COUNT (block->preds));
3338
3339 FOR_EACH_VEC_ELT (exprs, i, expr)
3340 {
3341 if (expr->kind == NARY
3342 || expr->kind == REFERENCE)
3343 {
3344 unsigned int val;
3345 bool by_all = true;
3346 bool cant_insert = false;
3347 edge pred;
3348 basic_block bprime;
3349 pre_expr eprime = NULL;
3350 edge_iterator ei;
3351
3352 val = get_expr_value_id (expr);
3353 if (bitmap_set_contains_value (PHI_GEN (block), val))
3354 continue;
3355 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3356 continue;
3357
3358 FOR_EACH_EDGE (pred, ei, block->preds)
3359 {
3360 unsigned int vprime;
3361 pre_expr edoubleprime;
3362
3363 /* We should never run insertion for the exit block
3364 and so not come across fake pred edges. */
3365 gcc_assert (!(pred->flags & EDGE_FAKE));
3366 bprime = pred->src;
3367 eprime = phi_translate (expr, ANTIC_IN (block),
3368 PA_IN (block),
3369 bprime, block);
3370
3371 /* eprime will generally only be NULL if the
3372 value of the expression, translated
3373 through the PHI for this predecessor, is
3374 undefined. If that is the case, we can't
3375 make the expression fully redundant,
3376 because its value is undefined along a
3377 predecessor path. We can thus break out
3378 early because it doesn't matter what the
3379 rest of the results are. */
3380 if (eprime == NULL)
3381 {
3382 avail[pred->dest_idx] = NULL;
3383 cant_insert = true;
3384 break;
3385 }
3386
3387 eprime = fully_constant_expression (eprime);
3388 vprime = get_expr_value_id (eprime);
3389 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3390 avail[pred->dest_idx] = edoubleprime;
3391 if (edoubleprime == NULL)
3392 {
3393 by_all = false;
3394 break;
3395 }
3396 }
3397
3398 /* If we can insert it and the value is already
3399 available along every predecessor, the expression
3400 is fully available there and hoisting it here
3401 will let us remove the later computation. */
3402 if (!cant_insert && by_all)
3403 {
3404 edge succ;
3405 bool do_insertion = false;
3406
3407 /* Insert only if we can remove a later expression on a path
3408 that we want to optimize for speed.
3409 The phi node that we will be inserting in BLOCK is not free,
3410 and inserting it for the sake of a !optimize_for_speed successor
3411 may cause regressions on the speed path. */
3412 FOR_EACH_EDGE (succ, ei, block->succs)
3413 {
3414 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3415 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3416 {
3417 if (optimize_edge_for_speed_p (succ))
3418 do_insertion = true;
3419 }
3420 }
3421
3422 if (!do_insertion)
3423 {
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3425 {
3426 fprintf (dump_file, "Skipping partial partial redundancy "
3427 "for expression ");
3428 print_pre_expr (dump_file, expr);
3429 fprintf (dump_file, " (%04d), not (partially) anticipated "
3430 "on any to be optimized for speed edges\n", val);
3431 }
3432 }
3433 else if (dbg_cnt (treepre_insert))
3434 {
3435 pre_stats.pa_insert++;
3436 if (dump_file && (dump_flags & TDF_DETAILS))
3437 {
3438 fprintf (dump_file, "Found partial partial redundancy "
3439 "for expression ");
3440 print_pre_expr (dump_file, expr);
3441 fprintf (dump_file, " (%04d)\n",
3442 get_expr_value_id (expr));
3443 }
3444 if (insert_into_preds_of_block (block,
3445 get_expression_id (expr),
3446 avail))
3447 new_stuff = true;
3448 }
3449 }
3450 }
3451 }
3452
3453 exprs.release ();
3454 return new_stuff;
3455 }
3456
3457 static bool
3458 insert_aux (basic_block block)
3459 {
3460 basic_block son;
3461 bool new_stuff = false;
3462
3463 if (block)
3464 {
3465 basic_block dom;
3466 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3467 if (dom)
3468 {
3469 unsigned i;
3470 bitmap_iterator bi;
3471 bitmap_set_t newset = NEW_SETS (dom);
3472 if (newset)
3473 {
3474 /* Note that we need to value_replace both NEW_SETS and
3475 AVAIL_OUT. In both sets the value may currently be
3476 represented by some non-simple expression here that we
3477 want to replace with the propagated expression. */
3478 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3479 {
3480 pre_expr expr = expression_for_id (i);
3481 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3482 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3483 }
3484 }
3485 if (!single_pred_p (block))
3486 {
3487 new_stuff |= do_regular_insertion (block, dom);
3488 if (do_partial_partial)
3489 new_stuff |= do_partial_partial_insertion (block, dom);
3490 }
3491 }
3492 }
3493 for (son = first_dom_son (CDI_DOMINATORS, block);
3494 son;
3495 son = next_dom_son (CDI_DOMINATORS, son))
3496 {
3497 new_stuff |= insert_aux (son);
3498 }
3499
3500 return new_stuff;
3501 }
3502
3503 /* Perform insertion of partially redundant values. */
3504
3505 static void
3506 insert (void)
3507 {
3508 bool new_stuff = true;
3509 basic_block bb;
3510 int num_iterations = 0;
3511
3512 FOR_ALL_BB_FN (bb, cfun)
3513 NEW_SETS (bb) = bitmap_set_new ();
3514
3515 while (new_stuff)
3516 {
3517 num_iterations++;
3518 if (dump_file && (dump_flags & TDF_DETAILS))
3519 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3520 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3521
3522 /* Clear the NEW sets before the next iteration. We have already
3523 fully propagated their contents. */
3524 if (new_stuff)
3525 FOR_ALL_BB_FN (bb, cfun)
3526 bitmap_set_free (NEW_SETS (bb));
3527 }
3528 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3529 }
3530
3531
3532 /* Compute the AVAIL set for all basic blocks.
3533
3534 This function performs value numbering of the statements in each basic
3535 block. The AVAIL sets are built from information we glean while doing
3536 this value numbering, since the AVAIL sets contain only one entry per
3537 value.
3538
3539 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3540 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3541
3542 static void
3543 compute_avail (void)
3544 {
3545
3546 basic_block block, son;
3547 basic_block *worklist;
3548 size_t sp = 0;
3549 unsigned i;
3550
3551 /* We pretend that default definitions are defined in the entry block.
3552 This includes function arguments and the static chain decl. */
3553 for (i = 1; i < num_ssa_names; ++i)
3554 {
3555 tree name = ssa_name (i);
3556 pre_expr e;
3557 if (!name
3558 || !SSA_NAME_IS_DEFAULT_DEF (name)
3559 || has_zero_uses (name)
3560 || virtual_operand_p (name))
3561 continue;
3562
3563 e = get_or_alloc_expr_for_name (name);
3564 add_to_value (get_expr_value_id (e), e);
3565 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3566 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3567 e);
3568 }
3569
3570 if (dump_file && (dump_flags & TDF_DETAILS))
3571 {
3572 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3573 "tmp_gen", ENTRY_BLOCK);
3574 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3575 "avail_out", ENTRY_BLOCK);
3576 }
3577
3578 /* Allocate the worklist. */
3579 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3580
3581 /* Seed the algorithm by putting the dominator children of the entry
3582 block on the worklist. */
3583 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3584 son;
3585 son = next_dom_son (CDI_DOMINATORS, son))
3586 worklist[sp++] = son;
3587
3588 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3589 = ssa_default_def (cfun, gimple_vop (cfun));
3590
3591 /* Loop until the worklist is empty. */
3592 while (sp)
3593 {
3594 gimple *stmt;
3595 basic_block dom;
3596
3597 /* Pick a block from the worklist. */
3598 block = worklist[--sp];
3599
3600 /* Initially, the set of available values in BLOCK is that of
3601 its immediate dominator. */
3602 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3603 if (dom)
3604 {
3605 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3606 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3607 }
3608
3609 /* Generate values for PHI nodes. */
3610 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3611 gsi_next (&gsi))
3612 {
3613 tree result = gimple_phi_result (gsi.phi ());
3614
3615 /* We have no need for virtual phis, as they don't represent
3616 actual computations. */
3617 if (virtual_operand_p (result))
3618 {
3619 BB_LIVE_VOP_ON_EXIT (block) = result;
3620 continue;
3621 }
3622
3623 pre_expr e = get_or_alloc_expr_for_name (result);
3624 add_to_value (get_expr_value_id (e), e);
3625 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3626 bitmap_insert_into_set (PHI_GEN (block), e);
3627 }
3628
3629 BB_MAY_NOTRETURN (block) = 0;
3630
3631 /* Now compute value numbers and populate value sets with all
3632 the expressions computed in BLOCK. */
3633 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3634 gsi_next (&gsi))
3635 {
3636 ssa_op_iter iter;
3637 tree op;
3638
3639 stmt = gsi_stmt (gsi);
3640
3641 /* Cache whether the basic-block has any non-visible side-effect
3642 or control flow.
3643 If this isn't a call or it is the last stmt in the
3644 basic-block then the CFG represents things correctly. */
3645 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3646 {
3647 /* Non-looping const functions always return normally.
3648 Otherwise the call might not return or have side-effects
3649 that forbid hoisting possibly trapping expressions
3650 before it. */
3651 int flags = gimple_call_flags (stmt);
3652 if (!(flags & ECF_CONST)
3653 || (flags & ECF_LOOPING_CONST_OR_PURE))
3654 BB_MAY_NOTRETURN (block) = 1;
3655 }
3656
3657 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3658 {
3659 pre_expr e = get_or_alloc_expr_for_name (op);
3660
3661 add_to_value (get_expr_value_id (e), e);
3662 bitmap_insert_into_set (TMP_GEN (block), e);
3663 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3664 }
3665
3666 if (gimple_vdef (stmt))
3667 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3668
3669 if (gimple_has_side_effects (stmt)
3670 || stmt_could_throw_p (stmt)
3671 || is_gimple_debug (stmt))
3672 continue;
3673
3674 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3675 {
3676 if (ssa_undefined_value_p (op))
3677 continue;
3678 pre_expr e = get_or_alloc_expr_for_name (op);
3679 bitmap_value_insert_into_set (EXP_GEN (block), e);
3680 }
3681
3682 switch (gimple_code (stmt))
3683 {
3684 case GIMPLE_RETURN:
3685 continue;
3686
3687 case GIMPLE_CALL:
3688 {
3689 vn_reference_t ref;
3690 vn_reference_s ref1;
3691 pre_expr result = NULL;
3692
3693 /* We can value number only calls to real functions. */
3694 if (gimple_call_internal_p (stmt))
3695 continue;
3696
3697 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3698 if (!ref)
3699 continue;
3700
3701 /* If the value of the call is not invalidated in
3702 this block until it is computed, add the expression
3703 to EXP_GEN. */
3704 if (!gimple_vuse (stmt)
3705 || gimple_code
3706 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3707 || gimple_bb (SSA_NAME_DEF_STMT
3708 (gimple_vuse (stmt))) != block)
3709 {
3710 result = pre_expr_pool.allocate ();
3711 result->kind = REFERENCE;
3712 result->id = 0;
3713 PRE_EXPR_REFERENCE (result) = ref;
3714
3715 get_or_alloc_expression_id (result);
3716 add_to_value (get_expr_value_id (result), result);
3717 bitmap_value_insert_into_set (EXP_GEN (block), result);
3718 }
3719 continue;
3720 }
3721
3722 case GIMPLE_ASSIGN:
3723 {
3724 pre_expr result = NULL;
3725 switch (vn_get_stmt_kind (stmt))
3726 {
3727 case VN_NARY:
3728 {
3729 enum tree_code code = gimple_assign_rhs_code (stmt);
3730 vn_nary_op_t nary;
3731
3732 /* COND_EXPR and VEC_COND_EXPR are awkward in
3733 that they contain an embedded complex expression.
3734 Don't even try to shove those through PRE. */
3735 if (code == COND_EXPR
3736 || code == VEC_COND_EXPR)
3737 continue;
3738
3739 vn_nary_op_lookup_stmt (stmt, &nary);
3740 if (!nary)
3741 continue;
3742
3743 /* If the NARY traps and there was a preceding
3744 point in the block that might not return, avoid
3745 adding the NARY to EXP_GEN. */
3746 if (BB_MAY_NOTRETURN (block)
3747 && vn_nary_may_trap (nary))
3748 continue;
3749
3750 result = pre_expr_pool.allocate ();
3751 result->kind = NARY;
3752 result->id = 0;
3753 PRE_EXPR_NARY (result) = nary;
3754 break;
3755 }
3756
3757 case VN_REFERENCE:
3758 {
3759 vn_reference_t ref;
3760 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3761 gimple_vuse (stmt),
3762 VN_WALK, &ref);
3763 if (!ref)
3764 continue;
3765
3766 /* If the value of the reference is not invalidated in
3767 this block until it is computed, add the expression
3768 to EXP_GEN. */
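/* (The walk below follows the virtual use-def chain within BLOCK;
   any intervening statement that may clobber the reference, e.g.
   a store through a possibly aliasing pointer, disqualifies it.)  */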
3769 if (gimple_vuse (stmt))
3770 {
3771 gimple *def_stmt;
3772 bool ok = true;
3773 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3774 while (!gimple_nop_p (def_stmt)
3775 && gimple_code (def_stmt) != GIMPLE_PHI
3776 && gimple_bb (def_stmt) == block)
3777 {
3778 if (stmt_may_clobber_ref_p
3779 (def_stmt, gimple_assign_rhs1 (stmt)))
3780 {
3781 ok = false;
3782 break;
3783 }
3784 def_stmt
3785 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3786 }
3787 if (!ok)
3788 continue;
3789 }
3790
3791 result = pre_expr_pool.allocate ();
3792 result->kind = REFERENCE;
3793 result->id = 0;
3794 PRE_EXPR_REFERENCE (result) = ref;
3795 break;
3796 }
3797
3798 default:
3799 continue;
3800 }
3801
3802 get_or_alloc_expression_id (result);
3803 add_to_value (get_expr_value_id (result), result);
3804 bitmap_value_insert_into_set (EXP_GEN (block), result);
3805 continue;
3806 }
3807 default:
3808 break;
3809 }
3810 }
3811
3812 if (dump_file && (dump_flags & TDF_DETAILS))
3813 {
3814 print_bitmap_set (dump_file, EXP_GEN (block),
3815 "exp_gen", block->index);
3816 print_bitmap_set (dump_file, PHI_GEN (block),
3817 "phi_gen", block->index);
3818 print_bitmap_set (dump_file, TMP_GEN (block),
3819 "tmp_gen", block->index);
3820 print_bitmap_set (dump_file, AVAIL_OUT (block),
3821 "avail_out", block->index);
3822 }
3823
3824 /* Put the dominator children of BLOCK on the worklist of blocks
3825 to compute available sets for. */
3826 for (son = first_dom_son (CDI_DOMINATORS, block);
3827 son;
3828 son = next_dom_son (CDI_DOMINATORS, son))
3829 worklist[sp++] = son;
3830 }
3831
3832 free (worklist);
3833 }
3834
3835
3836 /* Local state for the eliminate domwalk. */
3837 static vec<gimple *> el_to_remove;
3838 static vec<gimple *> el_to_fixup;
3839 static unsigned int el_todo;
3840 static vec<tree> el_avail;
3841 static vec<tree> el_avail_stack;
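/* (Sketch of the scheme: el_avail is indexed by the SSA version of a
   value number and holds the current leader for that value;
   el_avail_stack records the leaders pushed within each dominator
   tree level, separated by NULL_TREE markers, so that
   after_dom_children can undo the pushes.)  */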
3842
3843 /* Return a leader for OP that is available at the current point of the
3844 eliminate domwalk. */
3845
3846 static tree
3847 eliminate_avail (tree op)
3848 {
3849 tree valnum = VN_INFO (op)->valnum;
3850 if (TREE_CODE (valnum) == SSA_NAME)
3851 {
3852 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3853 return valnum;
3854 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3855 return el_avail[SSA_NAME_VERSION (valnum)];
3856 }
3857 else if (is_gimple_min_invariant (valnum))
3858 return valnum;
3859 return NULL_TREE;
3860 }
3861
3862 /* At the current point of the eliminate domwalk, make OP available. */
3863
3864 static void
3865 eliminate_push_avail (tree op)
3866 {
3867 tree valnum = VN_INFO (op)->valnum;
3868 if (TREE_CODE (valnum) == SSA_NAME)
3869 {
3870 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
3871 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
3872 tree pushop = op;
3873 if (el_avail[SSA_NAME_VERSION (valnum)])
3874 pushop = el_avail[SSA_NAME_VERSION (valnum)];
3875 el_avail_stack.safe_push (pushop);
3876 el_avail[SSA_NAME_VERSION (valnum)] = op;
3877 }
3878 }
3879
3880 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3881 the leader for the expression if insertion was successful. */
3882
3883 static tree
3884 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
3885 {
3886 gimple *stmt = gimple_seq_first_stmt (VN_INFO (val)->expr);
3887 if (!is_gimple_assign (stmt)
3888 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
3889 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR))
3890 return NULL_TREE;
3891
3892 tree op = gimple_assign_rhs1 (stmt);
3893 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3894 op = TREE_OPERAND (op, 0);
3895 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
3896 if (!leader)
3897 return NULL_TREE;
3898
3899 gimple_seq stmts = NULL;
3900 tree res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
3901 TREE_TYPE (val), leader);
3902 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3903 VN_INFO_GET (res)->valnum = val;
3904
3905 if (TREE_CODE (leader) == SSA_NAME)
3906 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
3907
3908 pre_stats.insertions++;
3909 if (dump_file && (dump_flags & TDF_DETAILS))
3910 {
3911 fprintf (dump_file, "Inserted ");
3912 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0, 0);
3913 }
3914
3915 return res;
3916 }
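/* (For example: if SCCVN recorded VAL as (int) x_1 and x_1 has an
   available leader y_2, the above builds tmp_3 = (int) y_2 before
   *GSI and returns tmp_3 -- all SSA names here are illustrative.)  */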
3917
3918 class eliminate_dom_walker : public dom_walker
3919 {
3920 public:
3921 eliminate_dom_walker (cdi_direction direction, bool do_pre_)
3922 : dom_walker (direction), do_pre (do_pre_) {}
3923
3924 virtual void before_dom_children (basic_block);
3925 virtual void after_dom_children (basic_block);
3926
3927 bool do_pre;
3928 };
3929
3930 /* Perform elimination for the basic-block B during the domwalk. */
3931
3932 void
3933 eliminate_dom_walker::before_dom_children (basic_block b)
3934 {
3935 /* Mark new bb. */
3936 el_avail_stack.safe_push (NULL_TREE);
3937
3938 /* ??? If we do nothing for unreachable blocks then this will confuse
3939 tailmerging. Eventually we can reduce its reliance on SCCVN now
3940 that we fully copy/constant-propagate (most) things. */
3941
3942 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
3943 {
3944 gphi *phi = gsi.phi ();
3945 tree res = PHI_RESULT (phi);
3946
3947 if (virtual_operand_p (res))
3948 {
3949 gsi_next (&gsi);
3950 continue;
3951 }
3952
3953 tree sprime = eliminate_avail (res);
3954 if (sprime
3955 && sprime != res)
3956 {
3957 if (dump_file && (dump_flags & TDF_DETAILS))
3958 {
3959 fprintf (dump_file, "Replaced redundant PHI node defining ");
3960 print_generic_expr (dump_file, res, 0);
3961 fprintf (dump_file, " with ");
3962 print_generic_expr (dump_file, sprime, 0);
3963 fprintf (dump_file, "\n");
3964 }
3965
3966 /* If we inserted this PHI node ourselves, it's not an elimination. */
3967 if (inserted_exprs
3968 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
3969 pre_stats.phis--;
3970 else
3971 pre_stats.eliminations++;
3972
3973 /* If we will propagate into all uses, don't bother to do
3974 anything. */
3975 if (may_propagate_copy (res, sprime))
3976 {
3977 /* Mark the PHI for removal. */
3978 el_to_remove.safe_push (phi);
3979 gsi_next (&gsi);
3980 continue;
3981 }
3982
3983 remove_phi_node (&gsi, false);
3984
3985 if (inserted_exprs
3986 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
3987 && TREE_CODE (sprime) == SSA_NAME)
3988 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
3989
3990 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
3991 sprime = fold_convert (TREE_TYPE (res), sprime);
3992 gimple *stmt = gimple_build_assign (res, sprime);
3993 /* ??? It cannot yet be necessary (DOM walk). */
3994 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
3995
3996 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
3997 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
3998 continue;
3999 }
4000
4001 eliminate_push_avail (res);
4002 gsi_next (&gsi);
4003 }
4004
4005 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4006 !gsi_end_p (gsi);
4007 gsi_next (&gsi))
4008 {
4009 tree sprime = NULL_TREE;
4010 gimple *stmt = gsi_stmt (gsi);
4011 tree lhs = gimple_get_lhs (stmt);
4012 if (lhs && TREE_CODE (lhs) == SSA_NAME
4013 && !gimple_has_volatile_ops (stmt)
4014 /* See PR43491. Do not replace a global register variable when
4015 it is the RHS of an assignment. Do replace local register
4016 variables since gcc does not guarantee a local variable will
4017 be allocated in a register.
4018 ??? The fix isn't effective here. This should instead
4019 be ensured by not value-numbering them the same but treating
4020 them like volatiles? */
4021 && !(gimple_assign_single_p (stmt)
4022 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4023 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4024 && is_global_var (gimple_assign_rhs1 (stmt)))))
4025 {
4026 sprime = eliminate_avail (lhs);
4027 if (!sprime)
4028 {
4029 /* If there is no existing usable leader but SCCVN thinks
4030 it has an expression it wants to use as replacement,
4031 insert that. */
4032 tree val = VN_INFO (lhs)->valnum;
4033 if (val != VN_TOP
4034 && TREE_CODE (val) == SSA_NAME
4035 && VN_INFO (val)->needs_insertion
4036 && VN_INFO (val)->expr != NULL
4037 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4038 eliminate_push_avail (sprime);
4039 }
4040
4041 /* If this now constitutes a copy, duplicate points-to
4042 and range info appropriately. This is especially
4043 important for inserted code. See tree-ssa-copy.c
4044 for similar code. */
4045 if (sprime
4046 && TREE_CODE (sprime) == SSA_NAME)
4047 {
4048 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4049 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4050 && SSA_NAME_PTR_INFO (lhs)
4051 && !SSA_NAME_PTR_INFO (sprime))
4052 {
4053 duplicate_ssa_name_ptr_info (sprime,
4054 SSA_NAME_PTR_INFO (lhs));
4055 if (b != sprime_b)
4056 mark_ptr_info_alignment_unknown
4057 (SSA_NAME_PTR_INFO (sprime));
4058 }
4059 else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
4060 && SSA_NAME_RANGE_INFO (lhs)
4061 && !SSA_NAME_RANGE_INFO (sprime)
4062 && b == sprime_b)
4063 duplicate_ssa_name_range_info (sprime,
4064 SSA_NAME_RANGE_TYPE (lhs),
4065 SSA_NAME_RANGE_INFO (lhs));
4066 }
4067
4068 /* Inhibit the use of an inserted PHI on a loop header when
4069 the address of the memory reference is a simple induction
4070 variable. In other cases the vectorizer won't do anything
4071 anyway (either it's loop invariant or a complicated
4072 expression). */
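/* (Illustrative case: a load a[i_1] in a loop with induction
   variable i_1. Replacing the load with an inserted PHI that
   carries the previous iteration's value would introduce a
   loop-carried dependence the vectorizer cannot handle.)  */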
4073 if (sprime
4074 && TREE_CODE (sprime) == SSA_NAME
4075 && do_pre
4076 && flag_tree_loop_vectorize
4077 && loop_outer (b->loop_father)
4078 && has_zero_uses (sprime)
4079 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4080 && gimple_assign_load_p (stmt))
4081 {
4082 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4083 basic_block def_bb = gimple_bb (def_stmt);
4084 if (gimple_code (def_stmt) == GIMPLE_PHI
4085 && def_bb->loop_father->header == def_bb)
4086 {
4087 loop_p loop = def_bb->loop_father;
4088 ssa_op_iter iter;
4089 tree op;
4090 bool found = false;
4091 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4092 {
4093 affine_iv iv;
4094 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4095 if (def_bb
4096 && flow_bb_inside_loop_p (loop, def_bb)
4097 && simple_iv (loop, loop, op, &iv, true))
4098 {
4099 found = true;
4100 break;
4101 }
4102 }
4103 if (found)
4104 {
4105 if (dump_file && (dump_flags & TDF_DETAILS))
4106 {
4107 fprintf (dump_file, "Not replacing ");
4108 print_gimple_expr (dump_file, stmt, 0, 0);
4109 fprintf (dump_file, " with ");
4110 print_generic_expr (dump_file, sprime, 0);
4111 fprintf (dump_file, " which would add a loop"
4112 " carried dependence to loop %d\n",
4113 loop->num);
4114 }
4115 /* Don't keep sprime available. */
4116 sprime = NULL_TREE;
4117 }
4118 }
4119 }
4120
4121 if (sprime)
4122 {
4123 /* If we can propagate the value computed for LHS into
4124 all uses, don't bother doing anything with this stmt. */
4125 if (may_propagate_copy (lhs, sprime))
4126 {
4127 /* Mark it for removal. */
4128 el_to_remove.safe_push (stmt);
4129
4130 /* ??? Don't count copy/constant propagations. */
4131 if (gimple_assign_single_p (stmt)
4132 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4133 || gimple_assign_rhs1 (stmt) == sprime))
4134 continue;
4135
4136 if (dump_file && (dump_flags & TDF_DETAILS))
4137 {
4138 fprintf (dump_file, "Replaced ");
4139 print_gimple_expr (dump_file, stmt, 0, 0);
4140 fprintf (dump_file, " with ");
4141 print_generic_expr (dump_file, sprime, 0);
4142 fprintf (dump_file, " in all uses of ");
4143 print_gimple_stmt (dump_file, stmt, 0, 0);
4144 }
4145
4146 pre_stats.eliminations++;
4147 continue;
4148 }
4149
4150 /* If this is an assignment from our leader (which
4151 happens when the value-number is a constant)
4152 then there is nothing to do. */
4153 if (gimple_assign_single_p (stmt)
4154 && sprime == gimple_assign_rhs1 (stmt))
4155 continue;
4156
4157 /* Else replace its RHS. */
4158 bool can_make_abnormal_goto
4159 = is_gimple_call (stmt)
4160 && stmt_can_make_abnormal_goto (stmt);
4161
4162 if (dump_file && (dump_flags & TDF_DETAILS))
4163 {
4164 fprintf (dump_file, "Replaced ");
4165 print_gimple_expr (dump_file, stmt, 0, 0);
4166 fprintf (dump_file, " with ");
4167 print_generic_expr (dump_file, sprime, 0);
4168 fprintf (dump_file, " in ");
4169 print_gimple_stmt (dump_file, stmt, 0, 0);
4170 }
4171
4172 if (TREE_CODE (sprime) == SSA_NAME)
4173 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4174 NECESSARY, true);
4175
4176 pre_stats.eliminations++;
4177 gimple *orig_stmt = stmt;
4178 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4179 TREE_TYPE (sprime)))
4180 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4181 tree vdef = gimple_vdef (stmt);
4182 tree vuse = gimple_vuse (stmt);
4183 propagate_tree_value_into_stmt (&gsi, sprime);
4184 stmt = gsi_stmt (gsi);
4185 update_stmt (stmt);
4186 if (vdef != gimple_vdef (stmt))
4187 VN_INFO (vdef)->valnum = vuse;
4188
4189 /* If we removed EH side-effects from the statement, clean
4190 its EH information. */
4191 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4192 {
4193 bitmap_set_bit (need_eh_cleanup,
4194 gimple_bb (stmt)->index);
4195 if (dump_file && (dump_flags & TDF_DETAILS))
4196 fprintf (dump_file, " Removed EH side-effects.\n");
4197 }
4198
4199 /* Likewise for AB side-effects. */
4200 if (can_make_abnormal_goto
4201 && !stmt_can_make_abnormal_goto (stmt))
4202 {
4203 bitmap_set_bit (need_ab_cleanup,
4204 gimple_bb (stmt)->index);
4205 if (dump_file && (dump_flags & TDF_DETAILS))
4206 fprintf (dump_file, " Removed AB side-effects.\n");
4207 }
4208
4209 continue;
4210 }
4211 }
4212
4213 /* If the statement is a scalar store, see if the expression
4214 has the same value number as its rhs. If so, the store is
4215 dead. */
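/* (For instance, given
     tmp_1 = *p_2;
     *p_2 = tmp_1;
   the value looked up for the store's LHS equals the value number
   of its RHS, so the store is redundant -- names illustrative.)  */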
4216 if (gimple_assign_single_p (stmt)
4217 && !gimple_has_volatile_ops (stmt)
4218 && !is_gimple_reg (gimple_assign_lhs (stmt))
4219 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4220 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4221 {
4222 tree val;
4223 tree rhs = gimple_assign_rhs1 (stmt);
4224 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4225 gimple_vuse (stmt), VN_WALK, NULL);
4226 if (TREE_CODE (rhs) == SSA_NAME)
4227 rhs = VN_INFO (rhs)->valnum;
4228 if (val
4229 && operand_equal_p (val, rhs, 0))
4230 {
4231 if (dump_file && (dump_flags & TDF_DETAILS))
4232 {
4233 fprintf (dump_file, "Deleted redundant store ");
4234 print_gimple_stmt (dump_file, stmt, 0, 0);
4235 }
4236
4237 /* Queue stmt for removal. */
4238 el_to_remove.safe_push (stmt);
4239 continue;
4240 }
4241 }
4242
4243 /* If this is a control statement for which value numbering left
4244 one of the outgoing edges unexecutable, force the condition in
4245 a way consistent with that. */
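/* (E.g. if only the false edge was ever marked executable, the
   condition is folded to constant false below and the scheduled
   CFG cleanup removes the now-dead true edge.)  */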
4246 if (gcond *cond = dyn_cast <gcond *> (stmt))
4247 {
4248 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
4249 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
4250 {
4251 if (dump_file && (dump_flags & TDF_DETAILS))
4252 {
4253 fprintf (dump_file, "Removing unexecutable edge from ");
4254 print_gimple_stmt (dump_file, stmt, 0, 0);
4255 }
4256 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
4257 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
4258 gimple_cond_make_true (cond);
4259 else
4260 gimple_cond_make_false (cond);
4261 update_stmt (cond);
4262 el_todo |= TODO_cleanup_cfg;
4263 continue;
4264 }
4265 }
4266
4267 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4268 bool was_noreturn = (is_gimple_call (stmt)
4269 && gimple_call_noreturn_p (stmt));
4270 tree vdef = gimple_vdef (stmt);
4271 tree vuse = gimple_vuse (stmt);
4272
4273 /* If we didn't replace the whole stmt (or propagate the result
4274 into all uses), replace all uses on this stmt with their
4275 leaders. */
4276 use_operand_p use_p;
4277 ssa_op_iter iter;
4278 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4279 {
4280 tree use = USE_FROM_PTR (use_p);
4281 /* ??? The call code above leaves stmt operands un-updated. */
4282 if (TREE_CODE (use) != SSA_NAME)
4283 continue;
4284 tree sprime = eliminate_avail (use);
4285 if (sprime && sprime != use
4286 && may_propagate_copy (use, sprime)
4287 /* We substitute into debug stmts to avoid excessive
4288 debug temporaries created by removed stmts, but we need
4289 to avoid doing so for inserted sprimes as we never want
4290 to create debug temporaries for them. */
4291 && (!inserted_exprs
4292 || TREE_CODE (sprime) != SSA_NAME
4293 || !is_gimple_debug (stmt)
4294 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4295 {
4296 propagate_value (use_p, sprime);
4297 gimple_set_modified (stmt, true);
4298 if (TREE_CODE (sprime) == SSA_NAME
4299 && !is_gimple_debug (stmt))
4300 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4301 NECESSARY, true);
4302 }
4303 }
4304
4305 /* Visit indirect calls and turn them into direct calls if
4306 possible using the devirtualization machinery. */
4307 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4308 {
4309 tree fn = gimple_call_fn (call_stmt);
4310 if (fn
4311 && flag_devirtualize
4312 && virtual_method_call_p (fn))
4313 {
4314 tree otr_type = obj_type_ref_class (fn);
4315 tree instance;
4316 ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
4317 bool final;
4318
4319 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
4320
4321 vec <cgraph_node *>targets
4322 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
4323 tree_to_uhwi
4324 (OBJ_TYPE_REF_TOKEN (fn)),
4325 context,
4326 &final);
4327 if (dump_file)
4328 dump_possible_polymorphic_call_targets (dump_file,
4329 obj_type_ref_class (fn),
4330 tree_to_uhwi
4331 (OBJ_TYPE_REF_TOKEN (fn)),
4332 context);
4333 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4334 {
4335 tree fn;
4336 if (targets.length () == 1)
4337 fn = targets[0]->decl;
4338 else
4339 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4340 if (dump_enabled_p ())
4341 {
4342 location_t loc = gimple_location_safe (stmt);
4343 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4344 "converting indirect call to "
4345 "function %s\n",
4346 cgraph_node::get (fn)->name ());
4347 }
4348 gimple_call_set_fndecl (call_stmt, fn);
4349 maybe_remove_unused_call_args (cfun, call_stmt);
4350 gimple_set_modified (stmt, true);
4351 }
4352 }
4353 }
4354
4355 if (gimple_modified_p (stmt))
4356 {
4357 /* If a formerly non-invariant ADDR_EXPR is turned into an
4358 invariant one, it was on a separate stmt. */
4359 if (gimple_assign_single_p (stmt)
4360 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4361 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4362 gimple *old_stmt = stmt;
4363 if (is_gimple_call (stmt))
4364 {
4365 /* ??? Only fold calls in place for now; full folding may create
4366 new SSA names which in turn will confuse the free_scc_vn SSA name
4367 release code. */
4368 fold_stmt_inplace (&gsi);
4369 /* When changing a call into a noreturn call, cfg cleanup
4370 is needed to fix up the noreturn call. */
4371 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4372 el_to_fixup.safe_push (stmt);
4373 }
4374 else
4375 {
4376 fold_stmt (&gsi);
4377 stmt = gsi_stmt (gsi);
4378 if ((gimple_code (stmt) == GIMPLE_COND
4379 && (gimple_cond_true_p (as_a <gcond *> (stmt))
4380 || gimple_cond_false_p (as_a <gcond *> (stmt))))
4381 || (gimple_code (stmt) == GIMPLE_SWITCH
4382 && TREE_CODE (gimple_switch_index (
4383 as_a <gswitch *> (stmt)))
4384 == INTEGER_CST))
4385 el_todo |= TODO_cleanup_cfg;
4386 }
4387 /* If we removed EH side-effects from the statement, clean
4388 its EH information. */
4389 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4390 {
4391 bitmap_set_bit (need_eh_cleanup,
4392 gimple_bb (stmt)->index);
4393 if (dump_file && (dump_flags & TDF_DETAILS))
4394 fprintf (dump_file, " Removed EH side-effects.\n");
4395 }
4396 /* Likewise for AB side-effects. */
4397 if (can_make_abnormal_goto
4398 && !stmt_can_make_abnormal_goto (stmt))
4399 {
4400 bitmap_set_bit (need_ab_cleanup,
4401 gimple_bb (stmt)->index);
4402 if (dump_file && (dump_flags & TDF_DETAILS))
4403 fprintf (dump_file, " Removed AB side-effects.\n");
4404 }
4405 update_stmt (stmt);
4406 if (vdef != gimple_vdef (stmt))
4407 VN_INFO (vdef)->valnum = vuse;
4408 }
4409
4410 /* Make new values available - for fully redundant LHS we
4411 continue with the next stmt above and skip this. */
4412 def_operand_p defp;
4413 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4414 eliminate_push_avail (DEF_FROM_PTR (defp));
4415 }
4416
4417 /* Replace destination PHI arguments. */
4418 edge_iterator ei;
4419 edge e;
4420 FOR_EACH_EDGE (e, ei, b->succs)
4421 {
4422 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4423 !gsi_end_p (gsi);
4424 gsi_next (&gsi))
4425 {
4426 gphi *phi = gsi.phi ();
4427 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4428 tree arg = USE_FROM_PTR (use_p);
4429 if (TREE_CODE (arg) != SSA_NAME
4430 || virtual_operand_p (arg))
4431 continue;
4432 tree sprime = eliminate_avail (arg);
4433 if (sprime && may_propagate_copy (arg, sprime))
4434 {
4435 propagate_value (use_p, sprime);
4436 if (TREE_CODE (sprime) == SSA_NAME)
4437 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4438 }
4439 }
4440 }
4441 }
4442
4443 /* Make leaders that have gone out of scope no longer available. */
4444
4445 void
4446 eliminate_dom_walker::after_dom_children (basic_block)
4447 {
4448 tree entry;
4449 while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4450 {
4451 tree valnum = VN_INFO (entry)->valnum;
4452 tree old = el_avail[SSA_NAME_VERSION (valnum)];
4453 if (old == entry)
4454 el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4455 else
4456 el_avail[SSA_NAME_VERSION (valnum)] = entry;
4457 }
4458 }
4459
4460 /* Eliminate fully redundant computations. */
4461
4462 static unsigned int
4463 eliminate (bool do_pre)
4464 {
4465 gimple_stmt_iterator gsi;
4466 gimple *stmt;
4467
4468 need_eh_cleanup = BITMAP_ALLOC (NULL);
4469 need_ab_cleanup = BITMAP_ALLOC (NULL);
4470
4471 el_to_remove.create (0);
4472 el_to_fixup.create (0);
4473 el_todo = 0;
4474 el_avail.create (num_ssa_names);
4475 el_avail_stack.create (0);
4476
4477 eliminate_dom_walker (CDI_DOMINATORS,
4478 do_pre).walk (cfun->cfg->x_entry_block_ptr);
4479
4480 el_avail.release ();
4481 el_avail_stack.release ();
4482
4483 /* We cannot remove stmts during the BB walk, especially not release
4484 SSA names there, as this confuses the VN machinery. The stmts ending
4485 up in el_to_remove are either stores or simple copies.
4486 Remove stmts in reverse order to make debug stmt creation possible. */
4487 while (!el_to_remove.is_empty ())
4488 {
4489 stmt = el_to_remove.pop ();
4490
4491 if (dump_file && (dump_flags & TDF_DETAILS))
4492 {
4493 fprintf (dump_file, "Removing dead stmt ");
4494 print_gimple_stmt (dump_file, stmt, 0, 0);
4495 }
4496
4497 tree lhs;
4498 if (gimple_code (stmt) == GIMPLE_PHI)
4499 lhs = gimple_phi_result (stmt);
4500 else
4501 lhs = gimple_get_lhs (stmt);
4502
4503 if (inserted_exprs
4504 && TREE_CODE (lhs) == SSA_NAME)
4505 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4506
4507 gsi = gsi_for_stmt (stmt);
4508 if (gimple_code (stmt) == GIMPLE_PHI)
4509 remove_phi_node (&gsi, true);
4510 else
4511 {
4512 basic_block bb = gimple_bb (stmt);
4513 unlink_stmt_vdef (stmt);
4514 if (gsi_remove (&gsi, true))
4515 bitmap_set_bit (need_eh_cleanup, bb->index);
4516 release_defs (stmt);
4517 }
4518
4519 /* Removing a stmt may expose a forwarder block. */
4520 el_todo |= TODO_cleanup_cfg;
4521 }
4522 el_to_remove.release ();
4523
4524 /* Fix up stmts that became noreturn calls. This may require splitting
4525 blocks and thus isn't possible during the dominator walk. Do this
4526 in reverse order so we don't inadvertently remove a stmt we want to
4527 fix up by visiting a dominating now-noreturn call first. */
4528 while (!el_to_fixup.is_empty ())
4529 {
4530 stmt = el_to_fixup.pop ();
4531
4532 if (dump_file && (dump_flags & TDF_DETAILS))
4533 {
4534 fprintf (dump_file, "Fixing up noreturn call ");
4535 print_gimple_stmt (dump_file, stmt, 0, 0);
4536 }
4537
4538 if (fixup_noreturn_call (stmt))
4539 el_todo |= TODO_cleanup_cfg;
4540 }
4541 el_to_fixup.release ();
4542
4543 return el_todo;
4544 }
4545
4546 /* Perform CFG cleanups made necessary by elimination. */
4547
4548 static unsigned
4549 fini_eliminate (void)
4550 {
4551 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4552 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4553
4554 if (do_eh_cleanup)
4555 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4556
4557 if (do_ab_cleanup)
4558 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4559
4560 BITMAP_FREE (need_eh_cleanup);
4561 BITMAP_FREE (need_ab_cleanup);
4562
4563 if (do_eh_cleanup || do_ab_cleanup)
4564 return TODO_cleanup_cfg;
4565 return 0;
4566 }
4567
4568 /* Borrow a bit of tree-ssa-dce.c for the moment.
4569 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4570 this may be a bit faster, and we may want critical edges kept split. */
4571
4572 /* If OP's defining statement has not already been determined to be necessary,
4573 mark that statement necessary. Return the stmt if it is newly
4574 necessary. */
4575
4576 static inline gimple *
4577 mark_operand_necessary (tree op)
4578 {
4579 gimple *stmt;
4580
4581 gcc_assert (op);
4582
4583 if (TREE_CODE (op) != SSA_NAME)
4584 return NULL;
4585
4586 stmt = SSA_NAME_DEF_STMT (op);
4587 gcc_assert (stmt);
4588
4589 if (gimple_plf (stmt, NECESSARY)
4590 || gimple_nop_p (stmt))
4591 return NULL;
4592
4593 gimple_set_plf (stmt, NECESSARY, true);
4594 return stmt;
4595 }
4596
4597 /* Because we don't follow the standard PRE algorithm exactly, sometimes
4598 decide not to insert PHI nodes, and because value numbering of casts isn't
4599 perfect, we sometimes end up inserting dead code. This simple DCE-like
4600 pass removes any insertions we made that weren't actually used. */
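/* (Sketch of the scheme: seed a worklist with the inserted SSA names
   whose defining statements are already marked NECESSARY, propagate
   necessity backwards through PHI arguments and SSA uses, then
   remove any insertion whose definition was never marked.)  */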
4601
4602 static void
4603 remove_dead_inserted_code (void)
4604 {
4605 bitmap worklist;
4606 unsigned i;
4607 bitmap_iterator bi;
4608 gimple *t;
4609
4610 worklist = BITMAP_ALLOC (NULL);
4611 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4612 {
4613 t = SSA_NAME_DEF_STMT (ssa_name (i));
4614 if (gimple_plf (t, NECESSARY))
4615 bitmap_set_bit (worklist, i);
4616 }
4617 while (!bitmap_empty_p (worklist))
4618 {
4619 i = bitmap_first_set_bit (worklist);
4620 bitmap_clear_bit (worklist, i);
4621 t = SSA_NAME_DEF_STMT (ssa_name (i));
4622
4623 /* PHI nodes are somewhat special in that each PHI alternative has
4624 data and control dependencies. All the statements feeding the
4625 PHI node's arguments are always necessary. */
4626 if (gimple_code (t) == GIMPLE_PHI)
4627 {
4628 unsigned k;
4629
4630 for (k = 0; k < gimple_phi_num_args (t); k++)
4631 {
4632 tree arg = PHI_ARG_DEF (t, k);
4633 if (TREE_CODE (arg) == SSA_NAME)
4634 {
4635 gimple *n = mark_operand_necessary (arg);
4636 if (n)
4637 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4638 }
4639 }
4640 }
4641 else
4642 {
4643 /* Propagate through the operands. Examine all the USE, VUSE and
4644 VDEF operands in this statement. Mark all the statements
4645 which feed this statement's uses as necessary. */
4646 ssa_op_iter iter;
4647 tree use;
4648
4649 /* The operands of VDEF expressions are also needed as they
4650 represent potential definitions that may reach this
4651 statement (VDEF operands allow us to follow def-def
4652 links). */
4653
4654 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4655 {
4656 gimple *n = mark_operand_necessary (use);
4657 if (n)
4658 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4659 }
4660 }
4661 }
4662
4663 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4664 {
4665 t = SSA_NAME_DEF_STMT (ssa_name (i));
4666 if (!gimple_plf (t, NECESSARY))
4667 {
4668 gimple_stmt_iterator gsi;
4669
4670 if (dump_file && (dump_flags & TDF_DETAILS))
4671 {
4672 fprintf (dump_file, "Removing unnecessary insertion:");
4673 print_gimple_stmt (dump_file, t, 0, 0);
4674 }
4675
4676 gsi = gsi_for_stmt (t);
4677 if (gimple_code (t) == GIMPLE_PHI)
4678 remove_phi_node (&gsi, true);
4679 else
4680 {
4681 gsi_remove (&gsi, true);
4682 release_defs (t);
4683 }
4684 }
4685 }
4686 BITMAP_FREE (worklist);
4687 }
4688
4689
4690 /* Initialize data structures used by PRE. */
4691
4692 static void
4693 init_pre (void)
4694 {
4695 basic_block bb;
4696
4697 next_expression_id = 1;
4698 expressions.create (0);
4699 expressions.safe_push (NULL);
4700 value_expressions.create (get_max_value_id () + 1);
4701 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4702 name_to_id.create (0);
4703
4704 inserted_exprs = BITMAP_ALLOC (NULL);
4705
4706 connect_infinite_loops_to_exit ();
4707 memset (&pre_stats, 0, sizeof (pre_stats));
4708
4709 postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
4710 postorder_num = inverted_post_order_compute (postorder);
4711
4712 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4713
4714 calculate_dominance_info (CDI_POST_DOMINATORS);
4715 calculate_dominance_info (CDI_DOMINATORS);
4716
4717 bitmap_obstack_initialize (&grand_bitmap_obstack);
4718 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4719 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4720 FOR_ALL_BB_FN (bb, cfun)
4721 {
4722 EXP_GEN (bb) = bitmap_set_new ();
4723 PHI_GEN (bb) = bitmap_set_new ();
4724 TMP_GEN (bb) = bitmap_set_new ();
4725 AVAIL_OUT (bb) = bitmap_set_new ();
4726 }
4727 }
4728
4729
4730 /* Deallocate data structures used by PRE. */
4731
4732 static void
4733 fini_pre ()
4734 {
4735 free (postorder);
4736 value_expressions.release ();
4737 BITMAP_FREE (inserted_exprs);
4738 bitmap_obstack_release (&grand_bitmap_obstack);
4739 bitmap_set_pool.release ();
4740 pre_expr_pool.release ();
4741 delete phi_translate_table;
4742 phi_translate_table = NULL;
4743 delete expression_to_id;
4744 expression_to_id = NULL;
4745 name_to_id.release ();
4746
4747 free_aux_for_blocks ();
4748
4749 free_dominance_info (CDI_POST_DOMINATORS);
4750 }
4751
4752 namespace {
4753
4754 const pass_data pass_data_pre =
4755 {
4756 GIMPLE_PASS, /* type */
4757 "pre", /* name */
4758 OPTGROUP_NONE, /* optinfo_flags */
4759 TV_TREE_PRE, /* tv_id */
4760 /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
4761 pass_pre. */
4762 ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
4763 0, /* properties_provided */
4764 PROP_no_crit_edges, /* properties_destroyed */
4765 TODO_rebuild_alias, /* todo_flags_start */
4766 0, /* todo_flags_finish */
4767 };
4768
4769 class pass_pre : public gimple_opt_pass
4770 {
4771 public:
4772 pass_pre (gcc::context *ctxt)
4773 : gimple_opt_pass (pass_data_pre, ctxt)
4774 {}
4775
4776 /* opt_pass methods: */
4777 virtual bool gate (function *) { return flag_tree_pre != 0; }
4778 virtual unsigned int execute (function *);
4779
4780 }; // class pass_pre
4781
4782 unsigned int
4783 pass_pre::execute (function *fun)
4784 {
4785 unsigned int todo = 0;
4786
4787 do_partial_partial =
4788 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4789
4790 /* This has to happen before SCCVN runs because
4791 loop_optimizer_init may create new phis, etc. */
4792 loop_optimizer_init (LOOPS_NORMAL);
4793
4794 if (!run_scc_vn (VN_WALK))
4795 {
4796 loop_optimizer_finalize ();
4797 return 0;
4798 }
4799
4800 init_pre ();
4801 scev_initialize ();
4802
4803 /* Collect and value number expressions computed in each basic block. */
4804 compute_avail ();
4805
4806 /* Insert can get quite slow on an incredibly large number of basic
4807 blocks due to some quadratic behavior. Until this behavior is
4808 fixed, don't run it when we have an incredibly large number of
4809 bb's. If we aren't going to run insert, there is no point in
4810 computing ANTIC, either, even though it's plenty fast. */
4811 if (n_basic_blocks_for_fn (fun) < 4000)
4812 {
4813 compute_antic ();
4814 insert ();
4815 }
4816
4817 /* Make sure to remove fake edges before committing our inserts.
4818 This makes sure we don't end up with extra critical edges that
4819 we would need to split. */
4820 remove_fake_exit_edges ();
4821 gsi_commit_edge_inserts ();
4822
4823 /* The elimination below folds statements which might (though it
4824 should not...) end up not keeping virtual operands up-to-date. */
4825 gcc_assert (!need_ssa_update_p (fun));
4826
4827 /* Remove all the redundant expressions. */
4828 todo |= eliminate (true);
4829
4830 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4831 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4832 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4833 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4834
4835 clear_expression_ids ();
4836 remove_dead_inserted_code ();
4837
4838 scev_finalize ();
4839 fini_pre ();
4840 todo |= fini_eliminate ();
4841 loop_optimizer_finalize ();
4842
4843 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4844 case we can merge the block with the remaining predecessor of the block.
4845 It should either:
4846 - call merge_blocks after each tail merge iteration
4847 - call merge_blocks after all tail merge iterations
4848 - mark TODO_cleanup_cfg when necessary
4849 - share the cfg cleanup with fini_pre. */
4850 todo |= tail_merge_optimize (todo);
4851
4852 free_scc_vn ();
4853
4854 /* Tail merging invalidates the virtual SSA web, together with
4855 cfg-cleanup opportunities exposed by PRE this will wreck the
4856 SSA updating machinery. So make sure to run update-ssa
4857 manually, before eventually scheduling cfg-cleanup as part of
4858 the todo. */
4859 update_ssa (TODO_update_ssa_only_virtuals);
4860
4861 return todo;
4862 }
4863
4864 } // anon namespace
4865
4866 gimple_opt_pass *
4867 make_pass_pre (gcc::context *ctxt)
4868 {
4869 return new pass_pre (ctxt);
4870 }
4871
4872 namespace {
4873
4874 const pass_data pass_data_fre =
4875 {
4876 GIMPLE_PASS, /* type */
4877 "fre", /* name */
4878 OPTGROUP_NONE, /* optinfo_flags */
4879 TV_TREE_FRE, /* tv_id */
4880 ( PROP_cfg | PROP_ssa ), /* properties_required */
4881 0, /* properties_provided */
4882 0, /* properties_destroyed */
4883 0, /* todo_flags_start */
4884 0, /* todo_flags_finish */
4885 };
4886
4887 class pass_fre : public gimple_opt_pass
4888 {
4889 public:
4890 pass_fre (gcc::context *ctxt)
4891 : gimple_opt_pass (pass_data_fre, ctxt)
4892 {}
4893
4894 /* opt_pass methods: */
4895 opt_pass * clone () { return new pass_fre (m_ctxt); }
4896 virtual bool gate (function *) { return flag_tree_fre != 0; }
4897 virtual unsigned int execute (function *);
4898
4899 }; // class pass_fre
4900
4901 unsigned int
4902 pass_fre::execute (function *fun)
4903 {
4904 unsigned int todo = 0;
4905
4906 if (!run_scc_vn (VN_WALKREWRITE))
4907 return 0;
4908
4909 memset (&pre_stats, 0, sizeof (pre_stats));
4910
4911 /* Remove all the redundant expressions. */
4912 todo |= eliminate (false);
4913
4914 todo |= fini_eliminate ();
4915
4916 free_scc_vn ();
4917
4918 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4919 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4920
4921 return todo;
4922 }
4923
4924 } // anon namespace
4925
4926 gimple_opt_pass *
4927 make_pass_fre (gcc::context *ctxt)
4928 {
4929 return new pass_fre (ctxt);
4930 }