1 /* SSA-PRE for trees.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
4 <stevenb@suse.de>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "predict.h"
38 #include "hard-reg-set.h"
39 #include "input.h"
40 #include "function.h"
41 #include "dominance.h"
42 #include "cfg.h"
43 #include "cfganal.h"
44 #include "basic-block.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-inline.h"
47 #include "inchash.h"
48 #include "hash-table.h"
49 #include "tree-ssa-alias.h"
50 #include "internal-fn.h"
51 #include "gimple-fold.h"
52 #include "tree-eh.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimplify.h"
57 #include "gimple-iterator.h"
58 #include "gimplify-me.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "stringpool.h"
64 #include "tree-ssanames.h"
65 #include "tree-ssa-loop.h"
66 #include "tree-into-ssa.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "tree-ssa.h"
70 #include "tree-iterator.h"
71 #include "alloc-pool.h"
72 #include "obstack.h"
73 #include "tree-pass.h"
74 #include "flags.h"
75 #include "langhooks.h"
76 #include "cfgloop.h"
77 #include "tree-ssa-sccvn.h"
78 #include "tree-scalar-evolution.h"
79 #include "params.h"
80 #include "dbgcnt.h"
81 #include "domwalk.h"
82 #include "hash-map.h"
83 #include "plugin-api.h"
84 #include "ipa-ref.h"
85 #include "cgraph.h"
86 #include "symbol-summary.h"
87 #include "ipa-prop.h"
88 #include "tree-ssa-propagate.h"
89 #include "ipa-utils.h"
90
91 /* TODO:
92
93 1. Avail sets can be shared by making an avail_find_leader that
94 walks up the dominator tree and looks in those avail sets.
95       This might affect code optimality; it's unclear right now.
96 2. Strength reduction can be performed by anticipating expressions
97 we can repair later on.
98 3. We can do back-substitution or smarter value numbering to catch
99 commutative expressions split up over multiple statements.
100 */
101
102 /* For ease of terminology, "expression node" in the below refers to
103 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
104 represent the actual statement containing the expressions we care about,
105 and we cache the value number by putting it in the expression. */
106
107 /* Basic algorithm
108
109 First we walk the statements to generate the AVAIL sets, the
110 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
111 generation of values/expressions by a given block. We use them
112 when computing the ANTIC sets. The AVAIL sets consist of
113 SSA_NAME's that represent values, so we know what values are
114 available in what blocks. AVAIL is a forward dataflow problem. In
115 SSA, values are never killed, so we don't need a kill set, or a
116 fixpoint iteration, in order to calculate the AVAIL sets. In
117 traditional parlance, AVAIL sets tell us the downsafety of the
118 expressions/values.
119
120 Next, we generate the ANTIC sets. These sets represent the
121 anticipatable expressions. ANTIC is a backwards dataflow
122 problem. An expression is anticipatable in a given block if it could
123 be generated in that block. This means that if we had to perform
124 an insertion in that block, of the value of that expression, we
125 could. Calculating the ANTIC sets requires phi translation of
126 expressions, because the flow goes backwards through phis. We must
127 iterate to a fixpoint of the ANTIC sets, because we have a kill
128 set. Even in SSA form, values are not live over the entire
129 function, only from their definition point onwards. So we have to
130 remove values from the ANTIC set once we go past the definition
131 point of the leaders that make them up.
132 compute_antic/compute_antic_aux performs this computation.
133
134 Third, we perform insertions to make partially redundant
135 expressions fully redundant.
136
137 An expression is partially redundant (excluding partial
138 anticipation) if:
139
140 1. It is AVAIL in some, but not all, of the predecessors of a
141 given block.
142 2. It is ANTIC in all the predecessors.
143
144 In order to make it fully redundant, we insert the expression into
145 the predecessors where it is not available, but is ANTIC.
146
147 For the partial anticipation case, we only perform insertion if it
148 is partially anticipated in some block, and fully available in all
149 of the predecessors.
150
151 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
152 performs these steps.
153
154 Fourth, we eliminate fully redundant expressions.
155 This is a simple statement walk that replaces redundant
156 calculations with the now available values. */
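/* As a concrete illustration of the above (a hypothetical GIMPLE
   fragment, not taken from these sources):

       if (cond_1)
         a_2 = b_3 + c_4;    <- b_3 + c_4 is AVAIL on this path only
       <join>
       x_5 = b_3 + c_4;      <- partially redundant

   b_3 + c_4 is ANTIC at the join and AVAIL in one predecessor, so
   insertion adds pretmp_6 = b_3 + c_4 on the edge where it was
   missing; elimination can then replace the computation in
   x_5 = b_3 + c_4 with the value made available through a PHI,
   making the redundancy full.  */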
157
158 /* Representations of value numbers:
159
160 Value numbers are represented by a representative SSA_NAME. We
161 will create fake SSA_NAME's in situations where we need a
162 representative but do not have one (because it is a complex
163 expression). In order to facilitate storing the value numbers in
164 bitmaps, and keep the number of wasted SSA_NAME's down, we also
165 associate a value_id with each value number, and create full blown
166 ssa_name's only where we actually need them (IE in operands of
167 existing expressions).
168
169 Theoretically you could replace all the value_id's with
170 SSA_NAME_VERSION, but this would allocate a large number of
171 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
172 It would also require an additional indirection at each point we
173 use the value id. */
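/* For example (hypothetical numbers): if SCCVN proves that a_1 and
   the expression b_2 + c_3 compute the same value, both are
   associated with a single value_id, say 7; value_expressions[7]
   (see below) then holds the expression ids of every pre_expr with
   that value, and a leader lookup picks an SSA_NAME (here a_1) out
   of that set.  */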
174
175 /* Representation of expressions on value numbers:
176
177 Expressions consisting of value numbers are represented the same
178 way as our VN internally represents them, with an additional
179 "pre_expr" wrapping around them in order to facilitate storing all
180 of the expressions in the same sets. */
181
182 /* Representation of sets:
183
184 The dataflow sets do not need to be sorted in any particular order
185    for the majority of their lifetime, and are simply represented as two
186 bitmaps, one that keeps track of values present in the set, and one
187 that keeps track of expressions present in the set.
188
189 When we need them in topological order, we produce it on demand by
190 transforming the bitmap into an array and sorting it into topo
191 order. */
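/* Sketch of the membership invariant this representation implies
   (illustrative): an expression E with value V is in a set S iff bit
   get_expression_id (E) is set in S->expressions and bit V is set in
   S->values; bitmap_insert_into_set_1 below maintains the two
   bitmaps in lockstep.  */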
192
193 /* Type of expression, used to know which member of the PRE_EXPR union
194 is valid. */
195
196 enum pre_expr_kind
197 {
198 NAME,
199 NARY,
200 REFERENCE,
201 CONSTANT
202 };
203
204 typedef union pre_expr_union_d
205 {
206 tree name;
207 tree constant;
208 vn_nary_op_t nary;
209 vn_reference_t reference;
210 } pre_expr_union;
211
212 typedef struct pre_expr_d : typed_noop_remove <pre_expr_d>
213 {
214 enum pre_expr_kind kind;
215 unsigned int id;
216 pre_expr_union u;
217
218 /* hash_table support. */
219 typedef pre_expr_d value_type;
220 typedef pre_expr_d compare_type;
221 static inline hashval_t hash (const pre_expr_d *);
222 static inline int equal (const pre_expr_d *, const pre_expr_d *);
223 } *pre_expr;
224
225 #define PRE_EXPR_NAME(e) (e)->u.name
226 #define PRE_EXPR_NARY(e) (e)->u.nary
227 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
228 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
229
230 /* Compare E1 and E2 for equality.  */
231
232 inline int
233 pre_expr_d::equal (const value_type *e1, const compare_type *e2)
234 {
235 if (e1->kind != e2->kind)
236 return false;
237
238 switch (e1->kind)
239 {
240 case CONSTANT:
241 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
242 PRE_EXPR_CONSTANT (e2));
243 case NAME:
244 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
245 case NARY:
246 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
247 case REFERENCE:
248 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
249 PRE_EXPR_REFERENCE (e2));
250 default:
251 gcc_unreachable ();
252 }
253 }
254
255 /* Hash E. */
256
257 inline hashval_t
258 pre_expr_d::hash (const value_type *e)
259 {
260 switch (e->kind)
261 {
262 case CONSTANT:
263 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
264 case NAME:
265 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
266 case NARY:
267 return PRE_EXPR_NARY (e)->hashcode;
268 case REFERENCE:
269 return PRE_EXPR_REFERENCE (e)->hashcode;
270 default:
271 gcc_unreachable ();
272 }
273 }
274
275 /* Next global expression id number. */
276 static unsigned int next_expression_id;
277
278 /* Mapping from expression to id number we can use in bitmap sets. */
279 static vec<pre_expr> expressions;
280 static hash_table<pre_expr_d> *expression_to_id;
281 static vec<unsigned> name_to_id;
282
283 /* Allocate an expression id for EXPR. */
284
285 static inline unsigned int
286 alloc_expression_id (pre_expr expr)
287 {
288 struct pre_expr_d **slot;
289 /* Make sure we won't overflow. */
290 gcc_assert (next_expression_id + 1 > next_expression_id);
291 expr->id = next_expression_id++;
292 expressions.safe_push (expr);
293 if (expr->kind == NAME)
294 {
295 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
296 /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
297 re-allocations by using vec::reserve upfront. */
298 unsigned old_len = name_to_id.length ();
299 name_to_id.reserve (num_ssa_names - old_len);
300 name_to_id.quick_grow_cleared (num_ssa_names);
301 gcc_assert (name_to_id[version] == 0);
302 name_to_id[version] = expr->id;
303 }
304 else
305 {
306 slot = expression_to_id->find_slot (expr, INSERT);
307 gcc_assert (!*slot);
308 *slot = expr;
309 }
310 return next_expression_id - 1;
311 }
312
313 /* Return the expression id for tree EXPR. */
314
315 static inline unsigned int
316 get_expression_id (const pre_expr expr)
317 {
318 return expr->id;
319 }
320
321 static inline unsigned int
322 lookup_expression_id (const pre_expr expr)
323 {
324 struct pre_expr_d **slot;
325
326 if (expr->kind == NAME)
327 {
328 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
329 if (name_to_id.length () <= version)
330 return 0;
331 return name_to_id[version];
332 }
333 else
334 {
335 slot = expression_to_id->find_slot (expr, NO_INSERT);
336 if (!slot)
337 return 0;
338 return ((pre_expr)*slot)->id;
339 }
340 }
341
342 /* Return the existing expression id for EXPR, or create one if one
343 does not exist yet. */
344
345 static inline unsigned int
346 get_or_alloc_expression_id (pre_expr expr)
347 {
348 unsigned int id = lookup_expression_id (expr);
349 if (id == 0)
350 return alloc_expression_id (expr);
351 return expr->id = id;
352 }
353
354 /* Return the expression that has expression id ID */
355
356 static inline pre_expr
357 expression_for_id (unsigned int id)
358 {
359 return expressions[id];
360 }
361
362 /* Free the expression id field in all of our expressions,
363 and then destroy the expressions array. */
364
365 static void
366 clear_expression_ids (void)
367 {
368 expressions.release ();
369 }
370
371 static alloc_pool pre_expr_pool;
372
373 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
374
375 static pre_expr
376 get_or_alloc_expr_for_name (tree name)
377 {
378 struct pre_expr_d expr;
379 pre_expr result;
380 unsigned int result_id;
381
382 expr.kind = NAME;
383 expr.id = 0;
384 PRE_EXPR_NAME (&expr) = name;
385 result_id = lookup_expression_id (&expr);
386 if (result_id != 0)
387 return expression_for_id (result_id);
388
389 result = (pre_expr) pool_alloc (pre_expr_pool);
390 result->kind = NAME;
391 PRE_EXPR_NAME (result) = name;
392 alloc_expression_id (result);
393 return result;
394 }
395
396 /* An unordered bitmap set. One bitmap tracks values, the other,
397 expressions. */
398 typedef struct bitmap_set
399 {
400 bitmap_head expressions;
401 bitmap_head values;
402 } *bitmap_set_t;
403
404 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
405 EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))
406
407 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
408 EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))
409
410 /* Mapping from value id to expressions with that value_id. */
411 static vec<bitmap> value_expressions;
412
413 /* Sets that we need to keep track of. */
414 typedef struct bb_bitmap_sets
415 {
416 /* The EXP_GEN set, which represents expressions/values generated in
417 a basic block. */
418 bitmap_set_t exp_gen;
419
420 /* The PHI_GEN set, which represents PHI results generated in a
421 basic block. */
422 bitmap_set_t phi_gen;
423
424 /* The TMP_GEN set, which represents results/temporaries generated
425 in a basic block. IE the LHS of an expression. */
426 bitmap_set_t tmp_gen;
427
428 /* The AVAIL_OUT set, which represents which values are available in
429 a given basic block. */
430 bitmap_set_t avail_out;
431
432 /* The ANTIC_IN set, which represents which values are anticipatable
433 in a given basic block. */
434 bitmap_set_t antic_in;
435
436 /* The PA_IN set, which represents which values are
437 partially anticipatable in a given basic block. */
438 bitmap_set_t pa_in;
439
440 /* The NEW_SETS set, which is used during insertion to augment the
441 AVAIL_OUT set of blocks with the new insertions performed during
442 the current iteration. */
443 bitmap_set_t new_sets;
444
445 /* A cache for value_dies_in_block_x. */
446 bitmap expr_dies;
447
448 /* The live virtual operand on successor edges. */
449 tree vop_on_exit;
450
451 /* True if we have visited this block during ANTIC calculation. */
452 unsigned int visited : 1;
453
454 /* True when the block contains a call that might not return. */
455 unsigned int contains_may_not_return_call : 1;
456 } *bb_value_sets_t;
457
458 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
459 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
460 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
461 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
462 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
463 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
464 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
465 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
466 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
467 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
468 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
469
470
471 /* Basic block list in postorder. */
472 static int *postorder;
473 static int postorder_num;
474
475 /* This structure is used to keep track of statistics on what
476 optimization PRE was able to perform. */
477 static struct
478 {
479 /* The number of RHS computations eliminated by PRE. */
480 int eliminations;
481
482 /* The number of new expressions/temporaries generated by PRE. */
483 int insertions;
484
485   /* The number of inserts found due to partial anticipation.  */
486 int pa_insert;
487
488 /* The number of new PHI nodes added by PRE. */
489 int phis;
490 } pre_stats;
491
492 static bool do_partial_partial;
493 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
494 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
495 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
496 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
497 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
498 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
499 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
500 unsigned int, bool);
501 static bitmap_set_t bitmap_set_new (void);
502 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
503 tree);
504 static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
505 static unsigned int get_expr_value_id (pre_expr);
506
507 /* Because we frequently add and remove elements and entries to and
508    from sets and hash tables, we use alloc pools for them.  */
509
510 static alloc_pool bitmap_set_pool;
511 static bitmap_obstack grand_bitmap_obstack;
512
513 /* Set of blocks with statements that have had their EH properties changed. */
514 static bitmap need_eh_cleanup;
515
516 /* Set of blocks with statements that have had their AB properties changed. */
517 static bitmap need_ab_cleanup;
518
519 /* A three tuple {e, pred, v} used to cache phi translations in the
520 phi_translate_table. */
521
522 typedef struct expr_pred_trans_d : typed_free_remove<expr_pred_trans_d>
523 {
524 /* The expression. */
525 pre_expr e;
526
527 /* The predecessor block along which we translated the expression. */
528 basic_block pred;
529
530 /* The value that resulted from the translation. */
531 pre_expr v;
532
533 /* The hashcode for the expression, pred pair. This is cached for
534 speed reasons. */
535 hashval_t hashcode;
536
537 /* hash_table support. */
538 typedef expr_pred_trans_d value_type;
539 typedef expr_pred_trans_d compare_type;
540 static inline hashval_t hash (const value_type *);
541 static inline int equal (const value_type *, const compare_type *);
542 } *expr_pred_trans_t;
543 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
544
545 inline hashval_t
546 expr_pred_trans_d::hash (const expr_pred_trans_d *e)
547 {
548 return e->hashcode;
549 }
550
551 inline int
552 expr_pred_trans_d::equal (const value_type *ve1,
553 const compare_type *ve2)
554 {
555 basic_block b1 = ve1->pred;
556 basic_block b2 = ve2->pred;
557
558 /* If they are not translations for the same basic block, they can't
559 be equal. */
560 if (b1 != b2)
561 return false;
562 return pre_expr_d::equal (ve1->e, ve2->e);
563 }
564
565 /* The phi_translate_table caches phi translations for a given
566 expression and predecessor. */
567 static hash_table<expr_pred_trans_d> *phi_translate_table;
568
569 /* Add the tuple mapping from {expression E, basic block PRED} to
570 the phi translation table and return whether it pre-existed. */
571
572 static inline bool
573 phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
574 {
575 expr_pred_trans_t *slot;
576 expr_pred_trans_d tem;
577 hashval_t hash = iterative_hash_hashval_t (pre_expr_d::hash (e),
578 pred->index);
579 tem.e = e;
580 tem.pred = pred;
581 tem.hashcode = hash;
582 slot = phi_translate_table->find_slot_with_hash (&tem, hash, INSERT);
583 if (*slot)
584 {
585 *entry = *slot;
586 return true;
587 }
588
589 *entry = *slot = XNEW (struct expr_pred_trans_d);
590 (*entry)->e = e;
591 (*entry)->pred = pred;
592 (*entry)->hashcode = hash;
593 return false;
594 }
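/* A typical use of the above (this is how phi_translate below employs
   it; a sketch only, not a complete caller):

       expr_pred_trans_t slot;
       if (phi_trans_add (&slot, expr, pred))
         return slot->v;     /. cached translation, possibly NULL ./
       slot->v = NULL;       /. mark in-progress against recursion ./
       ... compute the translation, then store it in slot->v ...  */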
595
596
597 /* Add expression E to the expression set of value id V. */
598
599 static void
600 add_to_value (unsigned int v, pre_expr e)
601 {
602 bitmap set;
603
604 gcc_checking_assert (get_expr_value_id (e) == v);
605
606 if (v >= value_expressions.length ())
607 {
608 value_expressions.safe_grow_cleared (v + 1);
609 }
610
611 set = value_expressions[v];
612 if (!set)
613 {
614 set = BITMAP_ALLOC (&grand_bitmap_obstack);
615 value_expressions[v] = set;
616 }
617
618 bitmap_set_bit (set, get_or_alloc_expression_id (e));
619 }
620
621 /* Create a new bitmap set and return it. */
622
623 static bitmap_set_t
624 bitmap_set_new (void)
625 {
626 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
627 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
628 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
629 return ret;
630 }
631
632 /* Return the value id for a PRE expression EXPR. */
633
634 static unsigned int
635 get_expr_value_id (pre_expr expr)
636 {
637 unsigned int id;
638 switch (expr->kind)
639 {
640 case CONSTANT:
641 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
642 break;
643 case NAME:
644 id = VN_INFO (PRE_EXPR_NAME (expr))->value_id;
645 break;
646 case NARY:
647 id = PRE_EXPR_NARY (expr)->value_id;
648 break;
649 case REFERENCE:
650 id = PRE_EXPR_REFERENCE (expr)->value_id;
651 break;
652 default:
653 gcc_unreachable ();
654 }
655 /* ??? We cannot assert that expr has a value-id (it can be 0), because
656 we assign value-ids only to expressions that have a result
657 in set_hashtable_value_ids. */
658 return id;
659 }
660
661 /* Return a SCCVN valnum (SSA name or constant) for the PRE value-id VAL. */
662
663 static tree
664 sccvn_valnum_from_value_id (unsigned int val)
665 {
666 bitmap_iterator bi;
667 unsigned int i;
668 bitmap exprset = value_expressions[val];
669 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
670 {
671 pre_expr vexpr = expression_for_id (i);
672 if (vexpr->kind == NAME)
673 return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
674 else if (vexpr->kind == CONSTANT)
675 return PRE_EXPR_CONSTANT (vexpr);
676 }
677 return NULL_TREE;
678 }
679
680 /* Remove an expression EXPR from a bitmapped set. */
681
682 static void
683 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
684 {
685 unsigned int val = get_expr_value_id (expr);
686 if (!value_id_constant_p (val))
687 {
688 bitmap_clear_bit (&set->values, val);
689 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
690 }
691 }
692
693 static void
694 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
695 unsigned int val, bool allow_constants)
696 {
697 if (allow_constants || !value_id_constant_p (val))
698 {
699 /* We specifically expect this and only this function to be able to
700 insert constants into a set. */
701 bitmap_set_bit (&set->values, val);
702 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
703 }
704 }
705
706 /* Insert an expression EXPR into a bitmapped set. */
707
708 static void
709 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
710 {
711 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
712 }
713
714 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
715
716 static void
717 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
718 {
719 bitmap_copy (&dest->expressions, &orig->expressions);
720 bitmap_copy (&dest->values, &orig->values);
721 }
722
723
724 /* Free memory used up by SET. */
725 static void
726 bitmap_set_free (bitmap_set_t set)
727 {
728 bitmap_clear (&set->expressions);
729 bitmap_clear (&set->values);
730 }
731
732
733 /* Generate a topologically-ordered array of the expressions in
    bitmap set SET.  */
734
735 static vec<pre_expr>
736 sorted_array_from_bitmap_set (bitmap_set_t set)
737 {
738 unsigned int i, j;
739 bitmap_iterator bi, bj;
740 vec<pre_expr> result;
741
742 /* Pre-allocate enough space for the array. */
743 result.create (bitmap_count_bits (&set->expressions));
744
745 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
746 {
747 /* The number of expressions having a given value is usually
748 relatively small. Thus, rather than making a vector of all
749 the expressions and sorting it by value-id, we walk the values
750 and check in the reverse mapping that tells us what expressions
751 have a given value, to filter those in our set. As a result,
752 the expressions are inserted in value-id order, which means
753 topological order.
754
755      If this is somehow a significant loss for some cases, we can
756 choose which set to walk based on the set size. */
757 bitmap exprset = value_expressions[i];
758 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
759 {
760 if (bitmap_bit_p (&set->expressions, j))
761 result.quick_push (expression_for_id (j));
762 }
763 }
764
765 return result;
766 }
767
768 /* Perform bitmapped set operation DEST &= ORIG. */
769
770 static void
771 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
772 {
773 bitmap_iterator bi;
774 unsigned int i;
775
776 if (dest != orig)
777 {
778 bitmap_head temp;
779 bitmap_initialize (&temp, &grand_bitmap_obstack);
780
781 bitmap_and_into (&dest->values, &orig->values);
782 bitmap_copy (&temp, &dest->expressions);
783 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
784 {
785 pre_expr expr = expression_for_id (i);
786 unsigned int value_id = get_expr_value_id (expr);
787 if (!bitmap_bit_p (&dest->values, value_id))
788 bitmap_clear_bit (&dest->expressions, i);
789 }
790 bitmap_clear (&temp);
791 }
792 }
793
794 /* Subtract all values and expressions contained in ORIG from DEST. */
795
796 static bitmap_set_t
797 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
798 {
799 bitmap_set_t result = bitmap_set_new ();
800 bitmap_iterator bi;
801 unsigned int i;
802
803 bitmap_and_compl (&result->expressions, &dest->expressions,
804 &orig->expressions);
805
806 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
807 {
808 pre_expr expr = expression_for_id (i);
809 unsigned int value_id = get_expr_value_id (expr);
810 bitmap_set_bit (&result->values, value_id);
811 }
812
813 return result;
814 }
815
816 /* Subtract all the values in bitmap set B from bitmap set A. */
817
818 static void
819 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
820 {
821 unsigned int i;
822 bitmap_iterator bi;
823 bitmap_head temp;
824
825 bitmap_initialize (&temp, &grand_bitmap_obstack);
826
827 bitmap_copy (&temp, &a->expressions);
828 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
829 {
830 pre_expr expr = expression_for_id (i);
831 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
832 bitmap_remove_from_set (a, expr);
833 }
834 bitmap_clear (&temp);
835 }
836
837
838 /* Return true if bitmapped set SET contains the value VALUE_ID. */
839
840 static bool
841 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
842 {
843 if (value_id_constant_p (value_id))
844 return true;
845
846 if (!set || bitmap_empty_p (&set->expressions))
847 return false;
848
849 return bitmap_bit_p (&set->values, value_id);
850 }
851
852 static inline bool
853 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
854 {
855 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
856 }
857
858 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
859
860 static void
861 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
862 const pre_expr expr)
863 {
864 bitmap exprset;
865 unsigned int i;
866 bitmap_iterator bi;
867
868 if (value_id_constant_p (lookfor))
869 return;
870
871 if (!bitmap_set_contains_value (set, lookfor))
872 return;
873
874 /* The number of expressions having a given value is usually
875 significantly less than the total number of expressions in SET.
876 Thus, rather than check, for each expression in SET, whether it
877 has the value LOOKFOR, we walk the reverse mapping that tells us
878 what expressions have a given value, and see if any of those
879 expressions are in our set. For large testcases, this is about
880 5-10x faster than walking the bitmap. If this is somehow a
881      significant loss for some cases, we can choose which set to walk
882 based on the set size. */
883 exprset = value_expressions[lookfor];
884 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
885 {
886 if (bitmap_clear_bit (&set->expressions, i))
887 {
888 bitmap_set_bit (&set->expressions, get_expression_id (expr));
889 return;
890 }
891 }
892
893 gcc_unreachable ();
894 }
895
896 /* Return true if two bitmap sets are equal, i.e. contain the same
    values.  */
897
898 static bool
899 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
900 {
901 return bitmap_equal_p (&a->values, &b->values);
902 }
903
904 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
905 and add it otherwise. */
906
907 static void
908 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
909 {
910 unsigned int val = get_expr_value_id (expr);
911
912 if (bitmap_set_contains_value (set, val))
913 bitmap_set_replace_value (set, val, expr);
914 else
915 bitmap_insert_into_set (set, expr);
916 }
917
918 /* Insert EXPR into SET if EXPR's value is not already present in
919 SET. */
920
921 static void
922 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
923 {
924 unsigned int val = get_expr_value_id (expr);
925
926 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
927
928 /* Constant values are always considered to be part of the set. */
929 if (value_id_constant_p (val))
930 return;
931
932 /* If the value membership changed, add the expression. */
933 if (bitmap_set_bit (&set->values, val))
934 bitmap_set_bit (&set->expressions, expr->id);
935 }
936
937 /* Print out EXPR to outfile. */
938
939 static void
940 print_pre_expr (FILE *outfile, const pre_expr expr)
941 {
942 switch (expr->kind)
943 {
944 case CONSTANT:
945 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
946 break;
947 case NAME:
948 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
949 break;
950 case NARY:
951 {
952 unsigned int i;
953 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
954 fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
955 for (i = 0; i < nary->length; i++)
956 {
957 print_generic_expr (outfile, nary->op[i], 0);
958 if (i != (unsigned) nary->length - 1)
959 fprintf (outfile, ",");
960 }
961 fprintf (outfile, "}");
962 }
963 break;
964
965 case REFERENCE:
966 {
967 vn_reference_op_t vro;
968 unsigned int i;
969 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
970 fprintf (outfile, "{");
971 for (i = 0;
972 ref->operands.iterate (i, &vro);
973 i++)
974 {
975 bool closebrace = false;
976 if (vro->opcode != SSA_NAME
977 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
978 {
979 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
980 if (vro->op0)
981 {
982 fprintf (outfile, "<");
983 closebrace = true;
984 }
985 }
986 if (vro->op0)
987 {
988 print_generic_expr (outfile, vro->op0, 0);
989 if (vro->op1)
990 {
991 fprintf (outfile, ",");
992 print_generic_expr (outfile, vro->op1, 0);
993 }
994 if (vro->op2)
995 {
996 fprintf (outfile, ",");
997 print_generic_expr (outfile, vro->op2, 0);
998 }
999 }
1000 if (closebrace)
1001 fprintf (outfile, ">");
1002 if (i != ref->operands.length () - 1)
1003 fprintf (outfile, ",");
1004 }
1005 fprintf (outfile, "}");
1006 if (ref->vuse)
1007 {
1008 fprintf (outfile, "@");
1009 print_generic_expr (outfile, ref->vuse, 0);
1010 }
1011 }
1012 break;
1013 }
1014 }
1015 void debug_pre_expr (pre_expr);
1016
1017 /* Like print_pre_expr but always prints to stderr. */
1018 DEBUG_FUNCTION void
1019 debug_pre_expr (pre_expr e)
1020 {
1021 print_pre_expr (stderr, e);
1022 fprintf (stderr, "\n");
1023 }
1024
1025 /* Print out SET to OUTFILE. */
1026
1027 static void
1028 print_bitmap_set (FILE *outfile, bitmap_set_t set,
1029 const char *setname, int blockindex)
1030 {
1031 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1032 if (set)
1033 {
1034 bool first = true;
1035 unsigned i;
1036 bitmap_iterator bi;
1037
1038 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1039 {
1040 const pre_expr expr = expression_for_id (i);
1041
1042 if (!first)
1043 fprintf (outfile, ", ");
1044 first = false;
1045 print_pre_expr (outfile, expr);
1046
1047 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1048 }
1049 }
1050 fprintf (outfile, " }\n");
1051 }
1052
1053 void debug_bitmap_set (bitmap_set_t);
1054
1055 DEBUG_FUNCTION void
1056 debug_bitmap_set (bitmap_set_t set)
1057 {
1058 print_bitmap_set (stderr, set, "debug", 0);
1059 }
1060
1061 void debug_bitmap_sets_for (basic_block);
1062
1063 DEBUG_FUNCTION void
1064 debug_bitmap_sets_for (basic_block bb)
1065 {
1066 print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
1067 print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
1068 print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
1069 print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
1070 print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
1071 if (do_partial_partial)
1072 print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
1073 print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
1074 }
1075
1076 /* Print out the expressions that have VAL to OUTFILE. */
1077
1078 static void
1079 print_value_expressions (FILE *outfile, unsigned int val)
1080 {
1081 bitmap set = value_expressions[val];
1082 if (set)
1083 {
1084 bitmap_set x;
1085 char s[10];
1086 sprintf (s, "%04d", val);
1087 x.expressions = *set;
1088 print_bitmap_set (outfile, &x, s, 0);
1089 }
1090 }
1091
1092
1093 DEBUG_FUNCTION void
1094 debug_value_expressions (unsigned int val)
1095 {
1096 print_value_expressions (stderr, val);
1097 }
1098
1099 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
1100 represent it. */
1101
1102 static pre_expr
1103 get_or_alloc_expr_for_constant (tree constant)
1104 {
1105 unsigned int result_id;
1106 unsigned int value_id;
1107 struct pre_expr_d expr;
1108 pre_expr newexpr;
1109
1110 expr.kind = CONSTANT;
1111 PRE_EXPR_CONSTANT (&expr) = constant;
1112 result_id = lookup_expression_id (&expr);
1113 if (result_id != 0)
1114 return expression_for_id (result_id);
1115
1116 newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1117 newexpr->kind = CONSTANT;
1118 PRE_EXPR_CONSTANT (newexpr) = constant;
1119 alloc_expression_id (newexpr);
1120 value_id = get_or_alloc_constant_value_id (constant);
1121 add_to_value (value_id, newexpr);
1122 return newexpr;
1123 }
1124
1125 /* Given a value id V, find the actual tree representing the constant
1126 value if there is one, and return it. Return NULL if we can't find
1127 a constant. */
1128
1129 static tree
1130 get_constant_for_value_id (unsigned int v)
1131 {
1132 if (value_id_constant_p (v))
1133 {
1134 unsigned int i;
1135 bitmap_iterator bi;
1136 bitmap exprset = value_expressions[v];
1137
1138 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1139 {
1140 pre_expr expr = expression_for_id (i);
1141 if (expr->kind == CONSTANT)
1142 return PRE_EXPR_CONSTANT (expr);
1143 }
1144 }
1145 return NULL;
1146 }
1147
1148 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1149 Currently only supports constants and SSA_NAMES. */
1150 static pre_expr
1151 get_or_alloc_expr_for (tree t)
1152 {
1153 if (TREE_CODE (t) == SSA_NAME)
1154 return get_or_alloc_expr_for_name (t);
1155 else if (is_gimple_min_invariant (t))
1156 return get_or_alloc_expr_for_constant (t);
1157 else
1158 {
1159 /* More complex expressions can result from SCCVN expression
1160 	 simplification that inserts values for them.  As none of them
1161 	 have VOPs, they get handled by the nary ops struct.  */
1162 vn_nary_op_t result;
1163 unsigned int result_id;
1164 vn_nary_op_lookup (t, &result);
1165 if (result != NULL)
1166 {
1167 pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
1168 e->kind = NARY;
1169 PRE_EXPR_NARY (e) = result;
1170 result_id = lookup_expression_id (e);
1171 if (result_id != 0)
1172 {
1173 pool_free (pre_expr_pool, e);
1174 e = expression_for_id (result_id);
1175 return e;
1176 }
1177 alloc_expression_id (e);
1178 return e;
1179 }
1180 }
1181 return NULL;
1182 }
1183
1184 /* Return the folded version of E if E, when folded, is a gimple
1185    min_invariant.  Otherwise, return E.  */
1186
1187 static pre_expr
1188 fully_constant_expression (pre_expr e)
1189 {
1190 switch (e->kind)
1191 {
1192 case CONSTANT:
1193 return e;
1194 case NARY:
1195 {
1196 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1197 switch (TREE_CODE_CLASS (nary->opcode))
1198 {
1199 case tcc_binary:
1200 case tcc_comparison:
1201 {
1202 /* We have to go from trees to pre exprs to value ids to
1203 constants. */
1204 tree naryop0 = nary->op[0];
1205 tree naryop1 = nary->op[1];
1206 tree result;
1207 if (!is_gimple_min_invariant (naryop0))
1208 {
1209 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1210 unsigned int vrep0 = get_expr_value_id (rep0);
1211 tree const0 = get_constant_for_value_id (vrep0);
1212 if (const0)
1213 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1214 }
1215 if (!is_gimple_min_invariant (naryop1))
1216 {
1217 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1218 unsigned int vrep1 = get_expr_value_id (rep1);
1219 tree const1 = get_constant_for_value_id (vrep1);
1220 if (const1)
1221 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1222 }
1223 result = fold_binary (nary->opcode, nary->type,
1224 naryop0, naryop1);
1225 if (result && is_gimple_min_invariant (result))
1226 return get_or_alloc_expr_for_constant (result);
1227 /* We might have simplified the expression to a
1228 SSA_NAME for example from x_1 * 1. But we cannot
1229 insert a PHI for x_1 unconditionally as x_1 might
1230 not be available readily. */
1231 return e;
1232 }
1233 case tcc_reference:
1234 if (nary->opcode != REALPART_EXPR
1235 && nary->opcode != IMAGPART_EXPR
1236 && nary->opcode != VIEW_CONVERT_EXPR)
1237 return e;
1238 /* Fallthrough. */
1239 case tcc_unary:
1240 {
1241 /* We have to go from trees to pre exprs to value ids to
1242 constants. */
1243 tree naryop0 = nary->op[0];
1244 tree const0, result;
1245 if (is_gimple_min_invariant (naryop0))
1246 const0 = naryop0;
1247 else
1248 {
1249 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1250 unsigned int vrep0 = get_expr_value_id (rep0);
1251 const0 = get_constant_for_value_id (vrep0);
1252 }
1253 result = NULL;
1254 if (const0)
1255 {
1256 tree type1 = TREE_TYPE (nary->op[0]);
1257 const0 = fold_convert (type1, const0);
1258 result = fold_unary (nary->opcode, nary->type, const0);
1259 }
1260 if (result && is_gimple_min_invariant (result))
1261 return get_or_alloc_expr_for_constant (result);
1262 return e;
1263 }
1264 default:
1265 return e;
1266 }
1267 }
1268 case REFERENCE:
1269 {
1270 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1271 tree folded;
1272 if ((folded = fully_constant_vn_reference_p (ref)))
1273 return get_or_alloc_expr_for_constant (folded);
1274 return e;
1275 }
1276 default:
1277 return e;
1278 }
1279 return e;
1280 }
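/* For example (hypothetical operands): given the NARY {plus_expr,
   a_1, b_2} where the values of a_1 and b_2 are known to be the
   constants 3 and 4, the tcc_binary case above substitutes the
   constants for the operands and fold_binary yields 7, which is
   returned as a CONSTANT pre_expr.  */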
1281
1282 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1283 it has the value it would have in BLOCK. Set *SAME_VALID to true
1284 in case the new vuse doesn't change the value id of the OPERANDS. */
1285
1286 static tree
1287 translate_vuse_through_block (vec<vn_reference_op_s> operands,
1288 alias_set_type set, tree type, tree vuse,
1289 basic_block phiblock,
1290 basic_block block, bool *same_valid)
1291 {
1292 gimple phi = SSA_NAME_DEF_STMT (vuse);
1293 ao_ref ref;
1294 edge e = NULL;
1295 bool use_oracle;
1296
1297 *same_valid = true;
1298
1299 if (gimple_bb (phi) != phiblock)
1300 return vuse;
1301
1302 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1303
1304 /* Use the alias-oracle to find either the PHI node in this block,
1305 the first VUSE used in this block that is equivalent to vuse or
1306    the first VUSE whose definition in this block kills the value.  */
1307 if (gimple_code (phi) == GIMPLE_PHI)
1308 e = find_edge (block, phiblock);
1309 else if (use_oracle)
1310 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1311 {
1312 vuse = gimple_vuse (phi);
1313 phi = SSA_NAME_DEF_STMT (vuse);
1314 if (gimple_bb (phi) != phiblock)
1315 return vuse;
1316 if (gimple_code (phi) == GIMPLE_PHI)
1317 {
1318 e = find_edge (block, phiblock);
1319 break;
1320 }
1321 }
1322 else
1323 return NULL_TREE;
1324
1325 if (e)
1326 {
1327 if (use_oracle)
1328 {
1329 bitmap visited = NULL;
1330 unsigned int cnt;
1331 /* Try to find a vuse that dominates this phi node by skipping
1332 non-clobbering statements. */
1333 vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
1334 NULL, NULL);
1335 if (visited)
1336 BITMAP_FREE (visited);
1337 }
1338 else
1339 vuse = NULL_TREE;
1340 if (!vuse)
1341 {
1342 /* If we didn't find any, the value ID can't stay the same,
1343 but return the translated vuse. */
1344 *same_valid = false;
1345 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1346 }
1347 /* ??? We would like to return vuse here as this is the canonical
1348 upmost vdef that this reference is associated with. But during
1349 insertion of the references into the hash tables we only ever
1350 directly insert with their direct gimple_vuse, hence returning
1351 something else would make us not find the other expression. */
1352 return PHI_ARG_DEF (phi, e->dest_idx);
1353 }
1354
1355 return NULL_TREE;
1356 }
1357
1358 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1359 SET2. This is used to avoid making a set consisting of the union
1360 of PA_IN and ANTIC_IN during insert. */
1361
1362 static inline pre_expr
1363 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1364 {
1365 pre_expr result;
1366
1367 result = bitmap_find_leader (set1, val);
1368 if (!result && set2)
1369 result = bitmap_find_leader (set2, val);
1370 return result;
1371 }
1372
1373 /* Get the tree type for our PRE expression e. */
1374
1375 static tree
1376 get_expr_type (const pre_expr e)
1377 {
1378 switch (e->kind)
1379 {
1380 case NAME:
1381 return TREE_TYPE (PRE_EXPR_NAME (e));
1382 case CONSTANT:
1383 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1384 case REFERENCE:
1385 return PRE_EXPR_REFERENCE (e)->type;
1386 case NARY:
1387 return PRE_EXPR_NARY (e)->type;
1388 }
1389 gcc_unreachable ();
1390 }
1391
1392 /* Get a representative SSA_NAME for a given expression.
1393 Since all of our sub-expressions are treated as values, we require
1394 them to be SSA_NAME's for simplicity.
1395 Prior versions of GVNPRE used to use "value handles" here, so that
1396 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1397 either case, the operands are really values (IE we do not expect
1398 them to be usable without finding leaders). */
1399
1400 static tree
1401 get_representative_for (const pre_expr e)
1402 {
1403 tree name;
1404 unsigned int value_id = get_expr_value_id (e);
1405
1406 switch (e->kind)
1407 {
1408 case NAME:
1409 return PRE_EXPR_NAME (e);
1410 case CONSTANT:
1411 return PRE_EXPR_CONSTANT (e);
1412 case NARY:
1413 case REFERENCE:
1414 {
1415 /* Go through all of the expressions representing this value
1416 and pick out an SSA_NAME. */
1417 unsigned int i;
1418 bitmap_iterator bi;
1419 bitmap exprs = value_expressions[value_id];
1420 EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
1421 {
1422 pre_expr rep = expression_for_id (i);
1423 if (rep->kind == NAME)
1424 return PRE_EXPR_NAME (rep);
1425 else if (rep->kind == CONSTANT)
1426 return PRE_EXPR_CONSTANT (rep);
1427 }
1428 }
1429 break;
1430 }
1431
1432 /* If we reached here we couldn't find an SSA_NAME. This can
1433 happen when we've discovered a value that has never appeared in
1434 the program as set to an SSA_NAME, as the result of phi translation.
1435 Create one here.
1436 ??? We should be able to re-use this when we insert the statement
1437 to compute it. */
1438 name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
1439 VN_INFO_GET (name)->value_id = value_id;
1440 VN_INFO (name)->valnum = name;
1441 /* ??? For now mark this SSA name for release by SCCVN. */
1442 VN_INFO (name)->needs_insertion = true;
1443 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1444 if (dump_file && (dump_flags & TDF_DETAILS))
1445 {
1446 fprintf (dump_file, "Created SSA_NAME representative ");
1447 print_generic_expr (dump_file, name, 0);
1448 fprintf (dump_file, " for expression:");
1449 print_pre_expr (dump_file, e);
1450 fprintf (dump_file, " (%04d)\n", value_id);
1451 }
1452
1453 return name;
1454 }
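/* For illustration: if phi translation yields the value of b_2 + c_3
   on an edge where no SSA_NAME or constant carries that value yet,
   the code above manufactures a fresh name such as pretmp_7 (a
   hypothetical name) so that later translations, and eventual
   insertion, have a representative to refer to.  */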
1455
1456
1457
1458 static pre_expr
1459 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1460 basic_block pred, basic_block phiblock);
1461
1462 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1463 the phis in PRED. Return NULL if we can't find a leader for each part
1464 of the translated expression. */
1465
1466 static pre_expr
1467 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1468 basic_block pred, basic_block phiblock)
1469 {
1470 switch (expr->kind)
1471 {
1472 case NARY:
1473 {
1474 unsigned int i;
1475 bool changed = false;
1476 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1477 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1478 sizeof_vn_nary_op (nary->length));
1479 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1480
1481 for (i = 0; i < newnary->length; i++)
1482 {
1483 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1484 continue;
1485 else
1486 {
1487 pre_expr leader, result;
1488 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1489 leader = find_leader_in_sets (op_val_id, set1, set2);
1490 result = phi_translate (leader, set1, set2, pred, phiblock);
1491 if (result && result != leader)
1492 {
1493 tree name = get_representative_for (result);
1494 if (!name)
1495 return NULL;
1496 newnary->op[i] = name;
1497 }
1498 else if (!result)
1499 return NULL;
1500
1501 changed |= newnary->op[i] != nary->op[i];
1502 }
1503 }
1504 if (changed)
1505 {
1506 pre_expr constant;
1507 unsigned int new_val_id;
1508
1509 tree result = vn_nary_op_lookup_pieces (newnary->length,
1510 newnary->opcode,
1511 newnary->type,
1512 &newnary->op[0],
1513 &nary);
1514 if (result && is_gimple_min_invariant (result))
1515 return get_or_alloc_expr_for_constant (result);
1516
1517 expr = (pre_expr) pool_alloc (pre_expr_pool);
1518 expr->kind = NARY;
1519 expr->id = 0;
1520 if (nary)
1521 {
1522 PRE_EXPR_NARY (expr) = nary;
1523 constant = fully_constant_expression (expr);
1524 if (constant != expr)
1525 return constant;
1526
1527 new_val_id = nary->value_id;
1528 get_or_alloc_expression_id (expr);
1529 }
1530 else
1531 {
1532 new_val_id = get_next_value_id ();
1533 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
1534 nary = vn_nary_op_insert_pieces (newnary->length,
1535 newnary->opcode,
1536 newnary->type,
1537 &newnary->op[0],
1538 result, new_val_id);
1539 PRE_EXPR_NARY (expr) = nary;
1540 constant = fully_constant_expression (expr);
1541 if (constant != expr)
1542 return constant;
1543 get_or_alloc_expression_id (expr);
1544 }
1545 add_to_value (new_val_id, expr);
1546 }
1547 return expr;
1548 }
1549 break;
1550
1551 case REFERENCE:
1552 {
1553 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1554 vec<vn_reference_op_s> operands = ref->operands;
1555 tree vuse = ref->vuse;
1556 tree newvuse = vuse;
1557 vec<vn_reference_op_s> newoperands = vNULL;
1558 bool changed = false, same_valid = true;
1559 unsigned int i, n;
1560 vn_reference_op_t operand;
1561 vn_reference_t newref;
1562
1563 for (i = 0; operands.iterate (i, &operand); i++)
1564 {
1565 pre_expr opresult;
1566 pre_expr leader;
1567 tree op[3];
1568 tree type = operand->type;
1569 vn_reference_op_s newop = *operand;
1570 op[0] = operand->op0;
1571 op[1] = operand->op1;
1572 op[2] = operand->op2;
1573 for (n = 0; n < 3; ++n)
1574 {
1575 unsigned int op_val_id;
1576 if (!op[n])
1577 continue;
1578 if (TREE_CODE (op[n]) != SSA_NAME)
1579 {
1580 /* We can't possibly insert these. */
1581 if (n != 0
1582 && !is_gimple_min_invariant (op[n]))
1583 break;
1584 continue;
1585 }
1586 op_val_id = VN_INFO (op[n])->value_id;
1587 leader = find_leader_in_sets (op_val_id, set1, set2);
1588 if (!leader)
1589 break;
1590 opresult = phi_translate (leader, set1, set2, pred, phiblock);
1591 if (!opresult)
1592 break;
1593 if (opresult != leader)
1594 {
1595 tree name = get_representative_for (opresult);
1596 if (!name)
1597 break;
1598 changed |= name != op[n];
1599 op[n] = name;
1600 }
1601 }
1602 if (n != 3)
1603 {
1604 newoperands.release ();
1605 return NULL;
1606 }
1607 if (!changed)
1608 continue;
1609 if (!newoperands.exists ())
1610 newoperands = operands.copy ();
1611 	  /* We may have changed from an SSA_NAME to a constant.  */
1612 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1613 newop.opcode = TREE_CODE (op[0]);
1614 newop.type = type;
1615 newop.op0 = op[0];
1616 newop.op1 = op[1];
1617 newop.op2 = op[2];
1618 newoperands[i] = newop;
1619 }
1620 gcc_checking_assert (i == operands.length ());
1621
1622 if (vuse)
1623 {
1624 newvuse = translate_vuse_through_block (newoperands.exists ()
1625 ? newoperands : operands,
1626 ref->set, ref->type,
1627 vuse, phiblock, pred,
1628 &same_valid);
1629 if (newvuse == NULL_TREE)
1630 {
1631 newoperands.release ();
1632 return NULL;
1633 }
1634 }
1635
1636 if (changed || newvuse != vuse)
1637 {
1638 unsigned int new_val_id;
1639 pre_expr constant;
1640
1641 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1642 ref->type,
1643 newoperands.exists ()
1644 ? newoperands : operands,
1645 &newref, VN_WALK);
1646 if (result)
1647 newoperands.release ();
1648
1649 /* We can always insert constants, so if we have a partial
1650 redundant constant load of another type try to translate it
1651 to a constant of appropriate type. */
1652 if (result && is_gimple_min_invariant (result))
1653 {
1654 tree tem = result;
1655 if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1656 {
1657 tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
1658 if (tem && !is_gimple_min_invariant (tem))
1659 tem = NULL_TREE;
1660 }
1661 if (tem)
1662 return get_or_alloc_expr_for_constant (tem);
1663 }
1664
1665 /* If we'd have to convert things we would need to validate
1666 if we can insert the translated expression. So fail
1667 here for now - we cannot insert an alias with a different
1668 type in the VN tables either, as that would assert. */
1669 if (result
1670 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1671 return NULL;
1672 else if (!result && newref
1673 && !useless_type_conversion_p (ref->type, newref->type))
1674 {
1675 newoperands.release ();
1676 return NULL;
1677 }
1678
1679 expr = (pre_expr) pool_alloc (pre_expr_pool);
1680 expr->kind = REFERENCE;
1681 expr->id = 0;
1682
1683 if (newref)
1684 {
1685 PRE_EXPR_REFERENCE (expr) = newref;
1686 constant = fully_constant_expression (expr);
1687 if (constant != expr)
1688 return constant;
1689
1690 new_val_id = newref->value_id;
1691 get_or_alloc_expression_id (expr);
1692 }
1693 else
1694 {
1695 if (changed || !same_valid)
1696 {
1697 new_val_id = get_next_value_id ();
1698 value_expressions.safe_grow_cleared
1699 (get_max_value_id () + 1);
1700 }
1701 else
1702 new_val_id = ref->value_id;
1703 if (!newoperands.exists ())
1704 newoperands = operands.copy ();
1705 newref = vn_reference_insert_pieces (newvuse, ref->set,
1706 ref->type,
1707 newoperands,
1708 result, new_val_id);
1709 newoperands = vNULL;
1710 PRE_EXPR_REFERENCE (expr) = newref;
1711 constant = fully_constant_expression (expr);
1712 if (constant != expr)
1713 return constant;
1714 get_or_alloc_expression_id (expr);
1715 }
1716 add_to_value (new_val_id, expr);
1717 }
1718 newoperands.release ();
1719 return expr;
1720 }
1721 break;
1722
1723 case NAME:
1724 {
1725 tree name = PRE_EXPR_NAME (expr);
1726 gimple def_stmt = SSA_NAME_DEF_STMT (name);
1727 /* If the SSA name is defined by a PHI node in this block,
1728 translate it. */
1729 if (gimple_code (def_stmt) == GIMPLE_PHI
1730 && gimple_bb (def_stmt) == phiblock)
1731 {
1732 edge e = find_edge (pred, gimple_bb (def_stmt));
1733 tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);
1734
1735 /* Handle constant. */
1736 if (is_gimple_min_invariant (def))
1737 return get_or_alloc_expr_for_constant (def);
1738
1739 return get_or_alloc_expr_for_name (def);
1740 }
1741 /* Otherwise return it unchanged - it will get removed if its
1742 	 value is not available in PRED's AVAIL_OUT set of expressions
1743 by the subtraction of TMP_GEN. */
1744 return expr;
1745 }
1746
1747 default:
1748 gcc_unreachable ();
1749 }
1750 }
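/* Example of the NAME case (a hypothetical CFG): with
   x_4 = PHI <x_2(bb3), x_3(bb4)> in PHIBLOCK, translating the
   expression x_4 towards PRED == bb3 yields x_2, and towards bb4
   yields x_3; if the argument on the edge is a constant such as 0,
   a CONSTANT pre_expr is returned instead.  */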
1751
1752 /* Wrapper around phi_translate_1 providing caching functionality. */
1753
1754 static pre_expr
1755 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1756 basic_block pred, basic_block phiblock)
1757 {
1758 expr_pred_trans_t slot = NULL;
1759 pre_expr phitrans;
1760
1761 if (!expr)
1762 return NULL;
1763
1764 /* Constants contain no values that need translation. */
1765 if (expr->kind == CONSTANT)
1766 return expr;
1767
1768 if (value_id_constant_p (get_expr_value_id (expr)))
1769 return expr;
1770
1771 /* Don't add translations of NAMEs as those are cheap to translate. */
1772 if (expr->kind != NAME)
1773 {
1774 if (phi_trans_add (&slot, expr, pred))
1775 return slot->v;
1776 /* Store NULL for the value we want to return in the case of
1777 recursing. */
1778 slot->v = NULL;
1779 }
1780
1781 /* Translate. */
1782 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1783
1784 if (slot)
1785 {
1786 if (phitrans)
1787 slot->v = phitrans;
1788 else
1789 /* Remove failed translations again, they cause insert
1790 iteration to not pick up new opportunities reliably. */
1791 phi_translate_table->remove_elt_with_hash (slot, slot->hashcode);
1792 }
1793
1794 return phitrans;
1795 }
1796
1797
1798 /* For each expression in SET, translate the values through phi nodes
1799 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1800 expressions in DEST. */
1801
1802 static void
1803 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1804 basic_block phiblock)
1805 {
1806 vec<pre_expr> exprs;
1807 pre_expr expr;
1808 int i;
1809
1810 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1811 {
1812 bitmap_set_copy (dest, set);
1813 return;
1814 }
1815
1816 exprs = sorted_array_from_bitmap_set (set);
1817 FOR_EACH_VEC_ELT (exprs, i, expr)
1818 {
1819 pre_expr translated;
1820 translated = phi_translate (expr, set, NULL, pred, phiblock);
1821 if (!translated)
1822 continue;
1823
1824 /* We might end up with multiple expressions from SET being
1825 translated to the same value. In this case we do not want
1826 to retain the NARY or REFERENCE expression but prefer a NAME
1827 which would be the leader. */
1828 if (translated->kind == NAME)
1829 bitmap_value_replace_in_set (dest, translated);
1830 else
1831 bitmap_value_insert_into_set (dest, translated);
1832 }
1833 exprs.release ();
1834 }
1835
1836 /* Find the leader for a value (i.e., the name representing that
1837 value) in a given set, and return it. Return NULL if no leader
1838 is found. */
1839
1840 static pre_expr
1841 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1842 {
1843 if (value_id_constant_p (val))
1844 {
1845 unsigned int i;
1846 bitmap_iterator bi;
1847 bitmap exprset = value_expressions[val];
1848
1849 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1850 {
1851 pre_expr expr = expression_for_id (i);
1852 if (expr->kind == CONSTANT)
1853 return expr;
1854 }
1855 }
1856 if (bitmap_set_contains_value (set, val))
1857 {
1858 /* Rather than walk the entire bitmap of expressions, and see
1859 whether any of them has the value we are looking for, we look
1860 at the reverse mapping, which tells us the set of expressions
1861 that have a given value (IE value->expressions with that
1862 value) and see if any of those expressions are in our set.
1863 The number of expressions per value is usually significantly
1864 less than the number of expressions in the set. In fact, for
1865 large testcases, doing it this way is roughly 5-10x faster
1866 than walking the bitmap.
1867 	 If this is somehow a significant loss for some cases, we can
1868 choose which set to walk based on which set is smaller. */
1869 unsigned int i;
1870 bitmap_iterator bi;
1871 bitmap exprset = value_expressions[val];
1872
1873 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1874 return expression_for_id (i);
1875 }
1876 return NULL;
1877 }
1878
1879 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1880 BLOCK by seeing if it is not killed in the block. Note that we are
1881 only determining whether there is a store that kills it. Because
1882 of the order in which clean iterates over values, we are guaranteed
1883 that altered operands will have caused us to be eliminated from the
1884 ANTIC_IN set already. */
1885
1886 static bool
1887 value_dies_in_block_x (pre_expr expr, basic_block block)
1888 {
1889 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1890 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1891 gimple def;
1892 gimple_stmt_iterator gsi;
1893 unsigned id = get_expression_id (expr);
1894 bool res = false;
1895 ao_ref ref;
1896
1897 if (!vuse)
1898 return false;
1899
1900 /* Lookup a previously calculated result. */
1901 if (EXPR_DIES (block)
1902 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1903 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1904
1905 /* A memory expression {e, VUSE} dies in the block if there is a
1906 statement that may clobber e. If, walking the statements from the
1907 top of the basic block, we find a statement that uses VUSE, there can
1908 be no kill in between that use and the original statement that loaded
1909 {e, VUSE}, so we can stop walking. */
1910 ref.base = NULL_TREE;
1911 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1912 {
1913 tree def_vuse, def_vdef;
1914 def = gsi_stmt (gsi);
1915 def_vuse = gimple_vuse (def);
1916 def_vdef = gimple_vdef (def);
1917
1918 /* Not a memory statement. */
1919 if (!def_vuse)
1920 continue;
1921
1922 /* Not a may-def. */
1923 if (!def_vdef)
1924 {
1925 /* A load with the same VUSE, we're done. */
1926 if (def_vuse == vuse)
1927 break;
1928
1929 continue;
1930 }
1931
1932 /* Init ref only if we really need it. */
1933 if (ref.base == NULL_TREE
1934 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1935 refx->operands))
1936 {
1937 res = true;
1938 break;
1939 }
1940 /* If the statement may clobber expr, it dies. */
1941 if (stmt_may_clobber_ref_p_1 (def, &ref))
1942 {
1943 res = true;
1944 break;
1945 }
1946 }
1947
1948 /* Remember the result. */
1949 if (!EXPR_DIES (block))
1950 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1951 bitmap_set_bit (EXPR_DIES (block), id * 2);
1952 if (res)
1953 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1954
1955 return res;
1956 }
1957
1958
1959 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1960 contains its value-id. */
1961
1962 static bool
1963 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1964 {
1965 if (op && TREE_CODE (op) == SSA_NAME)
1966 {
1967 unsigned int value_id = VN_INFO (op)->value_id;
1968 if (!(bitmap_set_contains_value (set1, value_id)
1969 || (set2 && bitmap_set_contains_value (set2, value_id))))
1970 return false;
1971 }
1972 return true;
1973 }
1974
1975 /* Determine if the expression EXPR is valid in SET1 U SET2.
1976 ONLY SET2 CAN BE NULL.
1977 This means that we have a leader for each part of the expression
1978 (if it consists of values), or the expression is an SSA_NAME.
1979 For loads/calls, we also see if the vuse is killed in this block. */
1980
1981 static bool
1982 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
1983 {
1984 switch (expr->kind)
1985 {
1986 case NAME:
1987 /* By construction all NAMEs are available. Non-available
1988 NAMEs are removed by subtracting TMP_GEN from the sets. */
1989 return true;
1990 case NARY:
1991 {
1992 unsigned int i;
1993 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1994 for (i = 0; i < nary->length; i++)
1995 if (!op_valid_in_sets (set1, set2, nary->op[i]))
1996 return false;
1997 return true;
1998 }
1999 break;
2000 case REFERENCE:
2001 {
2002 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2003 vn_reference_op_t vro;
2004 unsigned int i;
2005
2006 FOR_EACH_VEC_ELT (ref->operands, i, vro)
2007 {
2008 if (!op_valid_in_sets (set1, set2, vro->op0)
2009 || !op_valid_in_sets (set1, set2, vro->op1)
2010 || !op_valid_in_sets (set1, set2, vro->op2))
2011 return false;
2012 }
2013 return true;
2014 }
2015 default:
2016 gcc_unreachable ();
2017 }
2018 }
2019
2020 /* Clean the set of expressions that are no longer valid in SET1 or
2021 SET2. This means expressions that are made up of values we have no
2022 leaders for in SET1 or SET2. This version is used for partial
2023 anticipation, where an expression remains valid if its operands have
2024 leaders in either ANTIC_IN or PA_IN. */
2025
2026 static void
2027 dependent_clean (bitmap_set_t set1, bitmap_set_t set2)
2028 {
2029 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2030 pre_expr expr;
2031 int i;
2032
2033 FOR_EACH_VEC_ELT (exprs, i, expr)
2034 {
2035 if (!valid_in_sets (set1, set2, expr))
2036 bitmap_remove_from_set (set1, expr);
2037 }
2038 exprs.release ();
2039 }
2040
2041 /* Clean the set of expressions that are no longer valid in SET. This
2042 means expressions that are made up of values we have no leaders for
2043 in SET. */
2044
2045 static void
2046 clean (bitmap_set_t set)
2047 {
2048 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
2049 pre_expr expr;
2050 int i;
2051
2052 FOR_EACH_VEC_ELT (exprs, i, expr)
2053 {
2054 if (!valid_in_sets (set, NULL, expr))
2055 bitmap_remove_from_set (set, expr);
2056 }
2057 exprs.release ();
2058 }
2059
2060 /* Clean the set of expressions that are no longer valid in SET because
2061 they are clobbered in BLOCK or because they trap and may not be executed. */
2062
2063 static void
2064 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2065 {
2066 bitmap_iterator bi;
2067 unsigned i;
2068
2069 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2070 {
2071 pre_expr expr = expression_for_id (i);
2072 if (expr->kind == REFERENCE)
2073 {
2074 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2075 if (ref->vuse)
2076 {
2077 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2078 if (!gimple_nop_p (def_stmt)
2079 && ((gimple_bb (def_stmt) != block
2080 && !dominated_by_p (CDI_DOMINATORS,
2081 block, gimple_bb (def_stmt)))
2082 || (gimple_bb (def_stmt) == block
2083 && value_dies_in_block_x (expr, block))))
2084 bitmap_remove_from_set (set, expr);
2085 }
2086 }
2087 else if (expr->kind == NARY)
2088 {
2089 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2090 /* If the NARY may trap, make sure the block does not contain
2091 a possible exit point.
2092 ??? This is overly conservative if we translate AVAIL_OUT
2093 as the available expression might be after the exit point. */
2094 if (BB_MAY_NOTRETURN (block)
2095 && vn_nary_may_trap (nary))
2096 bitmap_remove_from_set (set, expr);
2097 }
2098 }
2099 }
2100
2101 static sbitmap has_abnormal_preds;
2102
2103 /* List of blocks that may have changed during ANTIC computation and
2104 thus need to be iterated over. */
2105
2106 static sbitmap changed_blocks;
2107
2108 /* Compute the ANTIC set for BLOCK.
2109
2110 If succs(BLOCK) > 1 then
2111 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2112 else if succs(BLOCK) == 1 then
2113 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2114
2115 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2116 */
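/* As a schematic illustration, for the diamond

          b1
         /  \
       b2    b3
         \  /
          b4

   with a + b computed in b4 and in neither arm, the value of a + b is
   in ANTIC_IN[b4], flows into ANTIC_OUT and thus ANTIC_IN of both b2
   and b3, and ANTIC_OUT[b1] is their intersection, so a + b is
   anticipated at the end of b1 as well. */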
2117
2118 static bool
2119 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2120 {
2121 bool changed = false;
2122 bitmap_set_t S, old, ANTIC_OUT;
2123 bitmap_iterator bi;
2124 unsigned int bii;
2125 edge e;
2126 edge_iterator ei;
2127
2128 old = ANTIC_OUT = S = NULL;
2129 BB_VISITED (block) = 1;
2130
2131 /* If any edges from predecessors are abnormal, antic_in is empty,
2132 so do nothing. */
2133 if (block_has_abnormal_pred_edge)
2134 goto maybe_dump_sets;
2135
2136 old = ANTIC_IN (block);
2137 ANTIC_OUT = bitmap_set_new ();
2138
2139 /* If the block has no successors, ANTIC_OUT is empty. */
2140 if (EDGE_COUNT (block->succs) == 0)
2141 ;
2142 /* If we have one successor, we could have some phi nodes to
2143 translate through. */
2144 else if (single_succ_p (block))
2145 {
2146 basic_block succ_bb = single_succ (block);
2147 gcc_assert (BB_VISITED (succ_bb));
2148 phi_translate_set (ANTIC_OUT, ANTIC_IN (succ_bb), block, succ_bb);
2149 }
2150 /* If we have multiple successors, we take the intersection of all of
2151 them. Note that in the case of loop exit phi nodes, we may have
2152 phis to translate through. */
2153 else
2154 {
2155 size_t i;
2156 basic_block bprime, first = NULL;
2157
2158 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2159 FOR_EACH_EDGE (e, ei, block->succs)
2160 {
2161 if (!first
2162 && BB_VISITED (e->dest))
2163 first = e->dest;
2164 else if (BB_VISITED (e->dest))
2165 worklist.quick_push (e->dest);
2166 }
2167
2168 /* With multiple successors we must have visited at least one
2169 already, which is guaranteed by the iteration order. */
2170 gcc_assert (first != NULL);
2171
2172 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2173
2174 FOR_EACH_VEC_ELT (worklist, i, bprime)
2175 {
2176 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2177 {
2178 bitmap_set_t tmp = bitmap_set_new ();
2179 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2180 bitmap_set_and (ANTIC_OUT, tmp);
2181 bitmap_set_free (tmp);
2182 }
2183 else
2184 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2185 }
2186 }
2187
2188 /* Prune expressions that are clobbered in block and thus become
2189 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2190 prune_clobbered_mems (ANTIC_OUT, block);
2191
2192 /* Generate ANTIC_OUT - TMP_GEN. */
2193 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2194
2195 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2196 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2197 TMP_GEN (block));
2198
2199 /* Then union in the ANTIC_OUT - TMP_GEN values,
2200 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2201 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2202 bitmap_value_insert_into_set (ANTIC_IN (block),
2203 expression_for_id (bii));
2204
2205 clean (ANTIC_IN (block));
2206
2207 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2208 {
2209 changed = true;
2210 bitmap_set_bit (changed_blocks, block->index);
2211 FOR_EACH_EDGE (e, ei, block->preds)
2212 bitmap_set_bit (changed_blocks, e->src->index);
2213 }
2214 else
2215 bitmap_clear_bit (changed_blocks, block->index);
2216
2217 maybe_dump_sets:
2218 if (dump_file && (dump_flags & TDF_DETAILS))
2219 {
2220 if (ANTIC_OUT)
2221 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2222
2223 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2224 block->index);
2225
2226 if (S)
2227 print_bitmap_set (dump_file, S, "S", block->index);
2228 }
2229 if (old)
2230 bitmap_set_free (old);
2231 if (S)
2232 bitmap_set_free (S);
2233 if (ANTIC_OUT)
2234 bitmap_set_free (ANTIC_OUT);
2235 return changed;
2236 }
2237
2238 /* Compute PARTIAL_ANTIC for BLOCK.
2239
2240 If succs(BLOCK) > 1 then
2241 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not
2242 in ANTIC_OUT for all succ(BLOCK)
2243 else if succs(BLOCK) == 1 then
2244 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2245
2246 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK]
2247 - ANTIC_IN[BLOCK])
2248
2249 */
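/* As a schematic illustration, for a diamond b1 -> {b2, b3} -> b4
   where a + b is computed only in b2, the value is in ANTIC_IN[b2]
   but not in ANTIC_IN[b3], so it is not in ANTIC_OUT[b1]; it is in
   PA_OUT[b1], though, since PA_OUT takes the union over the
   successors. */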
2250 static bool
2251 compute_partial_antic_aux (basic_block block,
2252 bool block_has_abnormal_pred_edge)
2253 {
2254 bool changed = false;
2255 bitmap_set_t old_PA_IN;
2256 bitmap_set_t PA_OUT;
2257 edge e;
2258 edge_iterator ei;
2259 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2260
2261 old_PA_IN = PA_OUT = NULL;
2262
2263 /* If any edges from predecessors are abnormal, antic_in is empty,
2264 so do nothing. */
2265 if (block_has_abnormal_pred_edge)
2266 goto maybe_dump_sets;
2267
2268 /* If there are too many partially anticipatable values in the
2269 block, phi_translate_set can take exponential time: stop
2270 before the translation starts. */
2271 if (max_pa
2272 && single_succ_p (block)
2273 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2274 goto maybe_dump_sets;
2275
2276 old_PA_IN = PA_IN (block);
2277 PA_OUT = bitmap_set_new ();
2278
2279 /* If the block has no successors, PA_OUT is empty. */
2280 if (EDGE_COUNT (block->succs) == 0)
2281 ;
2282 /* If we have one successor, we could have some phi nodes to
2283 translate through. Note that we can't phi translate across DFS
2284 back edges in partial antic, because it uses a union operation on
2285 the successors. For recurrences like IVs, we would end up
2286 generating a new value in the set on each go around (i + 3 (VH.1),
2287 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2288 else if (single_succ_p (block))
2289 {
2290 basic_block succ = single_succ (block);
2291 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2292 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2293 }
2294 /* If we have multiple successors, we take the union of all of
2295 them. */
2296 else
2297 {
2298 size_t i;
2299 basic_block bprime;
2300
2301 auto_vec<basic_block> worklist (EDGE_COUNT (block->succs));
2302 FOR_EACH_EDGE (e, ei, block->succs)
2303 {
2304 if (e->flags & EDGE_DFS_BACK)
2305 continue;
2306 worklist.quick_push (e->dest);
2307 }
2308 if (worklist.length () > 0)
2309 {
2310 FOR_EACH_VEC_ELT (worklist, i, bprime)
2311 {
2312 unsigned int i;
2313 bitmap_iterator bi;
2314
2315 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2316 bitmap_value_insert_into_set (PA_OUT,
2317 expression_for_id (i));
2318 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2319 {
2320 bitmap_set_t pa_in = bitmap_set_new ();
2321 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2322 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2323 bitmap_value_insert_into_set (PA_OUT,
2324 expression_for_id (i));
2325 bitmap_set_free (pa_in);
2326 }
2327 else
2328 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2329 bitmap_value_insert_into_set (PA_OUT,
2330 expression_for_id (i));
2331 }
2332 }
2333 }
2334
2335 /* Prune expressions that are clobbered in block and thus become
2336 invalid if translated from PA_OUT to PA_IN. */
2337 prune_clobbered_mems (PA_OUT, block);
2338
2339 /* PA_IN starts with PA_OUT - TMP_GEN.
2340 Then we subtract things from ANTIC_IN. */
2341 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2342
2343 /* For partial antic, we want to put back in the phi results, since
2344 we will properly avoid making them partially antic over backedges. */
2345 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2346 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2347
2348 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2349 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2350
2351 dependent_clean (PA_IN (block), ANTIC_IN (block));
2352
2353 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2354 {
2355 changed = true;
2356 bitmap_set_bit (changed_blocks, block->index);
2357 FOR_EACH_EDGE (e, ei, block->preds)
2358 bitmap_set_bit (changed_blocks, e->src->index);
2359 }
2360 else
2361 bitmap_clear_bit (changed_blocks, block->index);
2362
2363 maybe_dump_sets:
2364 if (dump_file && (dump_flags & TDF_DETAILS))
2365 {
2366 if (PA_OUT)
2367 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2368
2369 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2370 }
2371 if (old_PA_IN)
2372 bitmap_set_free (old_PA_IN);
2373 if (PA_OUT)
2374 bitmap_set_free (PA_OUT);
2375 return changed;
2376 }
2377
2378 /* Compute ANTIC and partial ANTIC sets. */
2379
2380 static void
2381 compute_antic (void)
2382 {
2383 bool changed = true;
2384 int num_iterations = 0;
2385 basic_block block;
2386 int i;
2387
2388 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2389 We pre-build the map of blocks with incoming abnormal edges here. */
2390 has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
2391 bitmap_clear (has_abnormal_preds);
2392
2393 FOR_ALL_BB_FN (block, cfun)
2394 {
2395 edge_iterator ei;
2396 edge e;
2397
2398 FOR_EACH_EDGE (e, ei, block->preds)
2399 {
2400 e->flags &= ~EDGE_DFS_BACK;
2401 if (e->flags & EDGE_ABNORMAL)
2402 {
2403 bitmap_set_bit (has_abnormal_preds, block->index);
2404 break;
2405 }
2406 }
2407
2408 BB_VISITED (block) = 0;
2409
2410 /* While we are here, give empty ANTIC_IN sets to each block. */
2411 ANTIC_IN (block) = bitmap_set_new ();
2412 PA_IN (block) = bitmap_set_new ();
2413 }
2414
2415 /* At the exit block we anticipate nothing. */
2416 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2417
2418 changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
2419 bitmap_ones (changed_blocks);
2420 while (changed)
2421 {
2422 if (dump_file && (dump_flags & TDF_DETAILS))
2423 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2424 /* ??? We need to clear our PHI translation cache here as the
2425 ANTIC sets shrink and we restrict valid translations to
2426 those having operands with leaders in ANTIC. Same below
2427 for PA ANTIC computation. */
2428 num_iterations++;
2429 changed = false;
2430 for (i = postorder_num - 1; i >= 0; i--)
2431 {
2432 if (bitmap_bit_p (changed_blocks, postorder[i]))
2433 {
2434 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2435 changed |= compute_antic_aux (block,
2436 bitmap_bit_p (has_abnormal_preds,
2437 block->index));
2438 }
2439 }
2440 /* Theoretically possible, but *highly* unlikely. */
2441 gcc_checking_assert (num_iterations < 500);
2442 }
2443
2444 statistics_histogram_event (cfun, "compute_antic iterations",
2445 num_iterations);
2446
2447 if (do_partial_partial)
2448 {
2449 bitmap_ones (changed_blocks);
2450 mark_dfs_back_edges ();
2451 num_iterations = 0;
2452 changed = true;
2453 while (changed)
2454 {
2455 if (dump_file && (dump_flags & TDF_DETAILS))
2456 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2457 num_iterations++;
2458 changed = false;
2459 for (i = postorder_num - 1 ; i >= 0; i--)
2460 {
2461 if (bitmap_bit_p (changed_blocks, postorder[i]))
2462 {
2463 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2464 changed
2465 |= compute_partial_antic_aux (block,
2466 bitmap_bit_p (has_abnormal_preds,
2467 block->index));
2468 }
2469 }
2470 /* Theoretically possible, but *highly* unlikely. */
2471 gcc_checking_assert (num_iterations < 500);
2472 }
2473 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2474 num_iterations);
2475 }
2476 sbitmap_free (has_abnormal_preds);
2477 sbitmap_free (changed_blocks);
2478 }
2479
2480
2481 /* Inserted expressions are placed onto this worklist, which is used
2482 for performing quick dead code elimination of insertions we made
2483 that didn't turn out to be necessary. */
2484 static bitmap inserted_exprs;
2485
2486 /* The actual worker for create_component_ref_by_pieces. */
2487
2488 static tree
2489 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2490 unsigned int *operand, gimple_seq *stmts)
2491 {
2492 vn_reference_op_t currop = &ref->operands[*operand];
2493 tree genop;
2494 ++*operand;
2495 switch (currop->opcode)
2496 {
2497 case CALL_EXPR:
2498 {
2499 tree folded, sc = NULL_TREE;
2500 unsigned int nargs = 0;
2501 tree fn, *args;
2502 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2503 fn = currop->op0;
2504 else
2505 fn = find_or_generate_expression (block, currop->op0, stmts);
2506 if (!fn)
2507 return NULL_TREE;
2508 if (currop->op1)
2509 {
2510 sc = find_or_generate_expression (block, currop->op1, stmts);
2511 if (!sc)
2512 return NULL_TREE;
2513 }
2514 args = XNEWVEC (tree, ref->operands.length () - 1);
2515 while (*operand < ref->operands.length ())
2516 {
2517 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2518 operand, stmts);
2519 if (!args[nargs])
2520 return NULL_TREE;
2521 nargs++;
2522 }
2523 folded = build_call_array (currop->type,
2524 (TREE_CODE (fn) == FUNCTION_DECL
2525 ? build_fold_addr_expr (fn) : fn),
2526 nargs, args);
2527 if (currop->with_bounds)
2528 CALL_WITH_BOUNDS_P (folded) = true;
2529 free (args);
2530 if (sc)
2531 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2532 return folded;
2533 }
2534
2535 case MEM_REF:
2536 {
2537 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2538 stmts);
2539 if (!baseop)
2540 return NULL_TREE;
2541 tree offset = currop->op0;
2542 if (TREE_CODE (baseop) == ADDR_EXPR
2543 && handled_component_p (TREE_OPERAND (baseop, 0)))
2544 {
2545 HOST_WIDE_INT off;
2546 tree base;
2547 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2548 &off);
2549 gcc_assert (base);
2550 offset = int_const_binop (PLUS_EXPR, offset,
2551 build_int_cst (TREE_TYPE (offset),
2552 off));
2553 baseop = build_fold_addr_expr (base);
2554 }
2555 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2556 }
2557
2558 case TARGET_MEM_REF:
2559 {
2560 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2561 vn_reference_op_t nextop = &ref->operands[++*operand];
2562 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2563 stmts);
2564 if (!baseop)
2565 return NULL_TREE;
2566 if (currop->op0)
2567 {
2568 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2569 if (!genop0)
2570 return NULL_TREE;
2571 }
2572 if (nextop->op0)
2573 {
2574 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2575 if (!genop1)
2576 return NULL_TREE;
2577 }
2578 return build5 (TARGET_MEM_REF, currop->type,
2579 baseop, currop->op2, genop0, currop->op1, genop1);
2580 }
2581
2582 case ADDR_EXPR:
2583 if (currop->op0)
2584 {
2585 gcc_assert (is_gimple_min_invariant (currop->op0));
2586 return currop->op0;
2587 }
2588 /* Fallthrough. */
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case VIEW_CONVERT_EXPR:
2592 {
2593 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2594 stmts);
2595 if (!genop0)
2596 return NULL_TREE;
2597 return fold_build1 (currop->opcode, currop->type, genop0);
2598 }
2599
2600 case WITH_SIZE_EXPR:
2601 {
2602 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2603 stmts);
2604 if (!genop0)
2605 return NULL_TREE;
2606 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2607 if (!genop1)
2608 return NULL_TREE;
2609 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2610 }
2611
2612 case BIT_FIELD_REF:
2613 {
2614 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2615 stmts);
2616 if (!genop0)
2617 return NULL_TREE;
2618 tree op1 = currop->op0;
2619 tree op2 = currop->op1;
2620 return fold_build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2621 }
2622
2623 /* For array ref vn_reference_op's, operand 1 of the array ref
2624 is op0 of the reference op and operand 3 of the array ref is
2625 op1. */
2626 case ARRAY_RANGE_REF:
2627 case ARRAY_REF:
2628 {
2629 tree genop0;
2630 tree genop1 = currop->op0;
2631 tree genop2 = currop->op1;
2632 tree genop3 = currop->op2;
2633 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2634 stmts);
2635 if (!genop0)
2636 return NULL_TREE;
2637 genop1 = find_or_generate_expression (block, genop1, stmts);
2638 if (!genop1)
2639 return NULL_TREE;
2640 if (genop2)
2641 {
2642 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2643 /* Drop zero minimum index if redundant. */
2644 if (integer_zerop (genop2)
2645 && (!domain_type
2646 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2647 genop2 = NULL_TREE;
2648 else
2649 {
2650 genop2 = find_or_generate_expression (block, genop2, stmts);
2651 if (!genop2)
2652 return NULL_TREE;
2653 }
2654 }
2655 if (genop3)
2656 {
2657 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2658 /* We can't always put a size in units of the element alignment
2659 here as the element alignment may not be visible. See
2660 PR43783. Simply drop the element size for constant
2661 sizes. */
2662 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2663 genop3 = NULL_TREE;
2664 else
2665 {
2666 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2667 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2668 genop3 = find_or_generate_expression (block, genop3, stmts);
2669 if (!genop3)
2670 return NULL_TREE;
2671 }
2672 }
2673 return build4 (currop->opcode, currop->type, genop0, genop1,
2674 genop2, genop3);
2675 }
2676 case COMPONENT_REF:
2677 {
2678 tree op0;
2679 tree op1;
2680 tree genop2 = currop->op1;
2681 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2682 if (!op0)
2683 return NULL_TREE;
2684 /* op1 should be a FIELD_DECL; FIELD_DECLs are represented by themselves. */
2685 op1 = currop->op0;
2686 if (genop2)
2687 {
2688 genop2 = find_or_generate_expression (block, genop2, stmts);
2689 if (!genop2)
2690 return NULL_TREE;
2691 }
2692 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2693 }
2694
2695 case SSA_NAME:
2696 {
2697 genop = find_or_generate_expression (block, currop->op0, stmts);
2698 return genop;
2699 }
2700 case STRING_CST:
2701 case INTEGER_CST:
2702 case COMPLEX_CST:
2703 case VECTOR_CST:
2704 case REAL_CST:
2705 case CONSTRUCTOR:
2706 case VAR_DECL:
2707 case PARM_DECL:
2708 case CONST_DECL:
2709 case RESULT_DECL:
2710 case FUNCTION_DECL:
2711 return currop->op0;
2712
2713 default:
2714 gcc_unreachable ();
2715 }
2716 }
2717
2718 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2719 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2720 trying to rename aggregates into SSA form directly, which is a no-no.
2721
2722 Thus, this routine doesn't create temporaries; it just builds a
2723 single access expression for the array, calling
2724 find_or_generate_expression to build the innermost pieces.
2725
2726 This function is a subroutine of create_expression_by_pieces, and
2727 should not be called on its own unless you really know what you
2728 are doing. */
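/* As a schematic illustration (names made up), for a reference
   representing a.b[i_1], the operands vector is walked outside-in:
   the ARRAY_REF piece re-generates the index i_1 via
   find_or_generate_expression, the COMPONENT_REF piece supplies the
   FIELD_DECL b, and the final operand supplies the base a, yielding
   the single access expression a.b[i_1]. */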
2729
2730 static tree
2731 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2732 gimple_seq *stmts)
2733 {
2734 unsigned int op = 0;
2735 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2736 }
2737
2738 /* Find a simple leader for an expression, or generate one using
2739 create_expression_by_pieces from a NARY expression for the value.
2740 BLOCK is the basic_block we are looking for leaders in.
2741 OP is the tree expression to find a leader for or generate.
2742 Returns the leader or NULL_TREE on failure. */
2743
2744 static tree
2745 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2746 {
2747 pre_expr expr = get_or_alloc_expr_for (op);
2748 unsigned int lookfor = get_expr_value_id (expr);
2749 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2750 if (leader)
2751 {
2752 if (leader->kind == NAME)
2753 return PRE_EXPR_NAME (leader);
2754 else if (leader->kind == CONSTANT)
2755 return PRE_EXPR_CONSTANT (leader);
2756
2757 /* Defer. */
2758 return NULL_TREE;
2759 }
2760
2761 /* It must be a complex expression, so generate it recursively. Note
2762 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2763 where the insert algorithm fails to insert a required expression. */
2764 bitmap exprset = value_expressions[lookfor];
2765 bitmap_iterator bi;
2766 unsigned int i;
2767 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2768 {
2769 pre_expr temp = expression_for_id (i);
2770 /* We cannot insert random REFERENCE expressions at arbitrary
2771 places. We can insert NARYs, which eventually re-materialize
2772 their operand values. */
2773 if (temp->kind == NARY)
2774 return create_expression_by_pieces (block, temp, stmts,
2775 get_expr_type (expr));
2776 }
2777
2778 /* Defer. */
2779 return NULL_TREE;
2780 }
2781
2782 #define NECESSARY GF_PLF_1
2783
2784 /* Create an expression in pieces, so that we can handle very complex
2785 expressions that may be ANTIC, but not necessarily GIMPLE.
2786 BLOCK is the basic block the expression will be inserted into,
2787 EXPR is the expression to insert (in value form),
2788 STMTS is a statement list to append the necessary insertions into.
2789
2790 This function will die if we hit some value that shouldn't be
2791 ANTIC but is (i.e. there is no leader for it, or its components).
2792 The function returns NULL_TREE in case a different antic expression
2793 has to be inserted first.
2794 This function may also generate expressions that are themselves
2795 partially or fully redundant. Those that are will be either made
2796 fully redundant during the next iteration of insert (for partially
2797 redundant ones), or eliminated by eliminate (for fully redundant
2798 ones). */
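/* As a schematic illustration (SSA names made up), asked to generate
   the value of (a + b) * c when a + b already has the leader t_1,
   this emits

     pretmp_2 = t_1 * c_3;

   into STMTS, gives pretmp_2 the value number of the original
   expression, and returns pretmp_2. */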
2799
2800 static tree
2801 create_expression_by_pieces (basic_block block, pre_expr expr,
2802 gimple_seq *stmts, tree type)
2803 {
2804 tree name;
2805 tree folded;
2806 gimple_seq forced_stmts = NULL;
2807 unsigned int value_id;
2808 gimple_stmt_iterator gsi;
2809 tree exprtype = type ? type : get_expr_type (expr);
2810 pre_expr nameexpr;
2811 gassign *newstmt;
2812
2813 switch (expr->kind)
2814 {
2815 /* We may hit the NAME/CONSTANT case if we have to convert types
2816 that value numbering saw through. */
2817 case NAME:
2818 folded = PRE_EXPR_NAME (expr);
2819 break;
2820 case CONSTANT:
2821 folded = PRE_EXPR_CONSTANT (expr);
2822 break;
2823 case REFERENCE:
2824 {
2825 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2826 folded = create_component_ref_by_pieces (block, ref, stmts);
2827 if (!folded)
2828 return NULL_TREE;
2829 }
2830 break;
2831 case NARY:
2832 {
2833 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2834 tree *genop = XALLOCAVEC (tree, nary->length);
2835 unsigned i;
2836 for (i = 0; i < nary->length; ++i)
2837 {
2838 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2839 if (!genop[i])
2840 return NULL_TREE;
2841 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2842 may have conversions stripped. */
2843 if (nary->opcode == POINTER_PLUS_EXPR)
2844 {
2845 if (i == 0)
2846 genop[i] = gimple_convert (&forced_stmts,
2847 nary->type, genop[i]);
2848 else if (i == 1)
2849 genop[i] = gimple_convert (&forced_stmts,
2850 sizetype, genop[i]);
2851 }
2852 else
2853 genop[i] = gimple_convert (&forced_stmts,
2854 TREE_TYPE (nary->op[i]), genop[i]);
2855 }
2856 if (nary->opcode == CONSTRUCTOR)
2857 {
2858 vec<constructor_elt, va_gc> *elts = NULL;
2859 for (i = 0; i < nary->length; ++i)
2860 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
2861 folded = build_constructor (nary->type, elts);
2862 }
2863 else
2864 {
2865 switch (nary->length)
2866 {
2867 case 1:
2868 folded = fold_build1 (nary->opcode, nary->type,
2869 genop[0]);
2870 break;
2871 case 2:
2872 folded = fold_build2 (nary->opcode, nary->type,
2873 genop[0], genop[1]);
2874 break;
2875 case 3:
2876 folded = fold_build3 (nary->opcode, nary->type,
2877 genop[0], genop[1], genop[2]);
2878 break;
2879 default:
2880 gcc_unreachable ();
2881 }
2882 }
2883 }
2884 break;
2885 default:
2886 gcc_unreachable ();
2887 }
2888
2889 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2890 folded = fold_convert (exprtype, folded);
2891
2892 /* Force the generated expression to be a sequence of GIMPLE
2893 statements.
2894 We have to call unshare_expr because force_gimple_operand may
2895 modify the tree we pass to it. */
2896 gimple_seq tem = NULL;
2897 folded = force_gimple_operand (unshare_expr (folded), &tem,
2898 false, NULL);
2899 gimple_seq_add_seq_without_update (&forced_stmts, tem);
2900
2901 /* If forcing created any intermediate expressions, add them to the
2902 value sets and chain them into the instruction stream. */
2903 if (forced_stmts)
2904 {
2905 gsi = gsi_start (forced_stmts);
2906 for (; !gsi_end_p (gsi); gsi_next (&gsi))
2907 {
2908 gimple stmt = gsi_stmt (gsi);
2909 tree forcedname = gimple_get_lhs (stmt);
2910 pre_expr nameexpr;
2911
2912 if (TREE_CODE (forcedname) == SSA_NAME)
2913 {
2914 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
2915 VN_INFO_GET (forcedname)->valnum = forcedname;
2916 VN_INFO (forcedname)->value_id = get_next_value_id ();
2917 nameexpr = get_or_alloc_expr_for_name (forcedname);
2918 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
2919 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2920 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2921 }
2922
2923 gimple_set_vuse (stmt, BB_LIVE_VOP_ON_EXIT (block));
2924 gimple_set_modified (stmt, true);
2925 }
2926 gimple_seq_add_seq (stmts, forced_stmts);
2927 }
2928
2929 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2930 newstmt = gimple_build_assign (name, folded);
2931 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2932 gimple_set_modified (newstmt, true);
2933 gimple_set_plf (newstmt, NECESSARY, false);
2934
2935 gimple_seq_add_stmt (stmts, newstmt);
2936 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
2937
2938 /* Fold the last statement. */
2939 gsi = gsi_last (*stmts);
2940 if (fold_stmt_inplace (&gsi))
2941 update_stmt (gsi_stmt (gsi));
2942
2943 /* Add a value number to the temporary.
2944 The value may already exist in either NEW_SETS or AVAIL_OUT, because
2945 we are creating the expression by pieces, and this particular piece of
2946 the expression may already be represented. There is no harm in replacing
2947 here. */
2948 value_id = get_expr_value_id (expr);
2949 VN_INFO_GET (name)->value_id = value_id;
2950 VN_INFO (name)->valnum = sccvn_valnum_from_value_id (value_id);
2951 if (VN_INFO (name)->valnum == NULL_TREE)
2952 VN_INFO (name)->valnum = name;
2953 gcc_assert (VN_INFO (name)->valnum != NULL_TREE);
2954 nameexpr = get_or_alloc_expr_for_name (name);
2955 add_to_value (value_id, nameexpr);
2956 if (NEW_SETS (block))
2957 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
2958 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
2959
2960 pre_stats.insertions++;
2961 if (dump_file && (dump_flags & TDF_DETAILS))
2962 {
2963 fprintf (dump_file, "Inserted ");
2964 print_gimple_stmt (dump_file, newstmt, 0, 0);
2965 fprintf (dump_file, " in predecessor %d (%04d)\n",
2966 block->index, value_id);
2967 }
2968
2969 return name;
2970 }
2971
2972
2973 /* Insert the to-be-made-available values of expression EXPRNUM for each
2974 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
2975 merge the result with a phi node, given the same value number as
2976 NODE. Return true if we have inserted new stuff. */
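/* As a schematic illustration (names made up), for BLOCK with
   predecessors p1 and p2, where AVAIL holds the leader x_1 for p1 but
   the expression must be generated for p2, this inserts
   pretmp_2 = ... on the p2 edge and creates

     prephitmp_3 = PHI <x_1 (p1), pretmp_2 (p2)>

   in BLOCK, registering prephitmp_3 with the same value number. */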
2977
2978 static bool
2979 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
2980 vec<pre_expr> avail)
2981 {
2982 pre_expr expr = expression_for_id (exprnum);
2983 pre_expr newphi;
2984 unsigned int val = get_expr_value_id (expr);
2985 edge pred;
2986 bool insertions = false;
2987 bool nophi = false;
2988 basic_block bprime;
2989 pre_expr eprime;
2990 edge_iterator ei;
2991 tree type = get_expr_type (expr);
2992 tree temp;
2993 gphi *phi;
2994
2995 /* Make sure we aren't creating an induction variable. */
2996 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
2997 {
2998 bool firstinsideloop = false;
2999 bool secondinsideloop = false;
3000 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3001 EDGE_PRED (block, 0)->src);
3002 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3003 EDGE_PRED (block, 1)->src);
3004 /* Induction variables only have one edge inside the loop. */
3005 if ((firstinsideloop ^ secondinsideloop)
3006 && expr->kind != REFERENCE)
3007 {
3008 if (dump_file && (dump_flags & TDF_DETAILS))
3009 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3010 nophi = true;
3011 }
3012 }
3013
3014 /* Make the necessary insertions. */
3015 FOR_EACH_EDGE (pred, ei, block->preds)
3016 {
3017 gimple_seq stmts = NULL;
3018 tree builtexpr;
3019 bprime = pred->src;
3020 eprime = avail[pred->dest_idx];
3021
3022 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3023 {
3024 builtexpr = create_expression_by_pieces (bprime, eprime,
3025 &stmts, type);
3026 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3027 gsi_insert_seq_on_edge (pred, stmts);
3028 if (!builtexpr)
3029 {
3030 /* We cannot insert a PHI node if we failed to insert
3031 on one edge. */
3032 nophi = true;
3033 continue;
3034 }
3035 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3036 insertions = true;
3037 }
3038 else if (eprime->kind == CONSTANT)
3039 {
3040 /* Constants may not have the right type; fold_convert
3041 should give us back a constant with the right type. */
3042 tree constant = PRE_EXPR_CONSTANT (eprime);
3043 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3044 {
3045 tree builtexpr = fold_convert (type, constant);
3046 if (!is_gimple_min_invariant (builtexpr))
3047 {
3048 tree forcedexpr = force_gimple_operand (builtexpr,
3049 &stmts, true,
3050 NULL);
3051 if (!is_gimple_min_invariant (forcedexpr))
3052 {
3053 if (forcedexpr != builtexpr)
3054 {
3055 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3056 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3057 }
3058 if (stmts)
3059 {
3060 gimple_stmt_iterator gsi;
3061 gsi = gsi_start (stmts);
3062 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3063 {
3064 gimple stmt = gsi_stmt (gsi);
3065 tree lhs = gimple_get_lhs (stmt);
3066 if (TREE_CODE (lhs) == SSA_NAME)
3067 bitmap_set_bit (inserted_exprs,
3068 SSA_NAME_VERSION (lhs));
3069 gimple_set_plf (stmt, NECESSARY, false);
3070 }
3071 gsi_insert_seq_on_edge (pred, stmts);
3072 }
3073 avail[pred->dest_idx]
3074 = get_or_alloc_expr_for_name (forcedexpr);
3075 }
3076 }
3077 else
3078 avail[pred->dest_idx]
3079 = get_or_alloc_expr_for_constant (builtexpr);
3080 }
3081 }
3082 else if (eprime->kind == NAME)
3083 {
3084 /* We may have to do a conversion because our value
3085 numbering can look through types in certain cases, but
3086 our IL requires that all operands of a phi node have
3087 the same type. */
3088 tree name = PRE_EXPR_NAME (eprime);
3089 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3090 {
3091 tree builtexpr;
3092 tree forcedexpr;
3093 builtexpr = fold_convert (type, name);
3094 forcedexpr = force_gimple_operand (builtexpr,
3095 &stmts, true,
3096 NULL);
3097
3098 if (forcedexpr != name)
3099 {
3100 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3101 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3102 }
3103
3104 if (stmts)
3105 {
3106 gimple_stmt_iterator gsi;
3107 gsi = gsi_start (stmts);
3108 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3109 {
3110 gimple stmt = gsi_stmt (gsi);
3111 tree lhs = gimple_get_lhs (stmt);
3112 if (TREE_CODE (lhs) == SSA_NAME)
3113 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3114 gimple_set_plf (stmt, NECESSARY, false);
3115 }
3116 gsi_insert_seq_on_edge (pred, stmts);
3117 }
3118 avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
3119 }
3120 }
3121 }
3122 /* If we didn't want a phi node, and we made insertions, we still have
3123 inserted new stuff, and thus return true. If we didn't want a phi node,
3124 and didn't make insertions, we haven't added anything new, so return
3125 false. */
3126 if (nophi && insertions)
3127 return true;
3128 else if (nophi && !insertions)
3129 return false;
3130
3131 /* Now build a phi for the new variable. */
3132 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3133 phi = create_phi_node (temp, block);
3134
3135 gimple_set_plf (phi, NECESSARY, false);
3136 VN_INFO_GET (temp)->value_id = val;
3137 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3138 if (VN_INFO (temp)->valnum == NULL_TREE)
3139 VN_INFO (temp)->valnum = temp;
3140 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3141 FOR_EACH_EDGE (pred, ei, block->preds)
3142 {
3143 pre_expr ae = avail[pred->dest_idx];
3144 gcc_assert (get_expr_type (ae) == type
3145 || useless_type_conversion_p (type, get_expr_type (ae)));
3146 if (ae->kind == CONSTANT)
3147 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3148 pred, UNKNOWN_LOCATION);
3149 else
3150 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3151 }
3152
3153 newphi = get_or_alloc_expr_for_name (temp);
3154 add_to_value (val, newphi);
3155
3156 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3157 this insertion, since we test for the existence of this value in PHI_GEN
3158 before proceeding with the partial redundancy checks in insert_aux.
3159
3160 The value may exist in AVAIL_OUT, in particular, it could be represented
3161 by the expression we are trying to eliminate, in which case we want the
3162 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3163 inserted there.
3164
3165 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3166 this block, because if it did, it would have existed in our dominator's
3167 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3168 */
3169
3170 bitmap_insert_into_set (PHI_GEN (block), newphi);
3171 bitmap_value_replace_in_set (AVAIL_OUT (block),
3172 newphi);
3173 bitmap_insert_into_set (NEW_SETS (block),
3174 newphi);
3175
3176 if (dump_file && (dump_flags & TDF_DETAILS))
3177 {
3178 fprintf (dump_file, "Created phi ");
3179 print_gimple_stmt (dump_file, phi, 0, 0);
3180 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3181 }
3182 pre_stats.phis++;
3183 return true;
3184 }
3185
3186
3187
3188 /* Perform insertion of partially redundant values.
3189 For BLOCK, do the following:
3190 1. Propagate the NEW_SETS of the dominator into the current block.
3191 If the block has multiple predecessors,
3192 2a. Iterate over the ANTIC expressions for the block to see if
3193 any of them are partially redundant.
3194 2b. If so, insert them into the necessary predecessors to make
3195 the expression fully redundant.
3196 2c. Insert a new PHI merging the values of the predecessors.
3197 2d. Insert the new PHI, and the new expressions, into the
3198 NEW_SETS set.
3199 3. Recursively call ourselves on the dominator children of BLOCK.
3200
3201 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3202 do_regular_insertion and do_partial_insertion.
3203
3204 */
3205
3206 static bool
3207 do_regular_insertion (basic_block block, basic_block dom)
3208 {
3209 bool new_stuff = false;
3210 vec<pre_expr> exprs;
3211 pre_expr expr;
3212 auto_vec<pre_expr> avail;
3213 int i;
3214
3215 exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3216 avail.safe_grow (EDGE_COUNT (block->preds));
3217
3218 FOR_EACH_VEC_ELT (exprs, i, expr)
3219 {
3220 if (expr->kind == NARY
3221 || expr->kind == REFERENCE)
3222 {
3223 unsigned int val;
3224 bool by_some = false;
3225 bool cant_insert = false;
3226 bool all_same = true;
3227 pre_expr first_s = NULL;
3228 edge pred;
3229 basic_block bprime;
3230 pre_expr eprime = NULL;
3231 edge_iterator ei;
3232 pre_expr edoubleprime = NULL;
3233 bool do_insertion = false;
3234
3235 val = get_expr_value_id (expr);
3236 if (bitmap_set_contains_value (PHI_GEN (block), val))
3237 continue;
3238 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3239 {
3240 if (dump_file && (dump_flags & TDF_DETAILS))
3241 {
3242 fprintf (dump_file, "Found fully redundant value: ");
3243 print_pre_expr (dump_file, expr);
3244 fprintf (dump_file, "\n");
3245 }
3246 continue;
3247 }
3248
3249 FOR_EACH_EDGE (pred, ei, block->preds)
3250 {
3251 unsigned int vprime;
3252
3253 /* We should never run insertion for the exit block
3254 and so should not come across fake pred edges. */
3255 gcc_assert (!(pred->flags & EDGE_FAKE));
3256 bprime = pred->src;
3257 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3258 bprime, block);
3259
3260 /* eprime will generally only be NULL if the
3261 value of the expression, translated
3262 through the PHI for this predecessor, is
3263 undefined. If that is the case, we can't
3264 make the expression fully redundant,
3265 because its value is undefined along a
3266 predecessor path. We can thus break out
3267 early because it doesn't matter what the
3268 rest of the results are. */
3269 if (eprime == NULL)
3270 {
3271 avail[pred->dest_idx] = NULL;
3272 cant_insert = true;
3273 break;
3274 }
3275
3276 eprime = fully_constant_expression (eprime);
3277 vprime = get_expr_value_id (eprime);
3278 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3279 vprime);
3280 if (edoubleprime == NULL)
3281 {
3282 avail[pred->dest_idx] = eprime;
3283 all_same = false;
3284 }
3285 else
3286 {
3287 avail[pred->dest_idx] = edoubleprime;
3288 by_some = true;
3289 /* We want to perform insertions to remove a redundancy on
3290 a path in the CFG we want to optimize for speed. */
3291 if (optimize_edge_for_speed_p (pred))
3292 do_insertion = true;
3293 if (first_s == NULL)
3294 first_s = edoubleprime;
3295 else if (!pre_expr_d::equal (first_s, edoubleprime))
3296 all_same = false;
3297 }
3298 }
3299 /* If we can insert it, it's not the same value
3300 already existing along every predecessor, and
3301 it's defined by some predecessor, then it is
3302 partially redundant. */
3303 if (!cant_insert && !all_same && by_some)
3304 {
3305 if (!do_insertion)
3306 {
3307 if (dump_file && (dump_flags & TDF_DETAILS))
3308 {
3309 fprintf (dump_file, "Skipping partial redundancy for "
3310 "expression ");
3311 print_pre_expr (dump_file, expr);
3312 fprintf (dump_file, " (%04d), no redundancy on to be "
3313 "optimized for speed edge\n", val);
3314 }
3315 }
3316 else if (dbg_cnt (treepre_insert))
3317 {
3318 if (dump_file && (dump_flags & TDF_DETAILS))
3319 {
3320 fprintf (dump_file, "Found partial redundancy for "
3321 "expression ");
3322 print_pre_expr (dump_file, expr);
3323 fprintf (dump_file, " (%04d)\n",
3324 get_expr_value_id (expr));
3325 }
3326 if (insert_into_preds_of_block (block,
3327 get_expression_id (expr),
3328 avail))
3329 new_stuff = true;
3330 }
3331 }
3332 /* If all edges produce the same value and that value is
3333 an invariant, then the PHI has the same value on all
3334 edges. Note this. */
3335 else if (!cant_insert && all_same)
3336 {
3337 gcc_assert (edoubleprime->kind == CONSTANT
3338 || edoubleprime->kind == NAME);
3339
3340 tree temp = make_temp_ssa_name (get_expr_type (expr),
3341 NULL, "pretmp");
3342 gassign *assign
3343 = gimple_build_assign (temp,
3344 edoubleprime->kind == CONSTANT ?
3345 PRE_EXPR_CONSTANT (edoubleprime) :
3346 PRE_EXPR_NAME (edoubleprime));
3347 gimple_stmt_iterator gsi = gsi_after_labels (block);
3348 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3349
3350 gimple_set_plf (assign, NECESSARY, false);
3351 VN_INFO_GET (temp)->value_id = val;
3352 VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
3353 if (VN_INFO (temp)->valnum == NULL_TREE)
3354 VN_INFO (temp)->valnum = temp;
3355 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3356 pre_expr newe = get_or_alloc_expr_for_name (temp);
3357 add_to_value (val, newe);
3358 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3359 bitmap_insert_into_set (NEW_SETS (block), newe);
3360 }
3361 }
3362 }
3363
3364 exprs.release ();
3365 return new_stuff;
3366 }
3367
3368
3369 /* Perform insertion for partially anticipatable expressions. There
3370 is only one case in which we will perform insertion for these: when
3371 the expression is partially anticipatable and fully available.
3372 In this case, we know that putting it earlier will enable us to
3373 remove the later computation. */
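/* As a schematic illustration: if a + b is computed on every path
   into a join block and re-computed on only one path out of it, the
   value is fully available but merely partially anticipated at the
   join; inserting a PHI of the available leaders there lets the
   later computation be removed. */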
3374
3375
3376 static bool
3377 do_partial_partial_insertion (basic_block block, basic_block dom)
3378 {
3379 bool new_stuff = false;
3380 vec<pre_expr> exprs;
3381 pre_expr expr;
3382 auto_vec<pre_expr> avail;
3383 int i;
3384
3385 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3386 avail.safe_grow (EDGE_COUNT (block->preds));
3387
3388 FOR_EACH_VEC_ELT (exprs, i, expr)
3389 {
3390 if (expr->kind == NARY
3391 || expr->kind == REFERENCE)
3392 {
3393 unsigned int val;
3394 bool by_all = true;
3395 bool cant_insert = false;
3396 edge pred;
3397 basic_block bprime;
3398 pre_expr eprime = NULL;
3399 edge_iterator ei;
3400
3401 val = get_expr_value_id (expr);
3402 if (bitmap_set_contains_value (PHI_GEN (block), val))
3403 continue;
3404 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3405 continue;
3406
3407 FOR_EACH_EDGE (pred, ei, block->preds)
3408 {
3409 unsigned int vprime;
3410 pre_expr edoubleprime;
3411
3412 /* We should never run insertion for the exit block
3413 and so should not come across fake pred edges. */
3414 gcc_assert (!(pred->flags & EDGE_FAKE));
3415 bprime = pred->src;
3416 eprime = phi_translate (expr, ANTIC_IN (block),
3417 PA_IN (block),
3418 bprime, block);
3419
3420 /* eprime will generally only be NULL if the
3421 value of the expression, translated
3422 through the PHI for this predecessor, is
3423 undefined. If that is the case, we can't
3424 make the expression fully redundant,
3425 because its value is undefined along a
3426 predecessor path. We can thus break out
3427 early because it doesn't matter what the
3428 rest of the results are. */
3429 if (eprime == NULL)
3430 {
3431 avail[pred->dest_idx] = NULL;
3432 cant_insert = true;
3433 break;
3434 }
3435
3436 eprime = fully_constant_expression (eprime);
3437 vprime = get_expr_value_id (eprime);
3438 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3439 avail[pred->dest_idx] = edoubleprime;
3440 if (edoubleprime == NULL)
3441 {
3442 by_all = false;
3443 break;
3444 }
3445 }
3446
3447 /* If we can insert it and its value is available
3448 along every predecessor, inserting a PHI of the
3449 available leaders makes the later computation
3450 fully redundant. */
3451 if (!cant_insert && by_all)
3452 {
3453 edge succ;
3454 bool do_insertion = false;
3455
3456 /* Insert only if we can remove a later expression on a path
3457 that we want to optimize for speed.
3458 The phi node that we will be inserting in BLOCK is not free,
3459 and inserting it for the sake of a !optimize_for_speed successor
3460 may cause regressions on the speed path. */
3461 FOR_EACH_EDGE (succ, ei, block->succs)
3462 {
3463 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3464 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3465 {
3466 if (optimize_edge_for_speed_p (succ))
3467 do_insertion = true;
3468 }
3469 }
3470
3471 if (!do_insertion)
3472 {
3473 if (dump_file && (dump_flags & TDF_DETAILS))
3474 {
3475 fprintf (dump_file, "Skipping partial partial redundancy "
3476 "for expression ");
3477 print_pre_expr (dump_file, expr);
3478 fprintf (dump_file, " (%04d), not (partially) anticipated "
3479 "on any to be optimized for speed edges\n", val);
3480 }
3481 }
3482 else if (dbg_cnt (treepre_insert))
3483 {
3484 pre_stats.pa_insert++;
3485 if (dump_file && (dump_flags & TDF_DETAILS))
3486 {
3487 fprintf (dump_file, "Found partial partial redundancy "
3488 "for expression ");
3489 print_pre_expr (dump_file, expr);
3490 fprintf (dump_file, " (%04d)\n",
3491 get_expr_value_id (expr));
3492 }
3493 if (insert_into_preds_of_block (block,
3494 get_expression_id (expr),
3495 avail))
3496 new_stuff = true;
3497 }
3498 }
3499 }
3500 }
3501
3502 exprs.release ();
3503 return new_stuff;
3504 }
3505
3506 static bool
3507 insert_aux (basic_block block)
3508 {
3509 basic_block son;
3510 bool new_stuff = false;
3511
3512 if (block)
3513 {
3514 basic_block dom;
3515 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3516 if (dom)
3517 {
3518 unsigned i;
3519 bitmap_iterator bi;
3520 bitmap_set_t newset = NEW_SETS (dom);
3521 if (newset)
3522 {
3523 /* Note that we need to value_replace both NEW_SETS and
3524 AVAIL_OUT. In both sets the value may currently be
3525 represented by some non-simple expression that we want
3526 to replace with the new one. */
3527 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3528 {
3529 pre_expr expr = expression_for_id (i);
3530 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3531 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3532 }
3533 }
3534 if (!single_pred_p (block))
3535 {
3536 new_stuff |= do_regular_insertion (block, dom);
3537 if (do_partial_partial)
3538 new_stuff |= do_partial_partial_insertion (block, dom);
3539 }
3540 }
3541 }
3542 for (son = first_dom_son (CDI_DOMINATORS, block);
3543 son;
3544 son = next_dom_son (CDI_DOMINATORS, son))
3545 {
3546 new_stuff |= insert_aux (son);
3547 }
3548
3549 return new_stuff;
3550 }
3551
3552 /* Perform insertion of partially redundant values. */
3553
3554 static void
3555 insert (void)
3556 {
3557 bool new_stuff = true;
3558 basic_block bb;
3559 int num_iterations = 0;
3560
3561 FOR_ALL_BB_FN (bb, cfun)
3562 NEW_SETS (bb) = bitmap_set_new ();
3563
3564 while (new_stuff)
3565 {
3566 num_iterations++;
3567 if (dump_file && (dump_flags & TDF_DETAILS))
3568 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3569 new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3570
3571 /* Clear the NEW sets before the next iteration. We have already
3572 fully propagated their contents. */
3573 if (new_stuff)
3574 FOR_ALL_BB_FN (bb, cfun)
3575 bitmap_set_free (NEW_SETS (bb));
3576 }
3577 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3578 }
3579
3580
3581 /* Compute the AVAIL set for all basic blocks.
3582
3583 This function performs value numbering of the statements in each basic
3584 block. The AVAIL sets are built from information we glean while doing
3585 this value numbering, since the AVAIL sets contain only one entry per
3586 value.
3587
3588 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3589 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
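/* As a schematic illustration (SSA names made up), after value
   numbering

     b1:  x_1 = a_2 + 1;
     b2:  y_3 = x_1 * 2;    <- b2 dominated by b1

   AVAIL_OUT[b2] has leaders for the values of a_2, x_1 and y_3,
   because AVAIL_IN[b2] starts out as AVAIL_OUT[b1]. */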
3590
3591 static void
3592 compute_avail (void)
3593 {
3594
3595 basic_block block, son;
3596 basic_block *worklist;
3597 size_t sp = 0;
3598 unsigned i;
3599
3600 /* We pretend that default definitions are defined in the entry block.
3601 This includes function arguments and the static chain decl. */
3602 for (i = 1; i < num_ssa_names; ++i)
3603 {
3604 tree name = ssa_name (i);
3605 pre_expr e;
3606 if (!name
3607 || !SSA_NAME_IS_DEFAULT_DEF (name)
3608 || has_zero_uses (name)
3609 || virtual_operand_p (name))
3610 continue;
3611
3612 e = get_or_alloc_expr_for_name (name);
3613 add_to_value (get_expr_value_id (e), e);
3614 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3615 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3616 e);
3617 }
3618
3619 if (dump_file && (dump_flags & TDF_DETAILS))
3620 {
3621 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3622 "tmp_gen", ENTRY_BLOCK);
3623 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3624 "avail_out", ENTRY_BLOCK);
3625 }
3626
3627 /* Allocate the worklist. */
3628 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3629
3630 /* Seed the algorithm by putting the dominator children of the entry
3631 block on the worklist. */
3632 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3633 son;
3634 son = next_dom_son (CDI_DOMINATORS, son))
3635 worklist[sp++] = son;
3636
3637 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3638 = ssa_default_def (cfun, gimple_vop (cfun));
3639
3640 /* Loop until the worklist is empty. */
3641 while (sp)
3642 {
3643 gimple stmt;
3644 basic_block dom;
3645
3646 /* Pick a block from the worklist. */
3647 block = worklist[--sp];
3648
3649 /* Initially, the set of available values in BLOCK is that of
3650 its immediate dominator. */
3651 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3652 if (dom)
3653 {
3654 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3655 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3656 }
3657
3658 /* Generate values for PHI nodes. */
3659 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3660 gsi_next (&gsi))
3661 {
3662 tree result = gimple_phi_result (gsi.phi ());
3663
3664 /* We have no need for virtual phis, as they don't represent
3665 actual computations. */
3666 if (virtual_operand_p (result))
3667 {
3668 BB_LIVE_VOP_ON_EXIT (block) = result;
3669 continue;
3670 }
3671
3672 pre_expr e = get_or_alloc_expr_for_name (result);
3673 add_to_value (get_expr_value_id (e), e);
3674 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3675 bitmap_insert_into_set (PHI_GEN (block), e);
3676 }
3677
3678 BB_MAY_NOTRETURN (block) = 0;
3679
3680 /* Now compute value numbers and populate value sets with all
3681 the expressions computed in BLOCK. */
3682 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3683 gsi_next (&gsi))
3684 {
3685 ssa_op_iter iter;
3686 tree op;
3687
3688 stmt = gsi_stmt (gsi);
3689
3690 /* Cache whether the basic-block has any non-visible side-effect
3691 or control flow.
3692 If this isn't a call or it is the last stmt in the
3693 basic-block, then the CFG represents things correctly.
3694 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3695 {
3696 /* Non-looping const functions always return normally.
3697 Otherwise the call might not return, or might have
3698 side-effects that forbid hoisting possibly trapping
3699 expressions before it. */
3700 int flags = gimple_call_flags (stmt);
3701 if (!(flags & ECF_CONST)
3702 || (flags & ECF_LOOPING_CONST_OR_PURE))
3703 BB_MAY_NOTRETURN (block) = 1;
3704 }
3705
3706 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3707 {
3708 pre_expr e = get_or_alloc_expr_for_name (op);
3709
3710 add_to_value (get_expr_value_id (e), e);
3711 bitmap_insert_into_set (TMP_GEN (block), e);
3712 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3713 }
3714
3715 if (gimple_vdef (stmt))
3716 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
3717
3718 if (gimple_has_side_effects (stmt)
3719 || stmt_could_throw_p (stmt)
3720 || is_gimple_debug (stmt))
3721 continue;
3722
3723 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3724 {
3725 if (ssa_undefined_value_p (op))
3726 continue;
3727 pre_expr e = get_or_alloc_expr_for_name (op);
3728 bitmap_value_insert_into_set (EXP_GEN (block), e);
3729 }
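
/* To illustrate with a made-up statement: for
     x_1 = a_2 + b_3;
   the SSA_OP_DEF loop above puts x_1 into TMP_GEN and AVAIL_OUT,
   the SSA_OP_USE loop puts a_2 and b_3 into EXP_GEN, and the
   GIMPLE_ASSIGN case below adds the NARY a_2 + b_3 itself. */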
3730
3731 switch (gimple_code (stmt))
3732 {
3733 case GIMPLE_RETURN:
3734 continue;
3735
3736 case GIMPLE_CALL:
3737 {
3738 vn_reference_t ref;
3739 vn_reference_s ref1;
3740 pre_expr result = NULL;
3741
3742 /* We can value number only calls to real functions. */
3743 if (gimple_call_internal_p (stmt))
3744 continue;
3745
3746 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
3747 if (!ref)
3748 continue;
3749
3750 /* If the value of the call is not invalidated in
3751 this block until it is computed, add the expression
3752 to EXP_GEN. */
3753 if (!gimple_vuse (stmt)
3754 || gimple_code
3755 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
3756 || gimple_bb (SSA_NAME_DEF_STMT
3757 (gimple_vuse (stmt))) != block)
3758 {
3759 result = (pre_expr) pool_alloc (pre_expr_pool);
3760 result->kind = REFERENCE;
3761 result->id = 0;
3762 PRE_EXPR_REFERENCE (result) = ref;
3763
3764 get_or_alloc_expression_id (result);
3765 add_to_value (get_expr_value_id (result), result);
3766 bitmap_value_insert_into_set (EXP_GEN (block), result);
3767 }
3768 continue;
3769 }
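
/* Example (a sketch): a call
     x_1 = cos (y_2);
   whose VUSE is defined by a PHI or outside this block cannot be
   invalidated between the block start and the call, so its REFERENCE
   is added to EXP_GEN and may later be found (partially) redundant. */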
3770
3771 case GIMPLE_ASSIGN:
3772 {
3773 pre_expr result = NULL;
3774 switch (vn_get_stmt_kind (stmt))
3775 {
3776 case VN_NARY:
3777 {
3778 enum tree_code code = gimple_assign_rhs_code (stmt);
3779 vn_nary_op_t nary;
3780
3781 /* COND_EXPR and VEC_COND_EXPR are awkward in
3782 that they contain an embedded complex expression.
3783 Don't even try to shove those through PRE. */
3784 if (code == COND_EXPR
3785 || code == VEC_COND_EXPR)
3786 continue;
3787
3788 vn_nary_op_lookup_stmt (stmt, &nary);
3789 if (!nary)
3790 continue;
3791
3792 /* If the NARY traps and there was a preceding
3793 point in the block that might not return, avoid
3794 adding the NARY to EXP_GEN. */
3795 if (BB_MAY_NOTRETURN (block)
3796 && vn_nary_may_trap (nary))
3797 continue;
3798
3799 result = (pre_expr) pool_alloc (pre_expr_pool);
3800 result->kind = NARY;
3801 result->id = 0;
3802 PRE_EXPR_NARY (result) = nary;
3803 break;
3804 }
3805
3806 case VN_REFERENCE:
3807 {
3808 vn_reference_t ref;
3809 vn_reference_lookup (gimple_assign_rhs1 (stmt),
3810 gimple_vuse (stmt),
3811 VN_WALK, &ref);
3812 if (!ref)
3813 continue;
3814
3815 /* If the value of the reference is not invalidated in
3816 this block until it is computed, add the expression
3817 to EXP_GEN. */
3818 if (gimple_vuse (stmt))
3819 {
3820 gimple def_stmt;
3821 bool ok = true;
3822 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
3823 while (!gimple_nop_p (def_stmt)
3824 && gimple_code (def_stmt) != GIMPLE_PHI
3825 && gimple_bb (def_stmt) == block)
3826 {
3827 if (stmt_may_clobber_ref_p
3828 (def_stmt, gimple_assign_rhs1 (stmt)))
3829 {
3830 ok = false;
3831 break;
3832 }
3833 def_stmt
3834 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
3835 }
3836 if (!ok)
3837 continue;
3838 }
3839
3840 result = (pre_expr) pool_alloc (pre_expr_pool);
3841 result->kind = REFERENCE;
3842 result->id = 0;
3843 PRE_EXPR_REFERENCE (result) = ref;
3844 break;
3845 }
3846
3847 default:
3848 continue;
3849 }
3850
3851 get_or_alloc_expression_id (result);
3852 add_to_value (get_expr_value_id (result), result);
3853 bitmap_value_insert_into_set (EXP_GEN (block), result);
3854 continue;
3855 }
3856 default:
3857 break;
3858 }
3859 }
3860
3861 if (dump_file && (dump_flags & TDF_DETAILS))
3862 {
3863 print_bitmap_set (dump_file, EXP_GEN (block),
3864 "exp_gen", block->index);
3865 print_bitmap_set (dump_file, PHI_GEN (block),
3866 "phi_gen", block->index);
3867 print_bitmap_set (dump_file, TMP_GEN (block),
3868 "tmp_gen", block->index);
3869 print_bitmap_set (dump_file, AVAIL_OUT (block),
3870 "avail_out", block->index);
3871 }
3872
3873 /* Put the dominator children of BLOCK on the worklist of blocks
3874 to compute available sets for. */
3875 for (son = first_dom_son (CDI_DOMINATORS, block);
3876 son;
3877 son = next_dom_son (CDI_DOMINATORS, son))
3878 worklist[sp++] = son;
3879 }
3880
3881 free (worklist);
3882 }
3883
3884
3885 /* Local state for the eliminate domwalk. */
3886 static vec<gimple> el_to_remove;
3887 static unsigned int el_todo;
3888 static vec<tree> el_avail;
3889 static vec<tree> el_avail_stack;
3890
3891 /* Return a leader for OP that is available at the current point of the
3892 eliminate domwalk. */
3893
3894 static tree
3895 eliminate_avail (tree op)
3896 {
3897 tree valnum = VN_INFO (op)->valnum;
3898 if (TREE_CODE (valnum) == SSA_NAME)
3899 {
3900 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
3901 return valnum;
3902 if (el_avail.length () > SSA_NAME_VERSION (valnum))
3903 return el_avail[SSA_NAME_VERSION (valnum)];
3904 }
3905 else if (is_gimple_min_invariant (valnum))
3906 return valnum;
3907 return NULL_TREE;
3908 }
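
/* Usage sketch (hypothetical SSA versions): if VN_INFO (x_7)->valnum
   is x_2, the leader returned is el_avail[SSA_NAME_VERSION (x_2)],
   i.e. the copy of that value whose definition dominates the current
   point of the walk, or NULL_TREE if none is available yet. */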
3909
3910 /* At the current point of the eliminate domwalk make OP available. */
3911
3912 static void
3913 eliminate_push_avail (tree op)
3914 {
3915 tree valnum = VN_INFO (op)->valnum;
3916 if (TREE_CODE (valnum) == SSA_NAME)
3917 {
3918 if (el_avail.length () <= SSA_NAME_VERSION (valnum))
3919 el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
3920 tree pushop = op;
3921 if (el_avail[SSA_NAME_VERSION (valnum)])
3922 pushop = el_avail[SSA_NAME_VERSION (valnum)];
3923 el_avail_stack.safe_push (pushop);
3924 el_avail[SSA_NAME_VERSION (valnum)] = op;
3925 }
3926 }
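
/* Sketch of the save/restore protocol (made-up names): pushing x_7
   whose valnum is x_2 records the previous leader for x_2, if any,
   on el_avail_stack, so that after_dom_children below can restore it
   once the walk leaves the region dominated by the current block. */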
3927
3928 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
3929 the leader for the expression if insertion was successful. */
3930
3931 static tree
3932 eliminate_insert (gimple_stmt_iterator *gsi, tree val)
3933 {
3934 tree expr = vn_get_expr_for (val);
3935 if (!CONVERT_EXPR_P (expr)
3936 && TREE_CODE (expr) != VIEW_CONVERT_EXPR)
3937 return NULL_TREE;
3938
3939 tree op = TREE_OPERAND (expr, 0);
3940 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
3941 if (!leader)
3942 return NULL_TREE;
3943
3944 tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
3945 gassign *tem = gimple_build_assign (res,
3946 fold_build1 (TREE_CODE (expr),
3947 TREE_TYPE (expr), leader));
3948 gsi_insert_before (gsi, tem, GSI_SAME_STMT);
3949 VN_INFO_GET (res)->valnum = val;
3950
3951 if (TREE_CODE (leader) == SSA_NAME)
3952 gimple_set_plf (SSA_NAME_DEF_STMT (leader), NECESSARY, true);
3953
3954 pre_stats.insertions++;
3955 if (dump_file && (dump_flags & TDF_DETAILS))
3956 {
3957 fprintf (dump_file, "Inserted ");
3958 print_gimple_stmt (dump_file, tem, 0, 0);
3959 }
3960
3961 return res;
3962 }
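
/* Example (made-up names): if SCCVN recorded the expression
   (int) y_2 for VAL and y_2 has an available leader, this emits
     pretmp_9 = (int) y_2;
   before *GSI and returns pretmp_9 as the new leader. */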
3963
3964 class eliminate_dom_walker : public dom_walker
3965 {
3966 public:
3967 eliminate_dom_walker (cdi_direction direction, bool do_pre_)
3968 : dom_walker (direction), do_pre (do_pre_) {}
3969
3970 virtual void before_dom_children (basic_block);
3971 virtual void after_dom_children (basic_block);
3972
3973 bool do_pre;
3974 };
3975
3976 /* Perform elimination for the basic-block B during the domwalk. */
3977
3978 void
3979 eliminate_dom_walker::before_dom_children (basic_block b)
3980 {
3981 /* Mark new bb. */
3982 el_avail_stack.safe_push (NULL_TREE);
3983
3984 /* ??? If we do nothing for unreachable blocks then this will confuse
3985 tailmerging. Eventually we can reduce its reliance on SCCVN now
3986 that we fully copy/constant-propagate (most) things. */
3987
3988 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
3989 {
3990 gphi *phi = gsi.phi ();
3991 tree res = PHI_RESULT (phi);
3992
3993 if (virtual_operand_p (res))
3994 {
3995 gsi_next (&gsi);
3996 continue;
3997 }
3998
3999 tree sprime = eliminate_avail (res);
4000 if (sprime
4001 && sprime != res)
4002 {
4003 if (dump_file && (dump_flags & TDF_DETAILS))
4004 {
4005 fprintf (dump_file, "Replaced redundant PHI node defining ");
4006 print_generic_expr (dump_file, res, 0);
4007 fprintf (dump_file, " with ");
4008 print_generic_expr (dump_file, sprime, 0);
4009 fprintf (dump_file, "\n");
4010 }
4011
4012 /* If we inserted this PHI node ourselves, it's not an elimination. */
4013 if (inserted_exprs
4014 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4015 pre_stats.phis--;
4016 else
4017 pre_stats.eliminations++;
4018
4019 /* If we will propagate into all uses, don't bother to do
4020 anything. */
4021 if (may_propagate_copy (res, sprime))
4022 {
4023 /* Mark the PHI for removal. */
4024 el_to_remove.safe_push (phi);
4025 gsi_next (&gsi);
4026 continue;
4027 }
4028
4029 remove_phi_node (&gsi, false);
4030
4031 if (inserted_exprs
4032 && !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4033 && TREE_CODE (sprime) == SSA_NAME)
4034 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4035
4036 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4037 sprime = fold_convert (TREE_TYPE (res), sprime);
4038 gimple stmt = gimple_build_assign (res, sprime);
4039 /* ??? It cannot yet be necessary (DOM walk). */
4040 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4041
4042 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
4043 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4044 continue;
4045 }
4046
4047 eliminate_push_avail (res);
4048 gsi_next (&gsi);
4049 }
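
/* A sketch of the PHI handling above (made-up names): for
     x_3 = PHI <x_1(2), x_1(3)>
   SCCVN values x_3 the same as x_1; if x_1 is the current leader the
   PHI is removed and, when copy-propagating into all uses of x_3 is
   not possible, replaced by a copy x_3 = x_1 after B's labels. */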
4050
4051 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
4052 !gsi_end_p (gsi);
4053 gsi_next (&gsi))
4054 {
4055 tree sprime = NULL_TREE;
4056 gimple stmt = gsi_stmt (gsi);
4057 tree lhs = gimple_get_lhs (stmt);
4058 if (lhs && TREE_CODE (lhs) == SSA_NAME
4059 && !gimple_has_volatile_ops (stmt)
4060 /* See PR43491. Do not replace a global register variable when
4061 it is the RHS of an assignment. Do replace local register
4062 variables since gcc does not guarantee a local variable will
4063 be allocated in a register.
4064 ??? The fix isn't effective here. This should instead
4065 be ensured by not value-numbering them the same but treating
4066 them like volatiles? */
4067 && !(gimple_assign_single_p (stmt)
4068 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4069 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4070 && is_global_var (gimple_assign_rhs1 (stmt)))))
4071 {
4072 sprime = eliminate_avail (lhs);
4073 if (!sprime)
4074 {
4075 /* If there is no existing usable leader but SCCVN thinks
4076 it has an expression it wants to use as replacement,
4077 insert that. */
4078 tree val = VN_INFO (lhs)->valnum;
4079 if (val != VN_TOP
4080 && TREE_CODE (val) == SSA_NAME
4081 && VN_INFO (val)->needs_insertion
4082 && VN_INFO (val)->expr != NULL_TREE
4083 && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
4084 eliminate_push_avail (sprime);
4085 }
4086
4087 /* If this now constitutes a copy, duplicate points-to
4088 and range info appropriately. This is especially
4089 important for inserted code. See tree-ssa-copy.c
4090 for similar code. */
4091 if (sprime
4092 && TREE_CODE (sprime) == SSA_NAME)
4093 {
4094 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4095 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4096 && SSA_NAME_PTR_INFO (lhs)
4097 && !SSA_NAME_PTR_INFO (sprime))
4098 {
4099 duplicate_ssa_name_ptr_info (sprime,
4100 SSA_NAME_PTR_INFO (lhs));
4101 if (b != sprime_b)
4102 mark_ptr_info_alignment_unknown
4103 (SSA_NAME_PTR_INFO (sprime));
4104 }
4105 else if (!POINTER_TYPE_P (TREE_TYPE (lhs))
4106 && SSA_NAME_RANGE_INFO (lhs)
4107 && !SSA_NAME_RANGE_INFO (sprime)
4108 && b == sprime_b)
4109 duplicate_ssa_name_range_info (sprime,
4110 SSA_NAME_RANGE_TYPE (lhs),
4111 SSA_NAME_RANGE_INFO (lhs));
4112 }
4113
4114 /* Inhibit the use of an inserted PHI on a loop header when
4115 the address of the memory reference is a simple induction
4116 variable. In other cases the vectorizer won't do anything
4117 anyway (either it's loop invariant or a complicated
4118 expression). */
4119 if (sprime
4120 && TREE_CODE (sprime) == SSA_NAME
4121 && do_pre
4122 && flag_tree_loop_vectorize
4123 && loop_outer (b->loop_father)
4124 && has_zero_uses (sprime)
4125 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4126 && gimple_assign_load_p (stmt))
4127 {
4128 gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
4129 basic_block def_bb = gimple_bb (def_stmt);
4130 if (gimple_code (def_stmt) == GIMPLE_PHI
4131 && b->loop_father->header == def_bb)
4132 {
4133 ssa_op_iter iter;
4134 tree op;
4135 bool found = false;
4136 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4137 {
4138 affine_iv iv;
4139 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4140 if (def_bb
4141 && flow_bb_inside_loop_p (b->loop_father, def_bb)
4142 && simple_iv (b->loop_father,
4143 b->loop_father, op, &iv, true))
4144 {
4145 found = true;
4146 break;
4147 }
4148 }
4149 if (found)
4150 {
4151 if (dump_file && (dump_flags & TDF_DETAILS))
4152 {
4153 fprintf (dump_file, "Not replacing ");
4154 print_gimple_expr (dump_file, stmt, 0, 0);
4155 fprintf (dump_file, " with ");
4156 print_generic_expr (dump_file, sprime, 0);
4157 fprintf (dump_file, " which would add a loop"
4158 " carried dependence to loop %d\n",
4159 b->loop_father->num);
4160 }
4161 /* Don't keep sprime available. */
4162 sprime = NULL_TREE;
4163 }
4164 }
4165 }
4166
4167 if (sprime)
4168 {
4169 /* If we can propagate the value computed for LHS into
4170 all uses, don't bother doing anything with this stmt. */
4171 if (may_propagate_copy (lhs, sprime))
4172 {
4173 /* Mark it for removal. */
4174 el_to_remove.safe_push (stmt);
4175
4176 /* ??? Don't count copy/constant propagations. */
4177 if (gimple_assign_single_p (stmt)
4178 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4179 || gimple_assign_rhs1 (stmt) == sprime))
4180 continue;
4181
4182 if (dump_file && (dump_flags & TDF_DETAILS))
4183 {
4184 fprintf (dump_file, "Replaced ");
4185 print_gimple_expr (dump_file, stmt, 0, 0);
4186 fprintf (dump_file, " with ");
4187 print_generic_expr (dump_file, sprime, 0);
4188 fprintf (dump_file, " in all uses of ");
4189 print_gimple_stmt (dump_file, stmt, 0, 0);
4190 }
4191
4192 pre_stats.eliminations++;
4193 continue;
4194 }
4195
4196 /* If this is an assignment from our leader (which
4197 happens when the value-number is a constant)
4198 then there is nothing to do. */
4199 if (gimple_assign_single_p (stmt)
4200 && sprime == gimple_assign_rhs1 (stmt))
4201 continue;
4202
4203 /* Else replace its RHS. */
4204 bool can_make_abnormal_goto
4205 = is_gimple_call (stmt)
4206 && stmt_can_make_abnormal_goto (stmt);
4207
4208 if (dump_file && (dump_flags & TDF_DETAILS))
4209 {
4210 fprintf (dump_file, "Replaced ");
4211 print_gimple_expr (dump_file, stmt, 0, 0);
4212 fprintf (dump_file, " with ");
4213 print_generic_expr (dump_file, sprime, 0);
4214 fprintf (dump_file, " in ");
4215 print_gimple_stmt (dump_file, stmt, 0, 0);
4216 }
4217
4218 if (TREE_CODE (sprime) == SSA_NAME)
4219 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4220 NECESSARY, true);
4221
4222 pre_stats.eliminations++;
4223 gimple orig_stmt = stmt;
4224 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4225 TREE_TYPE (sprime)))
4226 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4227 tree vdef = gimple_vdef (stmt);
4228 tree vuse = gimple_vuse (stmt);
4229 propagate_tree_value_into_stmt (&gsi, sprime);
4230 stmt = gsi_stmt (gsi);
4231 update_stmt (stmt);
4232 if (vdef != gimple_vdef (stmt))
4233 VN_INFO (vdef)->valnum = vuse;
4234
4235 /* If we removed EH side-effects from the statement, clean
4236 its EH information. */
4237 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4238 {
4239 bitmap_set_bit (need_eh_cleanup,
4240 gimple_bb (stmt)->index);
4241 if (dump_file && (dump_flags & TDF_DETAILS))
4242 fprintf (dump_file, " Removed EH side-effects.\n");
4243 }
4244
4245 /* Likewise for AB side-effects. */
4246 if (can_make_abnormal_goto
4247 && !stmt_can_make_abnormal_goto (stmt))
4248 {
4249 bitmap_set_bit (need_ab_cleanup,
4250 gimple_bb (stmt)->index);
4251 if (dump_file && (dump_flags & TDF_DETAILS))
4252 fprintf (dump_file, " Removed AB side-effects.\n");
4253 }
4254
4255 continue;
4256 }
4257 }
4258
4259 /* If the statement is a scalar store, see if the expression
4260 has the same value number as its rhs. If so, the store is
4261 dead. */
4262 if (gimple_assign_single_p (stmt)
4263 && !gimple_has_volatile_ops (stmt)
4264 && !is_gimple_reg (gimple_assign_lhs (stmt))
4265 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4266 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
4267 {
4268 tree val;
4269 tree rhs = gimple_assign_rhs1 (stmt);
4270 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4271 gimple_vuse (stmt), VN_WALK, NULL);
4272 if (TREE_CODE (rhs) == SSA_NAME)
4273 rhs = VN_INFO (rhs)->valnum;
4274 if (val
4275 && operand_equal_p (val, rhs, 0))
4276 {
4277 if (dump_file && (dump_flags & TDF_DETAILS))
4278 {
4279 fprintf (dump_file, "Deleted redundant store ");
4280 print_gimple_stmt (dump_file, stmt, 0, 0);
4281 }
4282
4283 /* Queue stmt for removal. */
4284 el_to_remove.safe_push (stmt);
4285 continue;
4286 }
4287 }
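
/* Example (a sketch): for the store
     *p_1 = x_2;
   if looking up *p_1 at the store's VUSE already yields the value of
   x_2, the store changes nothing and is queued for removal. */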
4288
4289 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
4290 bool was_noreturn = (is_gimple_call (stmt)
4291 && gimple_call_noreturn_p (stmt));
4292 tree vdef = gimple_vdef (stmt);
4293 tree vuse = gimple_vuse (stmt);
4294
4295 /* If we didn't replace the whole stmt (or propagate the result
4296 into all uses), replace all uses on this stmt with their
4297 leaders. */
4298 use_operand_p use_p;
4299 ssa_op_iter iter;
4300 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4301 {
4302 tree use = USE_FROM_PTR (use_p);
4303 /* ??? The call code above leaves stmt operands un-updated. */
4304 if (TREE_CODE (use) != SSA_NAME)
4305 continue;
4306 tree sprime = eliminate_avail (use);
4307 if (sprime && sprime != use
4308 && may_propagate_copy (use, sprime)
4309 /* We substitute into debug stmts to avoid excessive
4310 debug temporaries created by removed stmts, but we need
4311 to avoid doing so for inserted sprimes as we never want
4312 to create debug temporaries for them. */
4313 && (!inserted_exprs
4314 || TREE_CODE (sprime) != SSA_NAME
4315 || !is_gimple_debug (stmt)
4316 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
4317 {
4318 propagate_value (use_p, sprime);
4319 gimple_set_modified (stmt, true);
4320 if (TREE_CODE (sprime) == SSA_NAME
4321 && !is_gimple_debug (stmt))
4322 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4323 NECESSARY, true);
4324 }
4325 }
4326
4327 /* Visit indirect calls and turn them into direct calls if
4328 possible using the devirtualization machinery. */
4329 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4330 {
4331 tree fn = gimple_call_fn (call_stmt);
4332 if (fn
4333 && flag_devirtualize
4334 && virtual_method_call_p (fn))
4335 {
4336 tree otr_type = obj_type_ref_class (fn);
4337 tree instance;
4338 ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
4339 bool final;
4340
4341 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
4342
4343 vec <cgraph_node *>targets
4344 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
4345 tree_to_uhwi
4346 (OBJ_TYPE_REF_TOKEN (fn)),
4347 context,
4348 &final);
4349 if (dump_enabled_p ())
4350 dump_possible_polymorphic_call_targets (dump_file,
4351 obj_type_ref_class (fn),
4352 tree_to_uhwi
4353 (OBJ_TYPE_REF_TOKEN (fn)),
4354 context);
4355 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4356 {
4357 tree fn;
4358 if (targets.length () == 1)
4359 fn = targets[0]->decl;
4360 else
4361 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4362 if (dump_enabled_p ())
4363 {
4364 location_t loc = gimple_location_safe (stmt);
4365 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4366 "converting indirect call to "
4367 "function %s\n",
4368 cgraph_node::get (fn)->name ());
4369 }
4370 gimple_call_set_fndecl (call_stmt, fn);
4371 gimple_set_modified (stmt, true);
4372 }
4373 }
4374 }
4375
4376 if (gimple_modified_p (stmt))
4377 {
4378 /* A formerly non-invariant ADDR_EXPR (hence on a separate stmt)
4379 may have been turned into an invariant one; recompute its invariance. */
4380 if (gimple_assign_single_p (stmt)
4381 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
4382 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
4383 gimple old_stmt = stmt;
4384 if (is_gimple_call (stmt))
4385 {
4386 /* ??? Only fold calls inplace for now; this may create new
4387 SSA names which in turn will confuse the free_scc_vn SSA name
4388 release code. */
4389 fold_stmt_inplace (&gsi);
4390 /* When changing a call into a noreturn call, cfg cleanup
4391 is needed to fix up the noreturn call. */
4392 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4393 el_todo |= TODO_cleanup_cfg;
4394 }
4395 else
4396 {
4397 fold_stmt (&gsi);
4398 stmt = gsi_stmt (gsi);
4399 if ((gimple_code (stmt) == GIMPLE_COND
4400 && (gimple_cond_true_p (as_a <gcond *> (stmt))
4401 || gimple_cond_false_p (as_a <gcond *> (stmt))))
4402 || (gimple_code (stmt) == GIMPLE_SWITCH
4403 && TREE_CODE (gimple_switch_index (
4404 as_a <gswitch *> (stmt)))
4405 == INTEGER_CST))
4406 el_todo |= TODO_cleanup_cfg;
4407 }
4408 /* If we removed EH side-effects from the statement, clean
4409 its EH information. */
4410 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
4411 {
4412 bitmap_set_bit (need_eh_cleanup,
4413 gimple_bb (stmt)->index);
4414 if (dump_file && (dump_flags & TDF_DETAILS))
4415 fprintf (dump_file, " Removed EH side-effects.\n");
4416 }
4417 /* Likewise for AB side-effects. */
4418 if (can_make_abnormal_goto
4419 && !stmt_can_make_abnormal_goto (stmt))
4420 {
4421 bitmap_set_bit (need_ab_cleanup,
4422 gimple_bb (stmt)->index);
4423 if (dump_file && (dump_flags & TDF_DETAILS))
4424 fprintf (dump_file, " Removed AB side-effects.\n");
4425 }
4426 update_stmt (stmt);
4427 if (vdef != gimple_vdef (stmt))
4428 VN_INFO (vdef)->valnum = vuse;
4429 }
4430
4431 /* Make new values available - for a fully redundant LHS we
4432 continue with the next stmt above and skip this. */
4433 def_operand_p defp;
4434 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4435 eliminate_push_avail (DEF_FROM_PTR (defp));
4436 }
4437
4438 /* Replace destination PHI arguments. */
4439 edge_iterator ei;
4440 edge e;
4441 FOR_EACH_EDGE (e, ei, b->succs)
4442 {
4443 for (gphi_iterator gsi = gsi_start_phis (e->dest);
4444 !gsi_end_p (gsi);
4445 gsi_next (&gsi))
4446 {
4447 gphi *phi = gsi.phi ();
4448 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
4449 tree arg = USE_FROM_PTR (use_p);
4450 if (TREE_CODE (arg) != SSA_NAME
4451 || virtual_operand_p (arg))
4452 continue;
4453 tree sprime = eliminate_avail (arg);
4454 if (sprime && may_propagate_copy (arg, sprime))
4455 {
4456 propagate_value (use_p, sprime);
4457 if (TREE_CODE (sprime) == SSA_NAME)
4458 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4459 }
4460 }
4461 }
4462 }
4463
4464 /* Make leaders that have gone out of scope no longer available. */
4465
4466 void
4467 eliminate_dom_walker::after_dom_children (basic_block)
4468 {
4469 tree entry;
4470 while ((entry = el_avail_stack.pop ()) != NULL_TREE)
4471 {
4472 tree valnum = VN_INFO (entry)->valnum;
4473 tree old = el_avail[SSA_NAME_VERSION (valnum)];
4474 if (old == entry)
4475 el_avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
4476 else
4477 el_avail[SSA_NAME_VERSION (valnum)] = entry;
4478 }
4479 }
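
/* Unwind sketch: entries pushed since this block's NULL_TREE marker
   are popped; popping an entry that is still the current leader
   clears the slot, while popping a saved previous leader (the pushop
   from eliminate_push_avail) reinstates it. */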
4480
4481 /* Eliminate fully redundant computations. */
4482
4483 static unsigned int
4484 eliminate (bool do_pre)
4485 {
4486 gimple_stmt_iterator gsi;
4487 gimple stmt;
4488
4489 need_eh_cleanup = BITMAP_ALLOC (NULL);
4490 need_ab_cleanup = BITMAP_ALLOC (NULL);
4491
4492 el_to_remove.create (0);
4493 el_todo = 0;
4494 el_avail.create (num_ssa_names);
4495 el_avail_stack.create (0);
4496
4497 eliminate_dom_walker (CDI_DOMINATORS,
4498 do_pre).walk (cfun->cfg->x_entry_block_ptr);
4499
4500 el_avail.release ();
4501 el_avail_stack.release ();
4502
4503 /* We cannot remove stmts during the BB walk, and especially cannot
4504 release SSA names there, as this confuses the VN machinery. The stmts ending
4505 up in el_to_remove are either stores or simple copies.
4506 Remove stmts in reverse order to make debug stmt creation possible. */
4507 while (!el_to_remove.is_empty ())
4508 {
4509 stmt = el_to_remove.pop ();
4510
4511 if (dump_file && (dump_flags & TDF_DETAILS))
4512 {
4513 fprintf (dump_file, "Removing dead stmt ");
4514 print_gimple_stmt (dump_file, stmt, 0, 0);
4515 }
4516
4517 tree lhs;
4518 if (gimple_code (stmt) == GIMPLE_PHI)
4519 lhs = gimple_phi_result (stmt);
4520 else
4521 lhs = gimple_get_lhs (stmt);
4522
4523 if (inserted_exprs
4524 && TREE_CODE (lhs) == SSA_NAME)
4525 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4526
4527 gsi = gsi_for_stmt (stmt);
4528 if (gimple_code (stmt) == GIMPLE_PHI)
4529 remove_phi_node (&gsi, true);
4530 else
4531 {
4532 basic_block bb = gimple_bb (stmt);
4533 unlink_stmt_vdef (stmt);
4534 if (gsi_remove (&gsi, true))
4535 bitmap_set_bit (need_eh_cleanup, bb->index);
4536 release_defs (stmt);
4537 }
4538
4539 /* Removing a stmt may expose a forwarder block. */
4540 el_todo |= TODO_cleanup_cfg;
4541 }
4542 el_to_remove.release ();
4543
4544 return el_todo;
4545 }
4546
4547 /* Perform CFG cleanups made necessary by elimination. */
4548
4549 static unsigned
4550 fini_eliminate (void)
4551 {
4552 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
4553 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
4554
4555 if (do_eh_cleanup)
4556 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
4557
4558 if (do_ab_cleanup)
4559 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
4560
4561 BITMAP_FREE (need_eh_cleanup);
4562 BITMAP_FREE (need_ab_cleanup);
4563
4564 if (do_eh_cleanup || do_ab_cleanup)
4565 return TODO_cleanup_cfg;
4566 return 0;
4567 }
4568
4569 /* Borrow a bit of tree-ssa-dce.c for the moment.
4570 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4571 this may be a bit faster, and we may want critical edges kept split. */
4572
4573 /* If OP's defining statement has not already been determined to be necessary,
4574 mark that statement necessary. Return the stmt, if it is newly
4575 necessary. */
4576
4577 static inline gimple
4578 mark_operand_necessary (tree op)
4579 {
4580 gimple stmt;
4581
4582 gcc_assert (op);
4583
4584 if (TREE_CODE (op) != SSA_NAME)
4585 return NULL;
4586
4587 stmt = SSA_NAME_DEF_STMT (op);
4588 gcc_assert (stmt);
4589
4590 if (gimple_plf (stmt, NECESSARY)
4591 || gimple_nop_p (stmt))
4592 return NULL;
4593
4594 gimple_set_plf (stmt, NECESSARY, true);
4595 return stmt;
4596 }
4597
4598 /* Because we don't exactly follow the standard PRE algorithm, sometimes
4599 decide not to insert PHI nodes, and because value numbering of casts isn't
4600 perfect, we sometimes end up inserting dead code. This simple DCE-like
4601 pass removes any insertions we made that weren't actually used. */
4602
4603 static void
4604 remove_dead_inserted_code (void)
4605 {
4606 bitmap worklist;
4607 unsigned i;
4608 bitmap_iterator bi;
4609 gimple t;
4610
4611 worklist = BITMAP_ALLOC (NULL);
4612 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4613 {
4614 t = SSA_NAME_DEF_STMT (ssa_name (i));
4615 if (gimple_plf (t, NECESSARY))
4616 bitmap_set_bit (worklist, i);
4617 }
4618 while (!bitmap_empty_p (worklist))
4619 {
4620 i = bitmap_first_set_bit (worklist);
4621 bitmap_clear_bit (worklist, i);
4622 t = SSA_NAME_DEF_STMT (ssa_name (i));
4623
4624 /* PHI nodes are somewhat special in that each PHI alternative has
4625 data and control dependencies. All the statements feeding the
4626 PHI node's arguments are always necessary. */
4627 if (gimple_code (t) == GIMPLE_PHI)
4628 {
4629 unsigned k;
4630
4631 for (k = 0; k < gimple_phi_num_args (t); k++)
4632 {
4633 tree arg = PHI_ARG_DEF (t, k);
4634 if (TREE_CODE (arg) == SSA_NAME)
4635 {
4636 gimple n = mark_operand_necessary (arg);
4637 if (n)
4638 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4639 }
4640 }
4641 }
4642 else
4643 {
4644 /* Propagate through the operands. Examine all the USE, VUSE and
4645 VDEF operands in this statement. Mark all the statements
4646 which feed this statement's uses as necessary. */
4647 ssa_op_iter iter;
4648 tree use;
4649
4650 /* The operands of VDEF expressions are also needed as they
4651 represent potential definitions that may reach this
4652 statement (VDEF operands allow us to follow def-def
4653 links). */
4654
4655 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4656 {
4657 gimple n = mark_operand_necessary (use);
4658 if (n)
4659 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4660 }
4661 }
4662 }
4663
4664 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4665 {
4666 t = SSA_NAME_DEF_STMT (ssa_name (i));
4667 if (!gimple_plf (t, NECESSARY))
4668 {
4669 gimple_stmt_iterator gsi;
4670
4671 if (dump_file && (dump_flags & TDF_DETAILS))
4672 {
4673 fprintf (dump_file, "Removing unnecessary insertion:");
4674 print_gimple_stmt (dump_file, t, 0, 0);
4675 }
4676
4677 gsi = gsi_for_stmt (t);
4678 if (gimple_code (t) == GIMPLE_PHI)
4679 remove_phi_node (&gsi, true);
4680 else
4681 {
4682 gsi_remove (&gsi, true);
4683 release_defs (t);
4684 }
4685 }
4686 }
4687 BITMAP_FREE (worklist);
4688 }
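
/* In effect this is a small mark-and-sweep DCE restricted to
   inserted_exprs: defs already marked NECESSARY seed the worklist,
   the statements feeding them are marked transitively, and any
   insertion left unmarked (e.g. a pretmp made unused by elimination)
   is removed. */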
4689
4690
4691 /* Initialize data structures used by PRE. */
4692
4693 static void
4694 init_pre (void)
4695 {
4696 basic_block bb;
4697
4698 next_expression_id = 1;
4699 expressions.create (0);
4700 expressions.safe_push (NULL);
4701 value_expressions.create (get_max_value_id () + 1);
4702 value_expressions.safe_grow_cleared (get_max_value_id () + 1);
4703 name_to_id.create (0);
4704
4705 inserted_exprs = BITMAP_ALLOC (NULL);
4706
4707 connect_infinite_loops_to_exit ();
4708 memset (&pre_stats, 0, sizeof (pre_stats));
4709
4710 postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
4711 postorder_num = inverted_post_order_compute (postorder);
4712
4713 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4714
4715 calculate_dominance_info (CDI_POST_DOMINATORS);
4716 calculate_dominance_info (CDI_DOMINATORS);
4717
4718 bitmap_obstack_initialize (&grand_bitmap_obstack);
4719 phi_translate_table = new hash_table<expr_pred_trans_d> (5110);
4720 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4721 bitmap_set_pool = create_alloc_pool ("Bitmap sets",
4722 sizeof (struct bitmap_set), 30);
4723 pre_expr_pool = create_alloc_pool ("pre_expr nodes",
4724 sizeof (struct pre_expr_d), 30);
4725 FOR_ALL_BB_FN (bb, cfun)
4726 {
4727 EXP_GEN (bb) = bitmap_set_new ();
4728 PHI_GEN (bb) = bitmap_set_new ();
4729 TMP_GEN (bb) = bitmap_set_new ();
4730 AVAIL_OUT (bb) = bitmap_set_new ();
4731 }
4732 }
4733
4734
4735 /* Deallocate data structures used by PRE. */
4736
4737 static void
4738 fini_pre ()
4739 {
4740 free (postorder);
4741 value_expressions.release ();
4742 BITMAP_FREE (inserted_exprs);
4743 bitmap_obstack_release (&grand_bitmap_obstack);
4744 free_alloc_pool (bitmap_set_pool);
4745 free_alloc_pool (pre_expr_pool);
4746 delete phi_translate_table;
4747 phi_translate_table = NULL;
4748 delete expression_to_id;
4749 expression_to_id = NULL;
4750 name_to_id.release ();
4751
4752 free_aux_for_blocks ();
4753
4754 free_dominance_info (CDI_POST_DOMINATORS);
4755 }
4756
4757 namespace {
4758
4759 const pass_data pass_data_pre =
4760 {
4761 GIMPLE_PASS, /* type */
4762 "pre", /* name */
4763 OPTGROUP_NONE, /* optinfo_flags */
4764 TV_TREE_PRE, /* tv_id */
4765 /* PROP_no_crit_edges is ensured by placing pass_split_crit_edges before
4766 pass_pre. */
4767 ( PROP_no_crit_edges | PROP_cfg | PROP_ssa ), /* properties_required */
4768 0, /* properties_provided */
4769 PROP_no_crit_edges, /* properties_destroyed */
4770 TODO_rebuild_alias, /* todo_flags_start */
4771 0, /* todo_flags_finish */
4772 };
4773
4774 class pass_pre : public gimple_opt_pass
4775 {
4776 public:
4777 pass_pre (gcc::context *ctxt)
4778 : gimple_opt_pass (pass_data_pre, ctxt)
4779 {}
4780
4781 /* opt_pass methods: */
4782 virtual bool gate (function *) { return flag_tree_pre != 0; }
4783 virtual unsigned int execute (function *);
4784
4785 }; // class pass_pre
4786
4787 unsigned int
4788 pass_pre::execute (function *fun)
4789 {
4790 unsigned int todo = 0;
4791
4792 do_partial_partial =
4793 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4794
4795 /* This has to happen before SCCVN runs because
4796 loop_optimizer_init may create new phis, etc. */
4797 loop_optimizer_init (LOOPS_NORMAL);
4798
4799 if (!run_scc_vn (VN_WALK))
4800 {
4801 loop_optimizer_finalize ();
4802 return 0;
4803 }
4804
4805 init_pre ();
4806 scev_initialize ();
4807
4808 /* Collect and value number expressions computed in each basic block. */
4809 compute_avail ();
4810
4811 /* Insert can get quite slow on an incredibly large number of basic
4812 blocks due to some quadratic behavior. Until this behavior is
4813 fixed, don't run it when we have an incredibly large number of
4814 bb's. If we aren't going to run insert, there is no point in
4815 computing ANTIC, either, even though it's plenty fast. */
4816 if (n_basic_blocks_for_fn (fun) < 4000)
4817 {
4818 compute_antic ();
4819 insert ();
4820 }
4821
4822 /* Make sure to remove fake edges before committing our inserts.
4823 This makes sure we don't end up with extra critical edges that
4824 we would need to split. */
4825 remove_fake_exit_edges ();
4826 gsi_commit_edge_inserts ();
4827
4828 /* eliminate () folds statements, which might (though it should not...)
4829 end up not keeping virtual operands up-to-date. */
4830 gcc_assert (!need_ssa_update_p (fun));
4831
4832 /* Remove all the redundant expressions. */
4833 todo |= eliminate (true);
4834
4835 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4836 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4837 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4838 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4839
4840 clear_expression_ids ();
4841 remove_dead_inserted_code ();
4842
4843 scev_finalize ();
4844 fini_pre ();
4845 todo |= fini_eliminate ();
4846 loop_optimizer_finalize ();
4847
4848 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4849 case we can merge the block with the remaining predecessor of the block.
4850 It should either:
4851 - call merge_blocks after each tail merge iteration
4852 - call merge_blocks after all tail merge iterations
4853 - mark TODO_cleanup_cfg when necessary
4854 - share the cfg cleanup with fini_pre. */
4855 todo |= tail_merge_optimize (todo);
4856
4857 free_scc_vn ();
4858
4859 /* Tail merging invalidates the virtual SSA web, together with
4860 cfg-cleanup opportunities exposed by PRE this will wreck the
4861 SSA updating machinery. So make sure to run update-ssa
4862 manually, before eventually scheduling cfg-cleanup as part of
4863 the todo. */
4864 update_ssa (TODO_update_ssa_only_virtuals);
4865
4866 return todo;
4867 }
4868
4869 } // anon namespace
4870
4871 gimple_opt_pass *
4872 make_pass_pre (gcc::context *ctxt)
4873 {
4874 return new pass_pre (ctxt);
4875 }
4876
4877 namespace {
4878
4879 const pass_data pass_data_fre =
4880 {
4881 GIMPLE_PASS, /* type */
4882 "fre", /* name */
4883 OPTGROUP_NONE, /* optinfo_flags */
4884 TV_TREE_FRE, /* tv_id */
4885 ( PROP_cfg | PROP_ssa ), /* properties_required */
4886 0, /* properties_provided */
4887 0, /* properties_destroyed */
4888 0, /* todo_flags_start */
4889 0, /* todo_flags_finish */
4890 };
4891
4892 class pass_fre : public gimple_opt_pass
4893 {
4894 public:
4895 pass_fre (gcc::context *ctxt)
4896 : gimple_opt_pass (pass_data_fre, ctxt)
4897 {}
4898
4899 /* opt_pass methods: */
4900 opt_pass * clone () { return new pass_fre (m_ctxt); }
4901 virtual bool gate (function *) { return flag_tree_fre != 0; }
4902 virtual unsigned int execute (function *);
4903
4904 }; // class pass_fre
4905
4906 unsigned int
4907 pass_fre::execute (function *fun)
4908 {
4909 unsigned int todo = 0;
4910
4911 if (!run_scc_vn (VN_WALKREWRITE))
4912 return 0;
4913
4914 memset (&pre_stats, 0, sizeof (pre_stats));
4915
4916 /* Remove all the redundant expressions. */
4917 todo |= eliminate (false);
4918
4919 todo |= fini_eliminate ();
4920
4921 free_scc_vn ();
4922
4923 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4924 statistics_counter_event (fun, "Eliminated", pre_stats.eliminations);
4925
4926 return todo;
4927 }
4928
4929 } // anon namespace
4930
4931 gimple_opt_pass *
4932 make_pass_fre (gcc::context *ctxt)
4933 {
4934 return new pass_fre (ctxt);
4935 }