/* SCC value numbering for trees
   Copyright (C) 2006-2020 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "splay-tree.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
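/* As a small illustrative sketch, consider a trivial cycle

       # i_1 = PHI <0(entry), i_2(latch)>
       i_2 = i_1 + 0;

   With scheme (2.) the optimistic walk ignores the not-yet-visited
   backedge value (VN_TOP), value numbers i_1 to 0 and then i_2 to 0,
   and the next iteration over the SCC confirms the fixed point, so
   both names are proven equal to 0.  Scheme (1.) reaches the same
   result but only after re-walking all blocks with a cleared table.  */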
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
227 /* vn_reference hashtable helpers. */
229 struct vn_reference_hasher
: nofree_ptr_hash
<vn_reference_s
>
231 static inline hashval_t
hash (const vn_reference_s
*);
232 static inline bool equal (const vn_reference_s
*, const vn_reference_s
*);
235 /* Return the hashcode for a given reference operation P1. */
238 vn_reference_hasher::hash (const vn_reference_s
*vr1
)
240 return vr1
->hashcode
;
244 vn_reference_hasher::equal (const vn_reference_s
*v
, const vn_reference_s
*c
)
246 return v
== c
|| vn_reference_eq (v
, c
);
249 typedef hash_table
<vn_reference_hasher
> vn_reference_table_type
;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type
;
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);
tree
vn_valueize_wrapper (tree t, void* context ATTRIBUTE_UNUSED)
{
  return vn_valueize (t);
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, alias_set_type, tree,
	   vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;
      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;
      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;
      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
642 /* Compute a hash for the reference operation VR1 and return it. */
645 vn_reference_compute_hash (const vn_reference_t vr1
)
647 inchash::hash hstate
;
650 vn_reference_op_t vro
;
654 FOR_EACH_VEC_ELT (vr1
->operands
, i
, vro
)
656 if (vro
->opcode
== MEM_REF
)
658 else if (vro
->opcode
!= ADDR_EXPR
)
660 if (maybe_ne (vro
->off
, -1))
662 if (known_eq (off
, -1))
668 if (maybe_ne (off
, -1)
669 && maybe_ne (off
, 0))
670 hstate
.add_poly_int (off
);
673 && vro
->opcode
== ADDR_EXPR
)
677 tree op
= TREE_OPERAND (vro
->op0
, 0);
678 hstate
.add_int (TREE_CODE (op
));
679 inchash::add_expr (op
, hstate
);
683 vn_reference_op_compute_hash (vro
, hstate
);
686 result
= hstate
.end ();
687 /* ??? We would ICE later if we hash instead of adding that in. */
689 result
+= SSA_NAME_VERSION (vr1
->vuse
);
694 /* Return true if reference operations VR1 and VR2 are equivalent. This
695 means they have the same set of operands and vuses. */
698 vn_reference_eq (const_vn_reference_t
const vr1
, const_vn_reference_t
const vr2
)
702 /* Early out if this is not a hash collision. */
703 if (vr1
->hashcode
!= vr2
->hashcode
)
706 /* The VOP needs to be the same. */
707 if (vr1
->vuse
!= vr2
->vuse
)
710 /* If the operands are the same we are done. */
711 if (vr1
->operands
== vr2
->operands
)
714 if (!expressions_equal_p (TYPE_SIZE (vr1
->type
), TYPE_SIZE (vr2
->type
)))
717 if (INTEGRAL_TYPE_P (vr1
->type
)
718 && INTEGRAL_TYPE_P (vr2
->type
))
720 if (TYPE_PRECISION (vr1
->type
) != TYPE_PRECISION (vr2
->type
))
723 else if (INTEGRAL_TYPE_P (vr1
->type
)
724 && (TYPE_PRECISION (vr1
->type
)
725 != TREE_INT_CST_LOW (TYPE_SIZE (vr1
->type
))))
727 else if (INTEGRAL_TYPE_P (vr2
->type
)
728 && (TYPE_PRECISION (vr2
->type
)
729 != TREE_INT_CST_LOW (TYPE_SIZE (vr2
->type
))))
736 poly_int64 off1
= 0, off2
= 0;
737 vn_reference_op_t vro1
, vro2
;
738 vn_reference_op_s tem1
, tem2
;
739 bool deref1
= false, deref2
= false;
740 for (; vr1
->operands
.iterate (i
, &vro1
); i
++)
742 if (vro1
->opcode
== MEM_REF
)
744 /* Do not look through a storage order barrier. */
745 else if (vro1
->opcode
== VIEW_CONVERT_EXPR
&& vro1
->reverse
)
747 if (known_eq (vro1
->off
, -1))
751 for (; vr2
->operands
.iterate (j
, &vro2
); j
++)
753 if (vro2
->opcode
== MEM_REF
)
755 /* Do not look through a storage order barrier. */
756 else if (vro2
->opcode
== VIEW_CONVERT_EXPR
&& vro2
->reverse
)
758 if (known_eq (vro2
->off
, -1))
762 if (maybe_ne (off1
, off2
))
764 if (deref1
&& vro1
->opcode
== ADDR_EXPR
)
766 memset (&tem1
, 0, sizeof (tem1
));
767 tem1
.op0
= TREE_OPERAND (vro1
->op0
, 0);
768 tem1
.type
= TREE_TYPE (tem1
.op0
);
769 tem1
.opcode
= TREE_CODE (tem1
.op0
);
773 if (deref2
&& vro2
->opcode
== ADDR_EXPR
)
775 memset (&tem2
, 0, sizeof (tem2
));
776 tem2
.op0
= TREE_OPERAND (vro2
->op0
, 0);
777 tem2
.type
= TREE_TYPE (tem2
.op0
);
778 tem2
.opcode
= TREE_CODE (tem2
.op0
);
782 if (deref1
!= deref2
)
784 if (!vn_reference_op_eq (vro1
, vro2
))
789 while (vr1
->operands
.length () != i
790 || vr2
->operands
.length () != j
);
795 /* Copy the operations present in load/store REF into RESULT, a vector of
796 vn_reference_op_s's. */
799 copy_reference_ops_from_ref (tree ref
, vec
<vn_reference_op_s
> *result
)
801 /* For non-calls, store the information that makes up the address. */
805 vn_reference_op_s temp
;
807 memset (&temp
, 0, sizeof (temp
));
808 temp
.type
= TREE_TYPE (ref
);
809 temp
.opcode
= TREE_CODE (ref
);
815 temp
.op0
= TREE_OPERAND (ref
, 1);
818 temp
.op0
= TREE_OPERAND (ref
, 1);
822 /* The base address gets its own vn_reference_op_s structure. */
823 temp
.op0
= TREE_OPERAND (ref
, 1);
824 if (!mem_ref_offset (ref
).to_shwi (&temp
.off
))
826 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
827 temp
.base
= MR_DEPENDENCE_BASE (ref
);
828 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
831 /* The base address gets its own vn_reference_op_s structure. */
832 temp
.op0
= TMR_INDEX (ref
);
833 temp
.op1
= TMR_STEP (ref
);
834 temp
.op2
= TMR_OFFSET (ref
);
835 temp
.clique
= MR_DEPENDENCE_CLIQUE (ref
);
836 temp
.base
= MR_DEPENDENCE_BASE (ref
);
837 result
->safe_push (temp
);
838 memset (&temp
, 0, sizeof (temp
));
839 temp
.type
= NULL_TREE
;
840 temp
.opcode
= ERROR_MARK
;
841 temp
.op0
= TMR_INDEX2 (ref
);
845 /* Record bits, position and storage order. */
846 temp
.op0
= TREE_OPERAND (ref
, 1);
847 temp
.op1
= TREE_OPERAND (ref
, 2);
848 if (!multiple_p (bit_field_offset (ref
), BITS_PER_UNIT
, &temp
.off
))
850 temp
.reverse
= REF_REVERSE_STORAGE_ORDER (ref
);
853 /* The field decl is enough to unambiguously specify the field,
854 a matching type is not necessary and a mismatching type
855 is always a spurious difference. */
856 temp
.type
= NULL_TREE
;
857 temp
.op0
= TREE_OPERAND (ref
, 1);
858 temp
.op1
= TREE_OPERAND (ref
, 2);
860 tree this_offset
= component_ref_field_offset (ref
);
862 && poly_int_tree_p (this_offset
))
864 tree bit_offset
= DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref
, 1));
865 if (TREE_INT_CST_LOW (bit_offset
) % BITS_PER_UNIT
== 0)
868 = (wi::to_poly_offset (this_offset
)
869 + (wi::to_offset (bit_offset
) >> LOG2_BITS_PER_UNIT
));
870 /* Probibit value-numbering zero offset components
871 of addresses the same before the pass folding
872 __builtin_object_size had a chance to run
873 (checking cfun->after_inlining does the
875 if (TREE_CODE (orig
) != ADDR_EXPR
877 || cfun
->after_inlining
)
878 off
.to_shwi (&temp
.off
);
883 case ARRAY_RANGE_REF
:
886 tree eltype
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref
, 0)));
887 /* Record index as operand. */
888 temp
.op0
= TREE_OPERAND (ref
, 1);
889 /* Always record lower bounds and element size. */
890 temp
.op1
= array_ref_low_bound (ref
);
891 /* But record element size in units of the type alignment. */
892 temp
.op2
= TREE_OPERAND (ref
, 3);
893 temp
.align
= eltype
->type_common
.align
;
895 temp
.op2
= size_binop (EXACT_DIV_EXPR
, TYPE_SIZE_UNIT (eltype
),
896 size_int (TYPE_ALIGN_UNIT (eltype
)));
897 if (poly_int_tree_p (temp
.op0
)
898 && poly_int_tree_p (temp
.op1
)
899 && TREE_CODE (temp
.op2
) == INTEGER_CST
)
901 poly_offset_int off
= ((wi::to_poly_offset (temp
.op0
)
902 - wi::to_poly_offset (temp
.op1
))
903 * wi::to_offset (temp
.op2
)
904 * vn_ref_op_align_unit (&temp
));
905 off
.to_shwi (&temp
.off
);
910 if (DECL_HARD_REGISTER (ref
))
919 /* Canonicalize decls to MEM[&decl] which is what we end up with
920 when valueizing MEM[ptr] with ptr = &decl. */
921 temp
.opcode
= MEM_REF
;
922 temp
.op0
= build_int_cst (build_pointer_type (TREE_TYPE (ref
)), 0);
924 result
->safe_push (temp
);
925 temp
.opcode
= ADDR_EXPR
;
926 temp
.op0
= build1 (ADDR_EXPR
, TREE_TYPE (temp
.op0
), ref
);
927 temp
.type
= TREE_TYPE (temp
.op0
);
942 if (is_gimple_min_invariant (ref
))
948 /* These are only interesting for their operands, their
949 existence, and their type. They will never be the last
950 ref in the chain of references (IE they require an
951 operand), so we don't have to put anything
952 for op* as it will be handled by the iteration */
956 case VIEW_CONVERT_EXPR
:
958 temp
.reverse
= storage_order_barrier_p (ref
);
961 /* This is only interesting for its constant offset. */
962 temp
.off
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref
)));
967 result
->safe_push (temp
);
969 if (REFERENCE_CLASS_P (ref
)
970 || TREE_CODE (ref
) == MODIFY_EXPR
971 || TREE_CODE (ref
) == WITH_SIZE_EXPR
972 || (TREE_CODE (ref
) == ADDR_EXPR
973 && !is_gimple_min_invariant (ref
)))
974 ref
= TREE_OPERAND (ref
, 0);
980 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
981 operands in *OPS, the reference alias set SET and the reference type TYPE.
982 Return true if something useful was produced. */
985 ao_ref_init_from_vn_reference (ao_ref
*ref
,
986 alias_set_type set
, alias_set_type base_set
,
987 tree type
, vec
<vn_reference_op_s
> ops
)
989 vn_reference_op_t op
;
991 tree base
= NULL_TREE
;
993 poly_offset_int offset
= 0;
994 poly_offset_int max_size
;
995 poly_offset_int size
= -1;
996 tree size_tree
= NULL_TREE
;
998 /* First get the final access size from just the outermost expression. */
1000 if (op
->opcode
== COMPONENT_REF
)
1001 size_tree
= DECL_SIZE (op
->op0
);
1002 else if (op
->opcode
== BIT_FIELD_REF
)
1003 size_tree
= op
->op0
;
1006 machine_mode mode
= TYPE_MODE (type
);
1007 if (mode
== BLKmode
)
1008 size_tree
= TYPE_SIZE (type
);
1010 size
= GET_MODE_BITSIZE (mode
);
1012 if (size_tree
!= NULL_TREE
1013 && poly_int_tree_p (size_tree
))
1014 size
= wi::to_poly_offset (size_tree
);
1016 /* Initially, maxsize is the same as the accessed element size.
1017 In the following it will only grow (or become -1). */
1020 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1021 and find the ultimate containing object. */
1022 FOR_EACH_VEC_ELT (ops
, i
, op
)
1026 /* These may be in the reference ops, but we cannot do anything
1027 sensible with them here. */
1029 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1030 if (base
!= NULL_TREE
1031 && TREE_CODE (base
) == MEM_REF
1033 && DECL_P (TREE_OPERAND (op
->op0
, 0)))
1035 vn_reference_op_t pop
= &ops
[i
-1];
1036 base
= TREE_OPERAND (op
->op0
, 0);
1037 if (known_eq (pop
->off
, -1))
1043 offset
+= pop
->off
* BITS_PER_UNIT
;
1051 /* Record the base objects. */
1053 *op0_p
= build2 (MEM_REF
, op
->type
,
1054 NULL_TREE
, op
->op0
);
1055 MR_DEPENDENCE_CLIQUE (*op0_p
) = op
->clique
;
1056 MR_DEPENDENCE_BASE (*op0_p
) = op
->base
;
1057 op0_p
= &TREE_OPERAND (*op0_p
, 0);
1068 /* And now the usual component-reference style ops. */
1070 offset
+= wi::to_poly_offset (op
->op1
);
1075 tree field
= op
->op0
;
1076 /* We do not have a complete COMPONENT_REF tree here so we
1077 cannot use component_ref_field_offset. Do the interesting
1079 tree this_offset
= DECL_FIELD_OFFSET (field
);
1081 if (op
->op1
|| !poly_int_tree_p (this_offset
))
1085 poly_offset_int woffset
= (wi::to_poly_offset (this_offset
)
1086 << LOG2_BITS_PER_UNIT
);
1087 woffset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
1093 case ARRAY_RANGE_REF
:
1095 /* We recorded the lower bound and the element size. */
1096 if (!poly_int_tree_p (op
->op0
)
1097 || !poly_int_tree_p (op
->op1
)
1098 || TREE_CODE (op
->op2
) != INTEGER_CST
)
1102 poly_offset_int woffset
1103 = wi::sext (wi::to_poly_offset (op
->op0
)
1104 - wi::to_poly_offset (op
->op1
),
1105 TYPE_PRECISION (TREE_TYPE (op
->op0
)));
1106 woffset
*= wi::to_offset (op
->op2
) * vn_ref_op_align_unit (op
);
1107 woffset
<<= LOG2_BITS_PER_UNIT
;
1119 case VIEW_CONVERT_EXPR
:
1136 if (base
== NULL_TREE
)
1139 ref
->ref
= NULL_TREE
;
1141 ref
->ref_alias_set
= set
;
1142 ref
->base_alias_set
= base_set
;
1143 /* We discount volatiles from value-numbering elsewhere. */
1144 ref
->volatile_p
= false;
1146 if (!size
.to_shwi (&ref
->size
) || maybe_lt (ref
->size
, 0))
1154 if (!offset
.to_shwi (&ref
->offset
))
1161 if (!max_size
.to_shwi (&ref
->max_size
) || maybe_lt (ref
->max_size
, 0))
1167 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1168 vn_reference_op_s's. */
1171 copy_reference_ops_from_call (gcall
*call
,
1172 vec
<vn_reference_op_s
> *result
)
1174 vn_reference_op_s temp
;
1176 tree lhs
= gimple_call_lhs (call
);
1179 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1180 different. By adding the lhs here in the vector, we ensure that the
1181 hashcode is different, guaranteeing a different value number. */
1182 if (lhs
&& TREE_CODE (lhs
) != SSA_NAME
)
1184 memset (&temp
, 0, sizeof (temp
));
1185 temp
.opcode
= MODIFY_EXPR
;
1186 temp
.type
= TREE_TYPE (lhs
);
1189 result
->safe_push (temp
);
1192 /* Copy the type, opcode, function, static chain and EH region, if any. */
1193 memset (&temp
, 0, sizeof (temp
));
1194 temp
.type
= gimple_call_fntype (call
);
1195 temp
.opcode
= CALL_EXPR
;
1196 temp
.op0
= gimple_call_fn (call
);
1197 temp
.op1
= gimple_call_chain (call
);
1198 if (stmt_could_throw_p (cfun
, call
) && (lr
= lookup_stmt_eh_lp (call
)) > 0)
1199 temp
.op2
= size_int (lr
);
1201 result
->safe_push (temp
);
1203 /* Copy the call arguments. As they can be references as well,
1204 just chain them together. */
1205 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1207 tree callarg
= gimple_call_arg (call
, i
);
1208 copy_reference_ops_from_ref (callarg
, result
);
1212 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1213 *I_P to point to the last element of the replacement. */
1215 vn_reference_fold_indirect (vec
<vn_reference_op_s
> *ops
,
1218 unsigned int i
= *i_p
;
1219 vn_reference_op_t op
= &(*ops
)[i
];
1220 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1222 poly_int64 addr_offset
= 0;
1224 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1225 from .foo.bar to the preceding MEM_REF offset and replace the
1226 address with &OBJ. */
1227 addr_base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (op
->op0
, 0),
1228 &addr_offset
, vn_valueize
);
1229 gcc_checking_assert (addr_base
&& TREE_CODE (addr_base
) != MEM_REF
);
1230 if (addr_base
!= TREE_OPERAND (op
->op0
, 0))
1233 = (poly_offset_int::from (wi::to_poly_wide (mem_op
->op0
),
1236 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1237 op
->op0
= build_fold_addr_expr (addr_base
);
1238 if (tree_fits_shwi_p (mem_op
->op0
))
1239 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1247 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1248 *I_P to point to the last element of the replacement. */
1250 vn_reference_maybe_forwprop_address (vec
<vn_reference_op_s
> *ops
,
1253 bool changed
= false;
1254 vn_reference_op_t op
;
1258 unsigned int i
= *i_p
;
1260 vn_reference_op_t mem_op
= &(*ops
)[i
- 1];
1262 enum tree_code code
;
1263 poly_offset_int off
;
1265 def_stmt
= SSA_NAME_DEF_STMT (op
->op0
);
1266 if (!is_gimple_assign (def_stmt
))
1269 code
= gimple_assign_rhs_code (def_stmt
);
1270 if (code
!= ADDR_EXPR
1271 && code
!= POINTER_PLUS_EXPR
)
1274 off
= poly_offset_int::from (wi::to_poly_wide (mem_op
->op0
), SIGNED
);
1276 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1277 from .foo.bar to the preceding MEM_REF offset and replace the
1278 address with &OBJ. */
1279 if (code
== ADDR_EXPR
)
1281 tree addr
, addr_base
;
1282 poly_int64 addr_offset
;
1284 addr
= gimple_assign_rhs1 (def_stmt
);
1285 addr_base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr
, 0),
1288 /* If that didn't work because the address isn't invariant propagate
1289 the reference tree from the address operation in case the current
1290 dereference isn't offsetted. */
1292 && *i_p
== ops
->length () - 1
1293 && known_eq (off
, 0)
1294 /* This makes us disable this transform for PRE where the
1295 reference ops might be also used for code insertion which
1297 && default_vn_walk_kind
== VN_WALKREWRITE
)
1299 auto_vec
<vn_reference_op_s
, 32> tem
;
1300 copy_reference_ops_from_ref (TREE_OPERAND (addr
, 0), &tem
);
1301 /* Make sure to preserve TBAA info. The only objects not
1302 wrapped in MEM_REFs that can have their address taken are
1304 if (tem
.length () >= 2
1305 && tem
[tem
.length () - 2].opcode
== MEM_REF
)
1307 vn_reference_op_t new_mem_op
= &tem
[tem
.length () - 2];
1309 = wide_int_to_tree (TREE_TYPE (mem_op
->op0
),
1310 wi::to_poly_wide (new_mem_op
->op0
));
1313 gcc_assert (tem
.last ().opcode
== STRING_CST
);
1316 ops
->safe_splice (tem
);
1321 || TREE_CODE (addr_base
) != MEM_REF
1322 || (TREE_CODE (TREE_OPERAND (addr_base
, 0)) == SSA_NAME
1323 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base
,
1328 off
+= mem_ref_offset (addr_base
);
1329 op
->op0
= TREE_OPERAND (addr_base
, 0);
1334 ptr
= gimple_assign_rhs1 (def_stmt
);
1335 ptroff
= gimple_assign_rhs2 (def_stmt
);
1336 if (TREE_CODE (ptr
) != SSA_NAME
1337 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr
)
1338 /* Make sure to not endlessly recurse.
1339 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1340 happen when we value-number a PHI to its backedge value. */
1341 || SSA_VAL (ptr
) == op
->op0
1342 || !poly_int_tree_p (ptroff
))
1345 off
+= wi::to_poly_offset (ptroff
);
1349 mem_op
->op0
= wide_int_to_tree (TREE_TYPE (mem_op
->op0
), off
);
1350 if (tree_fits_shwi_p (mem_op
->op0
))
1351 mem_op
->off
= tree_to_shwi (mem_op
->op0
);
1354 /* ??? Can end up with endless recursion here!?
1355 gcc.c-torture/execute/strcmp-1.c */
1356 if (TREE_CODE (op
->op0
) == SSA_NAME
)
1357 op
->op0
= SSA_VAL (op
->op0
);
1358 if (TREE_CODE (op
->op0
) != SSA_NAME
)
1359 op
->opcode
= TREE_CODE (op
->op0
);
1364 while (TREE_CODE (op
->op0
) == SSA_NAME
);
1366 /* Fold a remaining *&. */
1367 if (TREE_CODE (op
->op0
) == ADDR_EXPR
)
1368 vn_reference_fold_indirect (ops
, i_p
);
1373 /* Optimize the reference REF to a constant if possible or return
1374 NULL_TREE if not. */
1377 fully_constant_vn_reference_p (vn_reference_t ref
)
1379 vec
<vn_reference_op_s
> operands
= ref
->operands
;
1380 vn_reference_op_t op
;
1382 /* Try to simplify the translated expression if it is
1383 a call to a builtin function with at most two arguments. */
1385 if (op
->opcode
== CALL_EXPR
1386 && TREE_CODE (op
->op0
) == ADDR_EXPR
1387 && TREE_CODE (TREE_OPERAND (op
->op0
, 0)) == FUNCTION_DECL
1388 && fndecl_built_in_p (TREE_OPERAND (op
->op0
, 0))
1389 && operands
.length () >= 2
1390 && operands
.length () <= 3)
1392 vn_reference_op_t arg0
, arg1
= NULL
;
1393 bool anyconst
= false;
1394 arg0
= &operands
[1];
1395 if (operands
.length () > 2)
1396 arg1
= &operands
[2];
1397 if (TREE_CODE_CLASS (arg0
->opcode
) == tcc_constant
1398 || (arg0
->opcode
== ADDR_EXPR
1399 && is_gimple_min_invariant (arg0
->op0
)))
1402 && (TREE_CODE_CLASS (arg1
->opcode
) == tcc_constant
1403 || (arg1
->opcode
== ADDR_EXPR
1404 && is_gimple_min_invariant (arg1
->op0
))))
1408 tree folded
= build_call_expr (TREE_OPERAND (op
->op0
, 0),
1411 arg1
? arg1
->op0
: NULL
);
1413 && TREE_CODE (folded
) == NOP_EXPR
)
1414 folded
= TREE_OPERAND (folded
, 0);
1416 && is_gimple_min_invariant (folded
))
1421 /* Simplify reads from constants or constant initializers. */
1422 else if (BITS_PER_UNIT
== 8
1423 && COMPLETE_TYPE_P (ref
->type
)
1424 && is_gimple_reg_type (ref
->type
))
1428 if (INTEGRAL_TYPE_P (ref
->type
))
1429 size
= TYPE_PRECISION (ref
->type
);
1430 else if (tree_fits_shwi_p (TYPE_SIZE (ref
->type
)))
1431 size
= tree_to_shwi (TYPE_SIZE (ref
->type
));
1434 if (size
% BITS_PER_UNIT
!= 0
1435 || size
> MAX_BITSIZE_MODE_ANY_MODE
)
1437 size
/= BITS_PER_UNIT
;
1439 for (i
= 0; i
< operands
.length (); ++i
)
1441 if (TREE_CODE_CLASS (operands
[i
].opcode
) == tcc_constant
)
1446 if (known_eq (operands
[i
].off
, -1))
1448 off
+= operands
[i
].off
;
1449 if (operands
[i
].opcode
== MEM_REF
)
1455 vn_reference_op_t base
= &operands
[--i
];
1456 tree ctor
= error_mark_node
;
1457 tree decl
= NULL_TREE
;
1458 if (TREE_CODE_CLASS (base
->opcode
) == tcc_constant
)
1460 else if (base
->opcode
== MEM_REF
1461 && base
[1].opcode
== ADDR_EXPR
1462 && (TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == VAR_DECL
1463 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == CONST_DECL
1464 || TREE_CODE (TREE_OPERAND (base
[1].op0
, 0)) == STRING_CST
))
1466 decl
= TREE_OPERAND (base
[1].op0
, 0);
1467 if (TREE_CODE (decl
) == STRING_CST
)
1470 ctor
= ctor_for_folding (decl
);
1472 if (ctor
== NULL_TREE
)
1473 return build_zero_cst (ref
->type
);
1474 else if (ctor
!= error_mark_node
)
1476 HOST_WIDE_INT const_off
;
1479 tree res
= fold_ctor_reference (ref
->type
, ctor
,
1480 off
* BITS_PER_UNIT
,
1481 size
* BITS_PER_UNIT
, decl
);
1484 STRIP_USELESS_TYPE_CONVERSION (res
);
1485 if (is_gimple_min_invariant (res
))
1489 else if (off
.is_constant (&const_off
))
1491 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
1492 int len
= native_encode_expr (ctor
, buf
, size
, const_off
);
1494 return native_interpret_expr (ref
->type
, buf
, len
);
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
1517 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1518 structures into their value numbers. This is done in-place, and
1519 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1520 whether any operands were valueized. */
1522 static vec
<vn_reference_op_s
>
1523 valueize_refs_1 (vec
<vn_reference_op_s
> orig
, bool *valueized_anything
,
1524 bool with_avail
= false)
1526 vn_reference_op_t vro
;
1529 *valueized_anything
= false;
1531 FOR_EACH_VEC_ELT (orig
, i
, vro
)
1533 if (vro
->opcode
== SSA_NAME
1534 || (vro
->op0
&& TREE_CODE (vro
->op0
) == SSA_NAME
))
1536 tree tem
= with_avail
? vn_valueize (vro
->op0
) : SSA_VAL (vro
->op0
);
1537 if (tem
!= vro
->op0
)
1539 *valueized_anything
= true;
1542 /* If it transforms from an SSA_NAME to a constant, update
1544 if (TREE_CODE (vro
->op0
) != SSA_NAME
&& vro
->opcode
== SSA_NAME
)
1545 vro
->opcode
= TREE_CODE (vro
->op0
);
1547 if (vro
->op1
&& TREE_CODE (vro
->op1
) == SSA_NAME
)
1549 tree tem
= with_avail
? vn_valueize (vro
->op1
) : SSA_VAL (vro
->op1
);
1550 if (tem
!= vro
->op1
)
1552 *valueized_anything
= true;
1556 if (vro
->op2
&& TREE_CODE (vro
->op2
) == SSA_NAME
)
1558 tree tem
= with_avail
? vn_valueize (vro
->op2
) : SSA_VAL (vro
->op2
);
1559 if (tem
!= vro
->op2
)
1561 *valueized_anything
= true;
1565 /* If it transforms from an SSA_NAME to an address, fold with
1566 a preceding indirect reference. */
1569 && TREE_CODE (vro
->op0
) == ADDR_EXPR
1570 && orig
[i
- 1].opcode
== MEM_REF
)
1572 if (vn_reference_fold_indirect (&orig
, &i
))
1573 *valueized_anything
= true;
1576 && vro
->opcode
== SSA_NAME
1577 && orig
[i
- 1].opcode
== MEM_REF
)
1579 if (vn_reference_maybe_forwprop_address (&orig
, &i
))
1580 *valueized_anything
= true;
1582 /* If it transforms a non-constant ARRAY_REF into a constant
1583 one, adjust the constant offset. */
1584 else if (vro
->opcode
== ARRAY_REF
1585 && known_eq (vro
->off
, -1)
1586 && poly_int_tree_p (vro
->op0
)
1587 && poly_int_tree_p (vro
->op1
)
1588 && TREE_CODE (vro
->op2
) == INTEGER_CST
)
1590 poly_offset_int off
= ((wi::to_poly_offset (vro
->op0
)
1591 - wi::to_poly_offset (vro
->op1
))
1592 * wi::to_offset (vro
->op2
)
1593 * vn_ref_op_align_unit (vro
));
1594 off
.to_shwi (&vro
->off
);
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};
1681 /* Context for alias walking. */
1683 struct vn_walk_cb_data
1685 vn_walk_cb_data (vn_reference_t vr_
, tree orig_ref_
, tree
*last_vuse_ptr_
,
1686 vn_lookup_kind vn_walk_kind_
, bool tbaa_p_
, tree mask_
)
1687 : vr (vr_
), last_vuse_ptr (last_vuse_ptr_
), last_vuse (NULL_TREE
),
1688 mask (mask_
), masked_result (NULL_TREE
), vn_walk_kind (vn_walk_kind_
),
1689 tbaa_p (tbaa_p_
), saved_operands (vNULL
), first_set (-2),
1690 first_base_set (-2), known_ranges (NULL
)
1693 last_vuse_ptr
= &last_vuse
;
1694 ao_ref_init (&orig_ref
, orig_ref_
);
1697 wide_int w
= wi::to_wide (mask
);
1698 unsigned int pos
= 0, prec
= w
.get_precision ();
1700 pd
.rhs
= build_constructor (NULL_TREE
, NULL
);
1701 /* When bitwise and with a constant is done on a memory load,
1702 we don't really need all the bits to be defined or defined
1703 to constants, we don't really care what is in the position
1704 corresponding to 0 bits in the mask.
1705 So, push the ranges of those 0 bits in the mask as artificial
1706 zero stores and let the partial def handling code do the
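	     rest.  For example, with a mask of 0xff00 the low bits
	     [0, 8) are don't-care, so an artificial zero partial def
	     covering that range is pushed here and only the remaining
	     bits need to be covered by real definitions.  */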
1710 int tz
= wi::ctz (w
);
1711 if (pos
+ tz
> prec
)
1715 if (BYTES_BIG_ENDIAN
)
1716 pd
.offset
= prec
- pos
- tz
;
1720 void *r
= push_partial_def (pd
, 0, 0, 0, prec
);
1721 gcc_assert (r
== NULL_TREE
);
1726 w
= wi::lrshift (w
, tz
);
1727 tz
= wi::ctz (wi::bit_not (w
));
1728 if (pos
+ tz
> prec
)
1731 w
= wi::lrshift (w
, tz
);
1735 ~vn_walk_cb_data ();
1736 void *finish (alias_set_type
, alias_set_type
, tree
);
1737 void *push_partial_def (pd_data pd
,
1738 alias_set_type
, alias_set_type
, HOST_WIDE_INT
,
1743 tree
*last_vuse_ptr
;
1747 vn_lookup_kind vn_walk_kind
;
1749 vec
<vn_reference_op_s
> saved_operands
;
1751 /* The VDEFs of partial defs we come along. */
1752 auto_vec
<pd_data
, 2> partial_defs
;
1753 /* The first defs range to avoid splay tree setup in most cases. */
1754 pd_range first_range
;
1755 alias_set_type first_set
;
1756 alias_set_type first_base_set
;
1757 splay_tree known_ranges
;
1758 obstack ranges_obstack
;
1761 vn_walk_cb_data::~vn_walk_cb_data ()
1765 splay_tree_delete (known_ranges
);
1766 obstack_free (&ranges_obstack
, NULL
);
1768 saved_operands
.release ();
1772 vn_walk_cb_data::finish (alias_set_type set
, alias_set_type base_set
, tree val
)
1774 if (first_set
!= -2)
1777 base_set
= first_base_set
;
1781 masked_result
= val
;
1784 vec
<vn_reference_op_s
> &operands
1785 = saved_operands
.exists () ? saved_operands
: vr
->operands
;
1786 return vn_reference_lookup_or_insert_for_pieces (last_vuse
, set
, base_set
,
1787 vr
->type
, operands
, val
);
1790 /* pd_range splay-tree helpers. */
1793 pd_range_compare (splay_tree_key offset1p
, splay_tree_key offset2p
)
1795 HOST_WIDE_INT offset1
= *(HOST_WIDE_INT
*)offset1p
;
1796 HOST_WIDE_INT offset2
= *(HOST_WIDE_INT
*)offset2p
;
1797 if (offset1
< offset2
)
1799 else if (offset1
> offset2
)
1805 pd_tree_alloc (int size
, void *data_
)
1807 vn_walk_cb_data
*data
= (vn_walk_cb_data
*)data_
;
1808 return obstack_alloc (&data
->ranges_obstack
, size
);
1812 pd_tree_dealloc (void *, void *)
1816 /* Push PD to the vector of partial definitions returning a
1817 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1818 NULL when we want to continue looking for partial defs or -1
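   on failure.  For example, when a four byte read is covered by one
   store defining bytes [0, 2) and a later store defining bytes [2, 4),
   both partial defs are recorded here and, once the whole access is
   covered, native encoded into one buffer that is then interpreted as
   a single constant value.  */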
1822 vn_walk_cb_data::push_partial_def (pd_data pd
,
1823 alias_set_type set
, alias_set_type base_set
,
1824 HOST_WIDE_INT offseti
,
1825 HOST_WIDE_INT maxsizei
)
1827 const HOST_WIDE_INT bufsize
= 64;
1828 /* We're using a fixed buffer for encoding so fail early if the object
1829 we want to interpret is bigger. */
1830 if (maxsizei
> bufsize
* BITS_PER_UNIT
1832 || BITS_PER_UNIT
!= 8
1833 /* Not prepared to handle PDP endian. */
1834 || BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
1837 /* Turn too large constant stores into non-constant stores. */
1838 if (CONSTANT_CLASS_P (pd
.rhs
) && pd
.size
> bufsize
* BITS_PER_UNIT
)
1839 pd
.rhs
= error_mark_node
;
1841 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1842 most a partial byte before and/or after the region. */
1843 if (!CONSTANT_CLASS_P (pd
.rhs
))
1845 if (pd
.offset
< offseti
)
1847 HOST_WIDE_INT o
= ROUND_DOWN (offseti
- pd
.offset
, BITS_PER_UNIT
);
1848 gcc_assert (pd
.size
> o
);
1852 if (pd
.size
> maxsizei
)
1853 pd
.size
= maxsizei
+ ((pd
.size
- maxsizei
) % BITS_PER_UNIT
);
1856 pd
.offset
-= offseti
;
1858 bool pd_constant_p
= (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
1859 || CONSTANT_CLASS_P (pd
.rhs
));
1860 if (partial_defs
.is_empty ())
1862 /* If we get a clobber upfront, fail. */
1863 if (TREE_CLOBBER_P (pd
.rhs
))
1867 partial_defs
.safe_push (pd
);
1868 first_range
.offset
= pd
.offset
;
1869 first_range
.size
= pd
.size
;
1871 first_base_set
= base_set
;
1872 last_vuse_ptr
= NULL
;
1873 /* Continue looking for partial defs. */
1879 /* ??? Optimize the case where the 2nd partial def completes things. */
1880 gcc_obstack_init (&ranges_obstack
);
1881 known_ranges
= splay_tree_new_with_allocator (pd_range_compare
, 0, 0,
1883 pd_tree_dealloc
, this);
1884 splay_tree_insert (known_ranges
,
1885 (splay_tree_key
)&first_range
.offset
,
1886 (splay_tree_value
)&first_range
);
1889 pd_range newr
= { pd
.offset
, pd
.size
};
1892 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1893 HOST_WIDE_INT loffset
= newr
.offset
+ 1;
1894 if ((n
= splay_tree_predecessor (known_ranges
, (splay_tree_key
)&loffset
))
1895 && ((r
= (pd_range
*)n
->value
), true)
1896 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
1897 newr
.offset
, newr
.size
))
1899 /* Ignore partial defs already covered. Here we also drop shadowed
1900 clobbers arriving here at the floor. */
1901 if (known_subrange_p (newr
.offset
, newr
.size
, r
->offset
, r
->size
))
1903 r
->size
= MAX (r
->offset
+ r
->size
, newr
.offset
+ newr
.size
) - r
->offset
;
1907 /* newr.offset wasn't covered yet, insert the range. */
1908 r
= XOBNEW (&ranges_obstack
, pd_range
);
1910 splay_tree_insert (known_ranges
, (splay_tree_key
)&r
->offset
,
1911 (splay_tree_value
)r
);
1913 /* Merge r which now contains newr and is a member of the splay tree with
1914 adjacent overlapping ranges. */
1916 while ((n
= splay_tree_successor (known_ranges
, (splay_tree_key
)&r
->offset
))
1917 && ((rafter
= (pd_range
*)n
->value
), true)
1918 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
1919 rafter
->offset
, rafter
->size
))
1921 r
->size
= MAX (r
->offset
+ r
->size
,
1922 rafter
->offset
+ rafter
->size
) - r
->offset
;
1923 splay_tree_remove (known_ranges
, (splay_tree_key
)&rafter
->offset
);
1925 /* If we get a clobber, fail. */
1926 if (TREE_CLOBBER_P (pd
.rhs
))
1928 /* Non-constants are OK as long as they are shadowed by a constant. */
1931 partial_defs
.safe_push (pd
);
1933 /* Now we have merged newr into the range tree. When we have covered
1934 [offseti, sizei] then the tree will contain exactly one node which has
1935 the desired properties and it will be 'r'. */
1936 if (!known_subrange_p (0, maxsizei
, r
->offset
, r
->size
))
1937 /* Continue looking for partial defs. */
1940 /* Now simply native encode all partial defs in reverse order. */
1941 unsigned ndefs
= partial_defs
.length ();
1942 /* We support up to 512-bit values (for V8DFmode). */
1943 unsigned char buffer
[bufsize
+ 1];
1944 unsigned char this_buffer
[bufsize
+ 1];
1947 memset (buffer
, 0, bufsize
+ 1);
1948 unsigned needed_len
= ROUND_UP (maxsizei
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
1949 while (!partial_defs
.is_empty ())
1951 pd_data pd
= partial_defs
.pop ();
1953 if (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
)
1955 /* Empty CONSTRUCTOR. */
1956 if (pd
.size
>= needed_len
* BITS_PER_UNIT
)
1959 len
= ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
1960 memset (this_buffer
, 0, len
);
1964 len
= native_encode_expr (pd
.rhs
, this_buffer
, bufsize
,
1965 MAX (0, -pd
.offset
) / BITS_PER_UNIT
);
1967 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
1968 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
1970 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1971 fprintf (dump_file
, "Failed to encode %u "
1972 "partial definitions\n", ndefs
);
1977 unsigned char *p
= buffer
;
1978 HOST_WIDE_INT size
= pd
.size
;
1980 size
-= ROUND_DOWN (-pd
.offset
, BITS_PER_UNIT
);
1981 this_buffer
[len
] = 0;
1982 if (BYTES_BIG_ENDIAN
)
1984 /* LSB of this_buffer[len - 1] byte should be at
1985 pd.offset + pd.size - 1 bits in buffer. */
1986 amnt
= ((unsigned HOST_WIDE_INT
) pd
.offset
1987 + pd
.size
) % BITS_PER_UNIT
;
1989 shift_bytes_in_array_right (this_buffer
, len
+ 1, amnt
);
1990 unsigned char *q
= this_buffer
;
1991 unsigned int off
= 0;
1995 off
= pd
.offset
/ BITS_PER_UNIT
;
1996 gcc_assert (off
< needed_len
);
2000 msk
= ((1 << size
) - 1) << (BITS_PER_UNIT
- amnt
);
2001 *p
= (*p
& ~msk
) | (this_buffer
[len
] & msk
);
2006 if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2007 q
= (this_buffer
+ len
2008 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2010 if (pd
.offset
% BITS_PER_UNIT
)
2012 msk
= -1U << (BITS_PER_UNIT
2013 - (pd
.offset
% BITS_PER_UNIT
));
2014 *p
= (*p
& msk
) | (*q
& ~msk
);
2018 size
-= BITS_PER_UNIT
- (pd
.offset
% BITS_PER_UNIT
);
2019 gcc_assert (size
>= 0);
2023 else if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2025 q
= (this_buffer
+ len
2026 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2028 if (pd
.offset
% BITS_PER_UNIT
)
2031 size
-= BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) pd
.offset
2033 gcc_assert (size
>= 0);
2036 if ((unsigned HOST_WIDE_INT
) size
/ BITS_PER_UNIT
+ off
2038 size
= (needed_len
- off
) * BITS_PER_UNIT
;
2039 memcpy (p
, q
, size
/ BITS_PER_UNIT
);
2040 if (size
% BITS_PER_UNIT
)
2043 = -1U << (BITS_PER_UNIT
- (size
% BITS_PER_UNIT
));
2044 p
+= size
/ BITS_PER_UNIT
;
2045 q
+= size
/ BITS_PER_UNIT
;
2046 *p
= (*q
& msk
) | (*p
& ~msk
);
2051 size
= MIN (size
, (HOST_WIDE_INT
) needed_len
* BITS_PER_UNIT
);
2054 /* LSB of this_buffer[0] byte should be at pd.offset bits
2057 amnt
= pd
.offset
% BITS_PER_UNIT
;
2059 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
2060 unsigned int off
= pd
.offset
/ BITS_PER_UNIT
;
2061 gcc_assert (off
< needed_len
);
2063 (HOST_WIDE_INT
) (needed_len
- off
) * BITS_PER_UNIT
);
2065 if (amnt
+ size
< BITS_PER_UNIT
)
2067 /* Low amnt bits come from *p, then size bits
2068 from this_buffer[0] and the remaining again from
2070 msk
= ((1 << size
) - 1) << amnt
;
2071 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2077 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2079 size
-= (BITS_PER_UNIT
- amnt
);
2084 amnt
= (unsigned HOST_WIDE_INT
) pd
.offset
% BITS_PER_UNIT
;
2086 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
2088 memcpy (p
, this_buffer
+ (amnt
!= 0), size
/ BITS_PER_UNIT
);
2089 p
+= size
/ BITS_PER_UNIT
;
2090 if (size
% BITS_PER_UNIT
)
2092 unsigned int msk
= -1U << (size
% BITS_PER_UNIT
);
2093 *p
= (this_buffer
[(amnt
!= 0) + size
/ BITS_PER_UNIT
]
2094 & ~msk
) | (*p
& msk
);
2099 tree type
= vr
->type
;
2100 /* Make sure to interpret in a type that has a range covering the whole
2102 if (INTEGRAL_TYPE_P (vr
->type
) && maxsizei
!= TYPE_PRECISION (vr
->type
))
2103 type
= build_nonstandard_integer_type (maxsizei
, TYPE_UNSIGNED (type
));
2105 if (BYTES_BIG_ENDIAN
)
2107 unsigned sz
= needed_len
;
2108 if (maxsizei
% BITS_PER_UNIT
)
2109 shift_bytes_in_array_right (buffer
, needed_len
,
2111 - (maxsizei
% BITS_PER_UNIT
));
2112 if (INTEGRAL_TYPE_P (type
))
2113 sz
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
2114 if (sz
> needed_len
)
2116 memcpy (this_buffer
+ (sz
- needed_len
), buffer
, needed_len
);
2117 val
= native_interpret_expr (type
, this_buffer
, sz
);
2120 val
= native_interpret_expr (type
, buffer
, needed_len
);
2123 val
= native_interpret_expr (type
, buffer
, bufsize
);
2124 /* If we chop off bits because the types precision doesn't match the memory
2125 access size this is ok when optimizing reads but not when called from
2126 the DSE code during elimination. */
2127 if (val
&& type
!= vr
->type
)
2129 if (! int_fits_type_p (val
, vr
->type
))
2132 val
= fold_convert (vr
->type
, val
);
2137 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2139 "Successfully combined %u partial definitions\n", ndefs
);
2140 /* We are using the alias-set of the first store we encounter which
2141 should be appropriate here. */
2142 return finish (first_set
, first_base_set
, val
);
2146 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2148 "Failed to interpret %u encoded partial definitions\n", ndefs
);
2153 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2154 with the current VUSE and performs the expression lookup. */
2157 vn_reference_lookup_2 (ao_ref
*op ATTRIBUTE_UNUSED
, tree vuse
, void *data_
)
2159 vn_walk_cb_data
*data
= (vn_walk_cb_data
*)data_
;
2160 vn_reference_t vr
= data
->vr
;
2161 vn_reference_s
**slot
;
2164 /* If we have partial definitions recorded we have to go through
2165 vn_reference_lookup_3. */
2166 if (!data
->partial_defs
.is_empty ())
2169 if (data
->last_vuse_ptr
)
2171 *data
->last_vuse_ptr
= vuse
;
2172 data
->last_vuse
= vuse
;
2175 /* Fixup vuse and hash. */
2177 vr
->hashcode
= vr
->hashcode
- SSA_NAME_VERSION (vr
->vuse
);
2178 vr
->vuse
= vuse_ssa_val (vuse
);
2180 vr
->hashcode
= vr
->hashcode
+ SSA_NAME_VERSION (vr
->vuse
);
2182 hash
= vr
->hashcode
;
2183 slot
= valid_info
->references
->find_slot_with_hash (vr
, hash
, NO_INSERT
);
2186 if ((*slot
)->result
&& data
->saved_operands
.exists ())
2187 return data
->finish (vr
->set
, vr
->base_set
, (*slot
)->result
);
2194 /* Lookup an existing or insert a new vn_reference entry into the
2195 value table for the VUSE, SET, TYPE, OPERANDS reference which
2196 has the value VALUE which is either a constant or an SSA name. */
2198 static vn_reference_t
2199 vn_reference_lookup_or_insert_for_pieces (tree vuse
,
2201 alias_set_type base_set
,
2203 vec
<vn_reference_op_s
,
2208 vn_reference_t result
;
2210 vr1
.vuse
= vuse
? SSA_VAL (vuse
) : NULL_TREE
;
2211 vr1
.operands
= operands
;
2214 vr1
.base_set
= base_set
;
2215 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
2216 if (vn_reference_lookup_1 (&vr1
, &result
))
2218 if (TREE_CODE (value
) == SSA_NAME
)
2219 value_id
= VN_INFO (value
)->value_id
;
2221 value_id
= get_or_alloc_constant_value_id (value
);
2222 return vn_reference_insert_pieces (vuse
, set
, base_set
, type
,
2223 operands
.copy (), value
, value_id
);
2226 /* Return a value-number for RCODE OPS... either by looking up an existing
2227    value-number for the simplified result or by inserting the operation if
2231 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2233   tree result = NULL_TREE;
2234   /* We will be creating a value number for
2236      So first simplify and lookup this expression to see if it
2237      is already available.  */
2238   /* For simplification valueize.  */
2240   for (i = 0; i < res_op->num_ops; ++i)
2241     if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2243         tree tem = vn_valueize (res_op->ops[i]);
2246         res_op->ops[i] = tem;
2248   /* If valueization of an operand fails (it is not available), skip
2251   if (i == res_op->num_ops)
2253       mprts_hook = vn_lookup_simplify_result;
2254       res = res_op->resimplify (NULL, vn_valueize);
2257   gimple *new_stmt = NULL;
2259       && gimple_simplified_result_is_gimple_val (res_op))
2261       /* The expression is already available.  */
2262       result = res_op->ops[0];
2263       /* Valueize it, simplification returns sth in AVAIL only.  */
2264       if (TREE_CODE (result) == SSA_NAME)
2265         result = SSA_VAL (result);
2269       tree val = vn_lookup_simplify_result (res_op);
2272           gimple_seq stmts = NULL;
2273           result = maybe_push_res_to_seq (res_op, &stmts);
2276               gcc_assert (gimple_seq_singleton_p (stmts));
2277               new_stmt = gimple_seq_first_stmt (stmts);
2281         /* The expression is already available.  */
2286       /* The expression is not yet available, value-number lhs to
2287          the new SSA_NAME we created.  */
2288       /* Initialize value-number information properly.  */
2289       vn_ssa_aux_t result_info = VN_INFO (result);
2290       result_info->valnum = result;
2291       result_info->value_id = get_next_value_id ();
2292       result_info->visited = 1;
2293       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2295       result_info->needs_insertion = true;
2296       /* ??? PRE phi-translation inserts NARYs without corresponding
2297          SSA name result.  Re-use those but set their result according
2298          to the stmt we just built.  */
2299       vn_nary_op_t nary = NULL;
2300       vn_nary_op_lookup_stmt (new_stmt, &nary);
2303           gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2304           nary->u.result = gimple_assign_lhs (new_stmt);
2306       /* As all "inserted" statements are singleton SCCs, insert
2307          to the valid table.  This is strictly needed to
2308          avoid re-generating new value SSA_NAMEs for the same
2309          expression during SCC iteration over and over (the
2310          optimistic table gets cleared after each iteration).
2311          We do not need to insert into the optimistic table, as
2312          lookups there will fall back to the valid table.  */
2315           unsigned int length = vn_nary_length_from_stmt (new_stmt);
2317             = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2318           vno1->value_id = result_info->value_id;
2319           vno1->length = length;
2320           vno1->predicated_values = 0;
2321           vno1->u.result = result;
2322           init_vn_nary_op_from_stmt (vno1, new_stmt);
2323           vn_nary_op_insert_into (vno1, valid_info->nary, true);
2324           /* Also do not link it into the undo chain.  */
2325           last_inserted_nary = vno1->next;
2326           vno1->next = (vn_nary_op_t)(void *)-1;
2328       if (dump_file && (dump_flags & TDF_DETAILS))
2330           fprintf (dump_file, "Inserting name ");
2331           print_generic_expr (dump_file, result);
2332           fprintf (dump_file, " for expression ");
2333           print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2334           fprintf (dump_file, "\n");
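/* As an illustration (hypothetical GIMPLE, not from any testcase): for a
   res_op describing PLUS_EXPR <x_1, 0> the resimplification above yields
   the gimple value x_1 and SSA_VAL (x_1) is returned without inserting
   anything, while for PLUS_EXPR <x_1, y_2> with no recorded value number
   and INSERT true, maybe_push_res_to_seq materializes a single new
   statement whose LHS is value-numbered and recorded as shown above.  */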
2340 /* Return a value-number for RCODE OPS... either by looking up an existing
2341    value-number for the simplified result or by inserting the operation.  */
2344 vn_nary_build_or_lookup (gimple_match_op *res_op)
2346   return vn_nary_build_or_lookup_1 (res_op, true);
2349 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2350    its value if present.  */
2353 vn_nary_simplify (vn_nary_op_t nary)
2355   if (nary->length > gimple_match_op::MAX_NUM_OPS)
2357   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2358                       nary->type, nary->length);
2359   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2360   return vn_nary_build_or_lookup_1 (&op, false);
2363 /* Elimination engine.  */
2365 class eliminate_dom_walker : public dom_walker
2368   eliminate_dom_walker (cdi_direction, bitmap);
2369   ~eliminate_dom_walker ();
2371   virtual edge before_dom_children (basic_block);
2372   virtual void after_dom_children (basic_block);
2374   virtual tree eliminate_avail (basic_block, tree op);
2375   virtual void eliminate_push_avail (basic_block, tree op);
2376   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2378   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2380   unsigned eliminate_cleanup (bool region_p = false);
2383   unsigned int el_todo;
2384   unsigned int eliminations;
2385   unsigned int insertions;
2387   /* SSA names that had their defs inserted by PRE if do_pre.  */
2388   bitmap inserted_exprs;
2390   /* Blocks with statements that have had their EH properties changed.  */
2391   bitmap need_eh_cleanup;
2393   /* Blocks with statements that have had their AB properties changed.  */
2394   bitmap need_ab_cleanup;
2396   /* Local state for the eliminate domwalk.  */
2397   auto_vec<gimple *> to_remove;
2398   auto_vec<gimple *> to_fixup;
2399   auto_vec<tree> avail;
2400   auto_vec<tree> avail_stack;
2403 /* Adaptor to the elimination engine using RPO availability.  */
2405 class rpo_elim : public eliminate_dom_walker
2408   rpo_elim(basic_block entry_)
2409     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2410       m_avail_freelist (NULL) {}
2412   virtual tree eliminate_avail (basic_block, tree op);
2414   virtual void eliminate_push_avail (basic_block, tree);
2417   /* Freelist of avail entries which are allocated from the vn_ssa_aux
2419   vn_avail *m_avail_freelist;
2422 /* Global RPO state for access from hooks.  */
2423 static eliminate_dom_walker *rpo_avail;
2424 basic_block vn_context_bb;
2426 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2427    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2428    Otherwise return false.  */
2431 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2432                                        tree base2, poly_int64 *offset2)
2435   if (TREE_CODE (base1) == MEM_REF
2436       && TREE_CODE (base2) == MEM_REF)
2438       if (mem_ref_offset (base1).to_shwi (&soff))
2440           base1 = TREE_OPERAND (base1, 0);
2441           *offset1 += soff * BITS_PER_UNIT;
2443       if (mem_ref_offset (base2).to_shwi (&soff))
2445           base2 = TREE_OPERAND (base2, 0);
2446           *offset2 += soff * BITS_PER_UNIT;
2448       return operand_equal_p (base1, base2, 0);
2450   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
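/* For example, with base1 = MEM_REF[p_1, 4] and base2 = MEM_REF[p_1, 16]
   and BITS_PER_UNIT == 8, both bases strip down to p_1, *offset1 grows by
   32 and *offset2 by 128 bit units, and operand_equal_p (p_1, p_1, 0)
   makes the function return true.  */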
2453 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2454 from the statement defining VUSE and if not successful tries to
2455 translate *REFP and VR_ through an aggregate copy at the definition
2456 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2457 of *REF and *VR. If only disambiguation was performed then
2458 *DISAMBIGUATE_ONLY is set to true. */
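/* Roughly, the translation cases handled below are: 1) memset-style calls
   storing a known byte value, 2) assignment from an empty CONSTRUCTOR,
   3) assignment from a constant via folds native encode/interpret
   routines, 4) assignment from an SSA name whose pieces we can access,
   5) aggregate copies that kill the reference, and 6) memcpy-style copies
   that kill the reference; see the correspondingly numbered comments
   below.  */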
2461 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *data_
,
2462 translate_flags
*disambiguate_only
)
2464 vn_walk_cb_data
*data
= (vn_walk_cb_data
*)data_
;
2465 vn_reference_t vr
= data
->vr
;
2466 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
2467 tree base
= ao_ref_base (ref
);
2468 HOST_WIDE_INT offseti
= 0, maxsizei
, sizei
= 0;
2469 static vec
<vn_reference_op_s
> lhs_ops
;
2471 bool lhs_ref_ok
= false;
2472 poly_int64 copy_size
;
2474 /* First try to disambiguate after value-replacing in the definitions LHS. */
2475 if (is_gimple_assign (def_stmt
))
2477 tree lhs
= gimple_assign_lhs (def_stmt
);
2478 bool valueized_anything
= false;
2479 /* Avoid re-allocation overhead. */
2480 lhs_ops
.truncate (0);
2481 basic_block saved_rpo_bb
= vn_context_bb
;
2482 vn_context_bb
= gimple_bb (def_stmt
);
2483 if (*disambiguate_only
<= TR_VALUEIZE_AND_DISAMBIGUATE
)
2485 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
2486 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
, true);
2488 vn_context_bb
= saved_rpo_bb
;
2489 ao_ref_init (&lhs_ref
, lhs
);
2491 if (valueized_anything
2492 && ao_ref_init_from_vn_reference
2493 (&lhs_ref
, ao_ref_alias_set (&lhs_ref
),
2494 ao_ref_base_alias_set (&lhs_ref
), TREE_TYPE (lhs
), lhs_ops
)
2495 && !refs_may_alias_p_1 (ref
, &lhs_ref
, data
->tbaa_p
))
2497 *disambiguate_only
= TR_VALUEIZE_AND_DISAMBIGUATE
;
2501 /* Besides valueizing the LHS we can also use access-path based
2502 disambiguation on the original non-valueized ref. */
2505 && data
->orig_ref
.ref
)
2507 /* We want to use the non-valueized LHS for this, but avoid redundant
2509 ao_ref
*lref
= &lhs_ref
;
2511 if (valueized_anything
)
2513 ao_ref_init (&lref_alt
, lhs
);
2516 if (!refs_may_alias_p_1 (&data
->orig_ref
, lref
, data
->tbaa_p
))
2518 *disambiguate_only
= (valueized_anything
2519 ? TR_VALUEIZE_AND_DISAMBIGUATE
2525 /* If we reach a clobbering statement try to skip it and see if
2526 we find a VN result with exactly the same value as the
2527 possible clobber. In this case we can ignore the clobber
2528 and return the found value. */
2529 if (is_gimple_reg_type (TREE_TYPE (lhs
))
2530 && types_compatible_p (TREE_TYPE (lhs
), vr
->type
)
2531 && (ref
->ref
|| data
->orig_ref
.ref
))
2533 tree
*saved_last_vuse_ptr
= data
->last_vuse_ptr
;
2534 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2535 data
->last_vuse_ptr
= NULL
;
2536 tree saved_vuse
= vr
->vuse
;
2537 hashval_t saved_hashcode
= vr
->hashcode
;
2538 void *res
= vn_reference_lookup_2 (ref
, gimple_vuse (def_stmt
), data
);
2539 /* Need to restore vr->vuse and vr->hashcode. */
2540 vr
->vuse
= saved_vuse
;
2541 vr
->hashcode
= saved_hashcode
;
2542 data
->last_vuse_ptr
= saved_last_vuse_ptr
;
2543 if (res
&& res
!= (void *)-1)
2545 vn_reference_t vnresult
= (vn_reference_t
) res
;
2546 tree rhs
= gimple_assign_rhs1 (def_stmt
);
2547 if (TREE_CODE (rhs
) == SSA_NAME
)
2548 rhs
= SSA_VAL (rhs
);
2549 if (vnresult
->result
2550 && operand_equal_p (vnresult
->result
, rhs
, 0)
2551 /* We have to honor our promise about union type punning
2552 and also support arbitrary overlaps with
2553 -fno-strict-aliasing. So simply resort to alignment to
2554 rule out overlaps. Do this check last because it is
2555 quite expensive compared to the hash-lookup above. */
2556 && multiple_p (get_object_alignment
2557 (ref
->ref
? ref
->ref
: data
->orig_ref
.ref
),
2559 && multiple_p (get_object_alignment (lhs
), ref
->size
))
2564 else if (*disambiguate_only
<= TR_VALUEIZE_AND_DISAMBIGUATE
2565 && gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
2566 && gimple_call_num_args (def_stmt
) <= 4)
2568 /* For builtin calls valueize its arguments and call the
2569 alias oracle again. Valueization may improve points-to
2570 info of pointers and constify size and position arguments.
2571 Originally this was motivated by PR61034 which has
2572 conditional calls to free falsely clobbering ref because
2573 of imprecise points-to info of the argument. */
2575 bool valueized_anything
= false;
2576 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
2578 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
2579 tree val
= vn_valueize (oldargs
[i
]);
2580 if (val
!= oldargs
[i
])
2582 gimple_call_set_arg (def_stmt
, i
, val
);
2583 valueized_anything
= true;
2586 if (valueized_anything
)
2588 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
2590 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
2591 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
2594 *disambiguate_only
= TR_VALUEIZE_AND_DISAMBIGUATE
;
2600 if (*disambiguate_only
> TR_TRANSLATE
)
2603 /* If we cannot constrain the size of the reference we cannot
2604 test if anything kills it. */
2605 if (!ref
->max_size_known_p ())
2608 poly_int64 offset
= ref
->offset
;
2609 poly_int64 maxsize
= ref
->max_size
;
2611 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2612 from that definition.
2614 if (is_gimple_reg_type (vr
->type
)
2615 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
2616 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET_CHK
))
2617 && (integer_zerop (gimple_call_arg (def_stmt
, 1))
2618 || ((TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
2619 || (INTEGRAL_TYPE_P (vr
->type
) && known_eq (ref
->size
, 8)))
2621 && BITS_PER_UNIT
== 8
2622 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
2623 && offset
.is_constant (&offseti
)
2624 && ref
->size
.is_constant (&sizei
)
2625 && (offseti
% BITS_PER_UNIT
== 0
2626 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
)))
2627 && (poly_int_tree_p (gimple_call_arg (def_stmt
, 2))
2628 || (TREE_CODE (gimple_call_arg (def_stmt
, 2)) == SSA_NAME
2629 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt
, 2)))))
2630 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2631 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
))
2634 poly_int64 offset2
, size2
, maxsize2
;
2636 tree ref2
= gimple_call_arg (def_stmt
, 0);
2637 if (TREE_CODE (ref2
) == SSA_NAME
)
2639 ref2
= SSA_VAL (ref2
);
2640 if (TREE_CODE (ref2
) == SSA_NAME
2641 && (TREE_CODE (base
) != MEM_REF
2642 || TREE_OPERAND (base
, 0) != ref2
))
2644 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref2
);
2645 if (gimple_assign_single_p (def_stmt
)
2646 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2647 ref2
= gimple_assign_rhs1 (def_stmt
);
2650 if (TREE_CODE (ref2
) == ADDR_EXPR
)
2652 ref2
= TREE_OPERAND (ref2
, 0);
2653 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
2655 if (!known_size_p (maxsize2
)
2656 || !known_eq (maxsize2
, size2
)
2657 || !operand_equal_p (base
, base2
, OEP_ADDRESS_OF
))
2660 else if (TREE_CODE (ref2
) == SSA_NAME
)
2663 if (TREE_CODE (base
) != MEM_REF
2664 || !(mem_ref_offset (base
)
2665 << LOG2_BITS_PER_UNIT
).to_shwi (&soff
))
2669 if (TREE_OPERAND (base
, 0) != ref2
)
2671 gimple
*def
= SSA_NAME_DEF_STMT (ref2
);
2672 if (is_gimple_assign (def
)
2673 && gimple_assign_rhs_code (def
) == POINTER_PLUS_EXPR
2674 && gimple_assign_rhs1 (def
) == TREE_OPERAND (base
, 0)
2675 && poly_int_tree_p (gimple_assign_rhs2 (def
)))
2677 tree rhs2
= gimple_assign_rhs2 (def
);
2678 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2
),
2680 << LOG2_BITS_PER_UNIT
).to_shwi (&offset2
))
2682 ref2
= gimple_assign_rhs1 (def
);
2683 if (TREE_CODE (ref2
) == SSA_NAME
)
2684 ref2
= SSA_VAL (ref2
);
2692 tree len
= gimple_call_arg (def_stmt
, 2);
2693 HOST_WIDE_INT leni
, offset2i
;
2694 if (TREE_CODE (len
) == SSA_NAME
)
2695 len
= SSA_VAL (len
);
2696 /* Sometimes the above trickery is smarter than alias analysis. Take
2697 advantage of that. */
2698 if (!ranges_maybe_overlap_p (offset
, maxsize
, offset2
,
2699 (wi::to_poly_offset (len
)
2700 << LOG2_BITS_PER_UNIT
)))
2702 if (data
->partial_defs
.is_empty ()
2703 && known_subrange_p (offset
, maxsize
, offset2
,
2704 wi::to_poly_offset (len
) << LOG2_BITS_PER_UNIT
))
2707 if (integer_zerop (gimple_call_arg (def_stmt
, 1)))
2708 val
= build_zero_cst (vr
->type
);
2709 else if (INTEGRAL_TYPE_P (vr
->type
)
2710 && known_eq (ref
->size
, 8)
2711 && offseti
% BITS_PER_UNIT
== 0)
2713 gimple_match_op
res_op (gimple_match_cond::UNCOND
, NOP_EXPR
,
2714 vr
->type
, gimple_call_arg (def_stmt
, 1));
2715 val
= vn_nary_build_or_lookup (&res_op
);
2717 || (TREE_CODE (val
) == SSA_NAME
2718 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
2723 unsigned buflen
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr
->type
)) + 1;
2724 if (INTEGRAL_TYPE_P (vr
->type
))
2725 buflen
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr
->type
)) + 1;
2726 unsigned char *buf
= XALLOCAVEC (unsigned char, buflen
);
2727 memset (buf
, TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 1)),
2729 if (BYTES_BIG_ENDIAN
)
2732 = (((unsigned HOST_WIDE_INT
) offseti
+ sizei
)
2736 shift_bytes_in_array_right (buf
, buflen
,
2737 BITS_PER_UNIT
- amnt
);
2742 else if (offseti
% BITS_PER_UNIT
!= 0)
2745 = BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) offseti
2747 shift_bytes_in_array_left (buf
, buflen
, amnt
);
2751 val
= native_interpret_expr (vr
->type
, buf
, buflen
);
2755 return data
->finish (0, 0, val
);
2757 /* For now handle clearing memory with partial defs. */
2758 else if (known_eq (ref
->size
, maxsize
)
2759 && integer_zerop (gimple_call_arg (def_stmt
, 1))
2760 && tree_fits_poly_int64_p (len
)
2761 && tree_to_poly_int64 (len
).is_constant (&leni
)
2762 && leni
<= INTTYPE_MAXIMUM (HOST_WIDE_INT
) / BITS_PER_UNIT
2763 && offset
.is_constant (&offseti
)
2764 && offset2
.is_constant (&offset2i
)
2765 && maxsize
.is_constant (&maxsizei
)
2766 && ranges_known_overlap_p (offseti
, maxsizei
, offset2i
,
2767 leni
<< LOG2_BITS_PER_UNIT
))
2770 pd
.rhs
= build_constructor (NULL_TREE
, NULL
);
2771 pd
.offset
= offset2i
;
2772 pd
.size
= leni
<< LOG2_BITS_PER_UNIT
;
2773 return data
->push_partial_def (pd
, 0, 0, offseti
, maxsizei
);
2777 /* 2) Assignment from an empty CONSTRUCTOR. */
2778 else if (is_gimple_reg_type (vr
->type
)
2779 && gimple_assign_single_p (def_stmt
)
2780 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
2781 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
2784 poly_int64 offset2
, size2
, maxsize2
;
2785 HOST_WIDE_INT offset2i
, size2i
;
2786 gcc_assert (lhs_ref_ok
);
2787 base2
= ao_ref_base (&lhs_ref
);
2788 offset2
= lhs_ref
.offset
;
2789 size2
= lhs_ref
.size
;
2790 maxsize2
= lhs_ref
.max_size
;
2791 if (known_size_p (maxsize2
)
2792 && known_eq (maxsize2
, size2
)
2793 && adjust_offsets_for_equal_base_address (base
, &offset
,
2796 if (data
->partial_defs
.is_empty ()
2797 && known_subrange_p (offset
, maxsize
, offset2
, size2
))
2799 /* While technically undefined behavior do not optimize
2800 a full read from a clobber. */
2801 if (gimple_clobber_p (def_stmt
))
2803 tree val
= build_zero_cst (vr
->type
);
2804 return data
->finish (ao_ref_alias_set (&lhs_ref
),
2805 ao_ref_base_alias_set (&lhs_ref
), val
);
2807 else if (known_eq (ref
->size
, maxsize
)
2808 && maxsize
.is_constant (&maxsizei
)
2809 && offset
.is_constant (&offseti
)
2810 && offset2
.is_constant (&offset2i
)
2811 && size2
.is_constant (&size2i
)
2812 && ranges_known_overlap_p (offseti
, maxsizei
,
2815 /* Let clobbers be consumed by the partial-def tracker
2816 which can choose to ignore them if they are shadowed
2819 pd
.rhs
= gimple_assign_rhs1 (def_stmt
);
2820 pd
.offset
= offset2i
;
2822 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
2823 ao_ref_base_alias_set (&lhs_ref
),
2829 /* 3) Assignment from a constant. We can use folds native encode/interpret
2830 routines to extract the assigned bits. */
2831 else if (known_eq (ref
->size
, maxsize
)
2832 && is_gimple_reg_type (vr
->type
)
2833 && !contains_storage_order_barrier_p (vr
->operands
)
2834 && gimple_assign_single_p (def_stmt
)
2836 && BITS_PER_UNIT
== 8
2837 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
2838 /* native_encode and native_decode operate on arrays of bytes
2839 and so fundamentally need a compile-time size and offset. */
2840 && maxsize
.is_constant (&maxsizei
)
2841 && offset
.is_constant (&offseti
)
2842 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
2843 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
2844 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
2846 tree lhs
= gimple_assign_lhs (def_stmt
);
2848 poly_int64 offset2
, size2
, maxsize2
;
2849 HOST_WIDE_INT offset2i
, size2i
;
2851 gcc_assert (lhs_ref_ok
);
2852 base2
= ao_ref_base (&lhs_ref
);
2853 offset2
= lhs_ref
.offset
;
2854 size2
= lhs_ref
.size
;
2855 maxsize2
= lhs_ref
.max_size
;
2856 reverse
= reverse_storage_order_for_component_p (lhs
);
2859 && !storage_order_barrier_p (lhs
)
2860 && known_eq (maxsize2
, size2
)
2861 && adjust_offsets_for_equal_base_address (base
, &offset
,
2863 && offset
.is_constant (&offseti
)
2864 && offset2
.is_constant (&offset2i
)
2865 && size2
.is_constant (&size2i
))
2867 if (data
->partial_defs
.is_empty ()
2868 && known_subrange_p (offseti
, maxsizei
, offset2
, size2
))
2870 /* We support up to 512-bit values (for V8DFmode). */
2871 unsigned char buffer
[65];
2874 tree rhs
= gimple_assign_rhs1 (def_stmt
);
2875 if (TREE_CODE (rhs
) == SSA_NAME
)
2876 rhs
= SSA_VAL (rhs
);
2877 len
= native_encode_expr (rhs
,
2878 buffer
, sizeof (buffer
) - 1,
2879 (offseti
- offset2i
) / BITS_PER_UNIT
);
2880 if (len
> 0 && len
* BITS_PER_UNIT
>= maxsizei
)
2882 tree type
= vr
->type
;
2883 unsigned char *buf
= buffer
;
2884 unsigned int amnt
= 0;
2885 /* Make sure to interpret in a type that has a range
2886 covering the whole access size. */
2887 if (INTEGRAL_TYPE_P (vr
->type
)
2888 && maxsizei
!= TYPE_PRECISION (vr
->type
))
2889 type
= build_nonstandard_integer_type (maxsizei
,
2890 TYPE_UNSIGNED (type
));
2891 if (BYTES_BIG_ENDIAN
)
2893 /* For big-endian native_encode_expr stored the rhs
2894 such that the LSB of it is the LSB of buffer[len - 1].
2895 That bit is stored into memory at position
2896 offset2 + size2 - 1, i.e. in byte
2897 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2898 E.g. for offset2 1 and size2 14, rhs -1 and memory
2899 previously cleared that is:
2902 Now, if we want to extract offset 2 and size 12 from
2903 it using native_interpret_expr (which actually works
2904 for integral bitfield types in terms of byte size of
2905 the mode), the native_encode_expr stored the value
2908 and returned len 2 (the X bits are outside of
2910 Let sz be maxsize / BITS_PER_UNIT if not extracting
2911 a bitfield, and GET_MODE_SIZE otherwise.
2912 We need to align the LSB of the value we want to
2913 extract as the LSB of buf[sz - 1].
2914 The LSB from memory we need to read is at position
2915 offset + maxsize - 1. */
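	     /* Continuing the example above with offset2 1, size2 14,
		offset 2 and maxsize 12 (BITS_PER_UNIT == 8): the first
		shift amount is (1 + 14 - 2 - 12) % 8 == 1 bit, the
		following byte adjustment is (1 + 14 - 2 - 12 - 1) / 8 == 0,
		and for the integral case sz is the byte size of the 12-bit
		type's mode (2), so buf ends up at buffer + len - 2.
		(Illustrative arithmetic only.)  */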
2916 HOST_WIDE_INT sz
= maxsizei
/ BITS_PER_UNIT
;
2917 if (INTEGRAL_TYPE_P (type
))
2918 sz
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
2919 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
2920 - offseti
- maxsizei
) % BITS_PER_UNIT
;
2922 shift_bytes_in_array_right (buffer
, len
, amnt
);
2923 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
2924 - offseti
- maxsizei
- amnt
) / BITS_PER_UNIT
;
2925 if ((unsigned HOST_WIDE_INT
) sz
+ amnt
> (unsigned) len
)
2929 buf
= buffer
+ len
- sz
- amnt
;
2930 len
-= (buf
- buffer
);
2935 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
2936 - offseti
) % BITS_PER_UNIT
;
2940 shift_bytes_in_array_left (buffer
, len
+ 1, amnt
);
2944 tree val
= native_interpret_expr (type
, buf
, len
);
2945              /* If we chop off bits because the type's precision doesn't
2946                 match the memory access size this is ok when optimizing
2947                 reads but not when called from the DSE code during
2950 && type
!= vr
->type
)
2952 if (! int_fits_type_p (val
, vr
->type
))
2955 val
= fold_convert (vr
->type
, val
);
2959 return data
->finish (ao_ref_alias_set (&lhs_ref
),
2960 ao_ref_base_alias_set (&lhs_ref
), val
);
2963 else if (ranges_known_overlap_p (offseti
, maxsizei
, offset2i
,
2967 tree rhs
= gimple_assign_rhs1 (def_stmt
);
2968 if (TREE_CODE (rhs
) == SSA_NAME
)
2969 rhs
= SSA_VAL (rhs
);
2971 pd
.offset
= offset2i
;
2973 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
2974 ao_ref_base_alias_set (&lhs_ref
),
2980 /* 4) Assignment from an SSA name which definition we may be able
2981 to access pieces from or we can combine to a larger entity. */
2982 else if (known_eq (ref
->size
, maxsize
)
2983 && is_gimple_reg_type (vr
->type
)
2984 && !contains_storage_order_barrier_p (vr
->operands
)
2985 && gimple_assign_single_p (def_stmt
)
2986 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
2988 tree lhs
= gimple_assign_lhs (def_stmt
);
2990 poly_int64 offset2
, size2
, maxsize2
;
2991 HOST_WIDE_INT offset2i
, size2i
, offseti
;
2993 gcc_assert (lhs_ref_ok
);
2994 base2
= ao_ref_base (&lhs_ref
);
2995 offset2
= lhs_ref
.offset
;
2996 size2
= lhs_ref
.size
;
2997 maxsize2
= lhs_ref
.max_size
;
2998 reverse
= reverse_storage_order_for_component_p (lhs
);
2999 tree def_rhs
= gimple_assign_rhs1 (def_stmt
);
3001 && !storage_order_barrier_p (lhs
)
3002 && known_size_p (maxsize2
)
3003 && known_eq (maxsize2
, size2
)
3004 && adjust_offsets_for_equal_base_address (base
, &offset
,
3007 if (data
->partial_defs
.is_empty ()
3008 && known_subrange_p (offset
, maxsize
, offset2
, size2
)
3009 /* ??? We can't handle bitfield precision extracts without
3010 either using an alternate type for the BIT_FIELD_REF and
3011 then doing a conversion or possibly adjusting the offset
3012 according to endianness. */
3013 && (! INTEGRAL_TYPE_P (vr
->type
)
3014 || known_eq (ref
->size
, TYPE_PRECISION (vr
->type
)))
3015 && multiple_p (ref
->size
, BITS_PER_UNIT
))
3017 tree val
= NULL_TREE
;
3018 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs
))
3019 || type_has_mode_precision_p (TREE_TYPE (def_rhs
)))
3021 gimple_match_op
op (gimple_match_cond::UNCOND
,
3022 BIT_FIELD_REF
, vr
->type
,
3024 bitsize_int (ref
->size
),
3025 bitsize_int (offset
- offset2
));
3026 val
= vn_nary_build_or_lookup (&op
);
3028 else if (known_eq (ref
->size
, size2
))
3030 gimple_match_op
op (gimple_match_cond::UNCOND
,
3031 VIEW_CONVERT_EXPR
, vr
->type
,
3033 val
= vn_nary_build_or_lookup (&op
);
3036 && (TREE_CODE (val
) != SSA_NAME
3037 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
3038 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3039 ao_ref_base_alias_set (&lhs_ref
), val
);
3041 else if (maxsize
.is_constant (&maxsizei
)
3042 && offset
.is_constant (&offseti
)
3043 && offset2
.is_constant (&offset2i
)
3044 && size2
.is_constant (&size2i
)
3045 && ranges_known_overlap_p (offset
, maxsize
, offset2
, size2
))
3048 pd
.rhs
= SSA_VAL (def_rhs
);
3049 pd
.offset
= offset2i
;
3051 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3052 ao_ref_base_alias_set (&lhs_ref
),
3058 /* 5) For aggregate copies translate the reference through them if
3059 the copy kills ref. */
3060 else if (data
->vn_walk_kind
== VN_WALKREWRITE
3061 && gimple_assign_single_p (def_stmt
)
3062 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
3063 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
3064 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
3068 auto_vec
<vn_reference_op_s
> rhs
;
3069 vn_reference_op_t vro
;
3072 gcc_assert (lhs_ref_ok
);
3074 /* See if the assignment kills REF. */
3075 base2
= ao_ref_base (&lhs_ref
);
3076 if (!lhs_ref
.max_size_known_p ()
3078 && (TREE_CODE (base
) != MEM_REF
3079 || TREE_CODE (base2
) != MEM_REF
3080 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
3081 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
3082 TREE_OPERAND (base2
, 1))))
3083 || !stmt_kills_ref_p (def_stmt
, ref
))
3086 /* Find the common base of ref and the lhs. lhs_ops already
3087 contains valueized operands for the lhs. */
3088 i
= vr
->operands
.length () - 1;
3089 j
= lhs_ops
.length () - 1;
3090 while (j
>= 0 && i
>= 0
3091 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
3097 /* ??? The innermost op should always be a MEM_REF and we already
3098 checked that the assignment to the lhs kills vr. Thus for
3099 aggregate copies using char[] types the vn_reference_op_eq
3100 may fail when comparing types for compatibility. But we really
3101 don't care here - further lookups with the rewritten operands
3102 will simply fail if we messed up types too badly. */
3103 poly_int64 extra_off
= 0;
3104 if (j
== 0 && i
>= 0
3105 && lhs_ops
[0].opcode
== MEM_REF
3106 && maybe_ne (lhs_ops
[0].off
, -1))
3108 if (known_eq (lhs_ops
[0].off
, vr
->operands
[i
].off
))
3110 else if (vr
->operands
[i
].opcode
== MEM_REF
3111 && maybe_ne (vr
->operands
[i
].off
, -1))
3113 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
3118 /* i now points to the first additional op.
3119 ??? LHS may not be completely contained in VR, one or more
3120 VIEW_CONVERT_EXPRs could be in its way. We could at least
3121 try handling outermost VIEW_CONVERT_EXPRs. */
3125 /* Punt if the additional ops contain a storage order barrier. */
3126 for (k
= i
; k
>= 0; k
--)
3128 vro
= &vr
->operands
[k
];
3129 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
3133 /* Now re-write REF to be based on the rhs of the assignment. */
3134 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
3135 copy_reference_ops_from_ref (rhs1
, &rhs
);
3137 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3138 if (maybe_ne (extra_off
, 0))
3140 if (rhs
.length () < 2)
3142 int ix
= rhs
.length () - 2;
3143 if (rhs
[ix
].opcode
!= MEM_REF
3144 || known_eq (rhs
[ix
].off
, -1))
3146 rhs
[ix
].off
+= extra_off
;
3147 rhs
[ix
].op0
= int_const_binop (PLUS_EXPR
, rhs
[ix
].op0
,
3148 build_int_cst (TREE_TYPE (rhs
[ix
].op0
),
3152 /* Save the operands since we need to use the original ones for
3153 the hash entry we use. */
3154 if (!data
->saved_operands
.exists ())
3155 data
->saved_operands
= vr
->operands
.copy ();
3157 /* We need to pre-pend vr->operands[0..i] to rhs. */
3158 vec
<vn_reference_op_s
> old
= vr
->operands
;
3159 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
3160 vr
->operands
.safe_grow (i
+ 1 + rhs
.length ());
3162 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
3163 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
3164 vr
->operands
[i
+ 1 + j
] = *vro
;
3165 vr
->operands
= valueize_refs (vr
->operands
);
3166 if (old
== shared_lookup_references
)
3167 shared_lookup_references
= vr
->operands
;
3168 vr
->hashcode
= vn_reference_compute_hash (vr
);
3170 /* Try folding the new reference to a constant. */
3171 tree val
= fully_constant_vn_reference_p (vr
);
3174 if (data
->partial_defs
.is_empty ())
3175 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3176 ao_ref_base_alias_set (&lhs_ref
), val
);
3177 /* This is the only interesting case for partial-def handling
3178 coming from targets that like to gimplify init-ctors as
3179 aggregate copies from constant data like aarch64 for
3181 if (maxsize
.is_constant (&maxsizei
) && known_eq (ref
->size
, maxsize
))
3187 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3188 ao_ref_base_alias_set (&lhs_ref
),
3193 /* Continuing with partial defs isn't easily possible here, we
3194 have to find a full def from further lookups from here. Probably
3195 not worth the special-casing everywhere. */
3196 if (!data
->partial_defs
.is_empty ())
3199 /* Adjust *ref from the new operands. */
3201 ao_ref_init (&rhs1_ref
, rhs1
);
3202 if (!ao_ref_init_from_vn_reference (&r
, ao_ref_alias_set (&rhs1_ref
),
3203 ao_ref_base_alias_set (&rhs1_ref
),
3204 vr
->type
, vr
->operands
))
3206 /* This can happen with bitfields. */
3207 if (maybe_ne (ref
->size
, r
.size
))
3211 /* Do not update last seen VUSE after translating. */
3212 data
->last_vuse_ptr
= NULL
;
3213 /* Invalidate the original access path since it now contains
3215 data
->orig_ref
.ref
= NULL_TREE
;
3216 /* Use the alias-set of this LHS for recording an eventual result. */
3217 if (data
->first_set
== -2)
3219 data
->first_set
= ao_ref_alias_set (&lhs_ref
);
3220 data
->first_base_set
= ao_ref_base_alias_set (&lhs_ref
);
3223 /* Keep looking for the adjusted *REF / VR pair. */
3227 /* 6) For memcpy copies translate the reference through them if
3228 the copy kills ref. */
3229 else if (data
->vn_walk_kind
== VN_WALKREWRITE
3230 && is_gimple_reg_type (vr
->type
)
3231 /* ??? Handle BCOPY as well. */
3232 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
3233 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY_CHK
)
3234 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
3235 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY_CHK
)
3236 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
)
3237 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE_CHK
))
3238 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
3239 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
3240 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
3241 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
3242 && (poly_int_tree_p (gimple_call_arg (def_stmt
, 2), ©_size
)
3243 || (TREE_CODE (gimple_call_arg (def_stmt
, 2)) == SSA_NAME
3244 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt
, 2)),
3246 /* Handling this is more complicated, give up for now. */
3247 && data
->partial_defs
.is_empty ())
3251 poly_int64 rhs_offset
, lhs_offset
;
3252 vn_reference_op_s op
;
3253 poly_uint64 mem_offset
;
3254 poly_int64 at
, byte_maxsize
;
3256 /* Only handle non-variable, addressable refs. */
3257 if (maybe_ne (ref
->size
, maxsize
)
3258 || !multiple_p (offset
, BITS_PER_UNIT
, &at
)
3259 || !multiple_p (maxsize
, BITS_PER_UNIT
, &byte_maxsize
))
3262 /* Extract a pointer base and an offset for the destination. */
3263 lhs
= gimple_call_arg (def_stmt
, 0);
3265 if (TREE_CODE (lhs
) == SSA_NAME
)
3267 lhs
= vn_valueize (lhs
);
3268 if (TREE_CODE (lhs
) == SSA_NAME
)
3270 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
3271 if (gimple_assign_single_p (def_stmt
)
3272 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
3273 lhs
= gimple_assign_rhs1 (def_stmt
);
3276 if (TREE_CODE (lhs
) == ADDR_EXPR
)
3278 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
3282 if (TREE_CODE (tem
) == MEM_REF
3283 && poly_int_tree_p (TREE_OPERAND (tem
, 1), &mem_offset
))
3285 lhs
= TREE_OPERAND (tem
, 0);
3286 if (TREE_CODE (lhs
) == SSA_NAME
)
3287 lhs
= vn_valueize (lhs
);
3288 lhs_offset
+= mem_offset
;
3290 else if (DECL_P (tem
))
3291 lhs
= build_fold_addr_expr (tem
);
3295 if (TREE_CODE (lhs
) != SSA_NAME
3296 && TREE_CODE (lhs
) != ADDR_EXPR
)
3299 /* Extract a pointer base and an offset for the source. */
3300 rhs
= gimple_call_arg (def_stmt
, 1);
3302 if (TREE_CODE (rhs
) == SSA_NAME
)
3303 rhs
= vn_valueize (rhs
);
3304 if (TREE_CODE (rhs
) == ADDR_EXPR
)
3306 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
3310 if (TREE_CODE (tem
) == MEM_REF
3311 && poly_int_tree_p (TREE_OPERAND (tem
, 1), &mem_offset
))
3313 rhs
= TREE_OPERAND (tem
, 0);
3314 rhs_offset
+= mem_offset
;
3316 else if (DECL_P (tem
)
3317 || TREE_CODE (tem
) == STRING_CST
)
3318 rhs
= build_fold_addr_expr (tem
);
3322 if (TREE_CODE (rhs
) == SSA_NAME
)
3323 rhs
= SSA_VAL (rhs
);
3324 else if (TREE_CODE (rhs
) != ADDR_EXPR
)
3327 /* The bases of the destination and the references have to agree. */
3328 if (TREE_CODE (base
) == MEM_REF
)
3330 if (TREE_OPERAND (base
, 0) != lhs
3331 || !poly_int_tree_p (TREE_OPERAND (base
, 1), &mem_offset
))
3335 else if (!DECL_P (base
)
3336 || TREE_CODE (lhs
) != ADDR_EXPR
3337 || TREE_OPERAND (lhs
, 0) != base
)
3340 /* If the access is completely outside of the memcpy destination
3341 area there is no aliasing. */
3342 if (!ranges_maybe_overlap_p (lhs_offset
, copy_size
, at
, byte_maxsize
))
3344 /* And the access has to be contained within the memcpy destination. */
3345 if (!known_subrange_p (at
, byte_maxsize
, lhs_offset
, copy_size
))
3348 /* Save the operands since we need to use the original ones for
3349 the hash entry we use. */
3350 if (!data
->saved_operands
.exists ())
3351 data
->saved_operands
= vr
->operands
.copy ();
3353 /* Make room for 2 operands in the new reference. */
3354 if (vr
->operands
.length () < 2)
3356 vec
<vn_reference_op_s
> old
= vr
->operands
;
3357 vr
->operands
.safe_grow_cleared (2);
3358 if (old
== shared_lookup_references
)
3359 shared_lookup_references
= vr
->operands
;
3362 vr
->operands
.truncate (2);
3364 /* The looked-through reference is a simple MEM_REF. */
3365 memset (&op
, 0, sizeof (op
));
3367 op
.opcode
= MEM_REF
;
3368 op
.op0
= build_int_cst (ptr_type_node
, at
- lhs_offset
+ rhs_offset
);
3369 op
.off
= at
- lhs_offset
+ rhs_offset
;
3370 vr
->operands
[0] = op
;
3371 op
.type
= TREE_TYPE (rhs
);
3372 op
.opcode
= TREE_CODE (rhs
);
3375 vr
->operands
[1] = op
;
3376 vr
->hashcode
= vn_reference_compute_hash (vr
);
3378 /* Try folding the new reference to a constant. */
3379 tree val
= fully_constant_vn_reference_p (vr
);
3381 return data
->finish (0, 0, val
);
3383 /* Adjust *ref from the new operands. */
3384 if (!ao_ref_init_from_vn_reference (&r
, 0, 0, vr
->type
, vr
->operands
))
3386 /* This can happen with bitfields. */
3387 if (maybe_ne (ref
->size
, r
.size
))
3391 /* Do not update last seen VUSE after translating. */
3392 data
->last_vuse_ptr
= NULL
;
3393 /* Invalidate the original access path since it now contains
3395 data
->orig_ref
.ref
= NULL_TREE
;
3396 /* Use the alias-set of this stmt for recording an eventual result. */
3397 if (data
->first_set
== -2)
3399 data
->first_set
= 0;
3400 data
->first_base_set
= 0;
3403 /* Keep looking for the adjusted *REF / VR pair. */
3407 /* Bail out and stop walking. */
3411 /* Return a reference op vector from OP that can be used for
3412    vn_reference_lookup_pieces.  The caller is responsible for releasing
3415 vec<vn_reference_op_s>
3416 vn_reference_operands_for_lookup (tree op)
3419   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
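/* Typical use, as a rough sketch (the local variable names are
   illustrative):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup_pieces (vuse, set, base_set,
                                            TREE_TYPE (op), ops,
                                            &vnresult, VN_WALK);
     ops.release ();

   The returned vector is a copy and must be released by the caller.  */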
3422 /* Lookup a reference operation by its parts, in the current hash table.
3423    Returns the resulting value number if it exists in the hash table,
3424    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
3425    vn_reference_t stored in the hashtable if something is found.  */
3428 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3429                             alias_set_type base_set, tree type,
3430                             vec<vn_reference_op_s> operands,
3431                             vn_reference_t *vnresult, vn_lookup_kind kind)
3433   struct vn_reference_s vr1;
3441   vr1.vuse = vuse_ssa_val (vuse);
3442   shared_lookup_references.truncate (0);
3443   shared_lookup_references.safe_grow (operands.length ());
3444   memcpy (shared_lookup_references.address (),
3445           operands.address (),
3446           sizeof (vn_reference_op_s)
3447           * operands.length ());
3448   vr1.operands = operands = shared_lookup_references
3449     = valueize_refs (shared_lookup_references);
3452   vr1.base_set = base_set;
3453   vr1.hashcode = vn_reference_compute_hash (&vr1);
3454   if ((cst = fully_constant_vn_reference_p (&vr1)))
3457   vn_reference_lookup_1 (&vr1, vnresult);
3459       && kind != VN_NOWALK
3463       unsigned limit = param_sccvn_max_alias_queries_per_access;
3464       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3465       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3469           walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3470                                   vn_reference_lookup_3, vuse_valueize,
3472       gcc_checking_assert (vr1.operands == shared_lookup_references);
3476   return (*vnresult)->result;
3481 /* Lookup OP in the current hash table, and return the resulting value
3482 number if it exists in the hash table. Return NULL_TREE if it does
3483 not exist in the hash table or if the result field of the structure
3484    was NULL.  VNRESULT will be filled in with the vn_reference_t
3485    stored in the hashtable if one exists.  When TBAA_P is false assume
3486    we are looking up a store and treat it as having alias-set zero.
3487    *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.
3488 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3489 load is bitwise anded with MASK and so we are only interested in a subset
3490 of the bits and can ignore if the other bits are uninitialized or
3491 not initialized with constants. */
3494 vn_reference_lookup (tree op
, tree vuse
, vn_lookup_kind kind
,
3495 vn_reference_t
*vnresult
, bool tbaa_p
,
3496 tree
*last_vuse_ptr
, tree mask
)
3498 vec
<vn_reference_op_s
> operands
;
3499 struct vn_reference_s vr1
;
3500 bool valuezied_anything
;
3505 vr1
.vuse
= vuse_ssa_val (vuse
);
3506 vr1
.operands
= operands
3507 = valueize_shared_reference_ops_from_ref (op
, &valuezied_anything
);
3508 vr1
.type
= TREE_TYPE (op
);
3510 ao_ref_init (&op_ref
, op
);
3511 vr1
.set
= ao_ref_alias_set (&op_ref
);
3512 vr1
.base_set
= ao_ref_base_alias_set (&op_ref
);
3513 vr1
.hashcode
= vn_reference_compute_hash (&vr1
);
3514 if (mask
== NULL_TREE
)
3515 if (tree cst
= fully_constant_vn_reference_p (&vr1
))
3518 if (kind
!= VN_NOWALK
&& vr1
.vuse
)
3520 vn_reference_t wvnresult
;
3522 unsigned limit
= param_sccvn_max_alias_queries_per_access
;
3523 /* Make sure to use a valueized reference if we valueized anything.
3524 Otherwise preserve the full reference for advanced TBAA. */
3525 if (!valuezied_anything
3526 || !ao_ref_init_from_vn_reference (&r
, vr1
.set
, vr1
.base_set
,
3527 vr1
.type
, vr1
.operands
))
3528 ao_ref_init (&r
, op
);
3529 vn_walk_cb_data
data (&vr1
, r
.ref
? NULL_TREE
: op
,
3530 last_vuse_ptr
, kind
, tbaa_p
, mask
);
3534 walk_non_aliased_vuses (&r
, vr1
.vuse
, tbaa_p
, vn_reference_lookup_2
,
3535 vn_reference_lookup_3
, vuse_valueize
, limit
,
3537 gcc_checking_assert (vr1
.operands
== shared_lookup_references
);
3540 gcc_assert (mask
== NULL_TREE
);
3542 *vnresult
= wvnresult
;
3543 return wvnresult
->result
;
3546 return data
.masked_result
;
3552 *last_vuse_ptr
= vr1
.vuse
;
3555 return vn_reference_lookup_1 (&vr1
, vnresult
);
3558 /* Lookup CALL in the current hash table and return the entry in
3559    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3562 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3568   tree vuse = gimple_vuse (call);
3570   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3571   vr->operands = valueize_shared_reference_ops_from_call (call);
3572   vr->type = gimple_expr_type (call);
3575   vr->hashcode = vn_reference_compute_hash (vr);
3576   vn_reference_lookup_1 (vr, vnresult);
3579 /* Insert OP into the current hash table with a value number of RESULT.  */
3582 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3584   vn_reference_s **slot;
3588   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3589   if (TREE_CODE (result) == SSA_NAME)
3590     vr1->value_id = VN_INFO (result)->value_id;
3592     vr1->value_id = get_or_alloc_constant_value_id (result);
3593   vr1->vuse = vuse_ssa_val (vuse);
3594   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3595   vr1->type = TREE_TYPE (op);
3597   ao_ref_init (&op_ref, op);
3598   vr1->set = ao_ref_alias_set (&op_ref);
3599   vr1->base_set = ao_ref_base_alias_set (&op_ref);
3600   vr1->hashcode = vn_reference_compute_hash (vr1);
3601   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3602   vr1->result_vdef = vdef;
3604   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3607   /* Because IL walking on reference lookup can end up visiting
3608      a def that is only to be visited later in iteration order
3609      when we are about to make an irreducible region reducible
3610      the def can be effectively processed and its ref being inserted
3611      by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
3612      but save a lookup if we deal with already inserted refs here.  */
3615       /* We cannot assert that we have the same value either because
3616          when disentangling an irreducible region we may end up visiting
3617          a use before the corresponding def.  That's a missed optimization
3618          only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3619       if (dump_file && (dump_flags & TDF_DETAILS)
3620           && !operand_equal_p ((*slot)->result, vr1->result, 0))
3622           fprintf (dump_file, "Keeping old value ");
3623           print_generic_expr (dump_file, (*slot)->result);
3624           fprintf (dump_file, " because of collision\n");
3626       free_reference (vr1);
3627       obstack_free (&vn_tables_obstack, vr1);
3632   vr1->next = last_inserted_ref;
3633   last_inserted_ref = vr1;
3636 /* Insert a reference by its pieces into the current hash table with
3637    a value number of RESULT.  Return the resulting reference
3638    structure we created.  */
3641 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3642                             alias_set_type base_set, tree type,
3643                             vec<vn_reference_op_s> operands,
3644                             tree result, unsigned int value_id)
3647   vn_reference_s **slot;
3650   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3651   vr1->value_id = value_id;
3652   vr1->vuse = vuse_ssa_val (vuse);
3653   vr1->operands = valueize_refs (operands);
3656   vr1->base_set = base_set;
3657   vr1->hashcode = vn_reference_compute_hash (vr1);
3658   if (result && TREE_CODE (result) == SSA_NAME)
3659     result = SSA_VAL (result);
3660   vr1->result = result;
3662   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3665   /* At this point we should have all the things inserted that we have
3666      seen before, and we should never try inserting something that
3668   gcc_assert (!*slot);
3671   vr1->next = last_inserted_ref;
3672   last_inserted_ref = vr1;
3676 /* Compute and return the hash value for nary operation VNO1.  */
3679 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3681   inchash::hash hstate;
3684   for (i = 0; i < vno1->length; ++i)
3685     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3686       vno1->op[i] = SSA_VAL (vno1->op[i]);
3688   if (((vno1->length == 2
3689         && commutative_tree_code (vno1->opcode))
3690        || (vno1->length == 3
3691            && commutative_ternary_tree_code (vno1->opcode)))
3692       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3693     std::swap (vno1->op[0], vno1->op[1]);
3694   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3695            && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3697       std::swap (vno1->op[0], vno1->op[1]);
3698       vno1->opcode = swap_tree_comparison (vno1->opcode);
3701   hstate.add_int (vno1->opcode);
3702   for (i = 0; i < vno1->length; ++i)
3703     inchash::add_expr (vno1->op[i], hstate);
3705   return hstate.end ();
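/* For example, {5, a_1} for a commutative PLUS_EXPR is canonicalized to
   {a_1, 5} by the swap above, so "5 + a_1" and "a_1 + 5" receive the same
   hash; likewise a comparison "5 < a_1" is rewritten to "a_1 > 5" by
   swapping the operands and the comparison code.  */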
3708 /* Compare nary operations VNO1 and VNO2 and return true if they are
3712 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3716   if (vno1->hashcode != vno2->hashcode)
3719   if (vno1->length != vno2->length)
3722   if (vno1->opcode != vno2->opcode
3723       || !types_compatible_p (vno1->type, vno2->type))
3726   for (i = 0; i < vno1->length; ++i)
3727     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3730   /* BIT_INSERT_EXPR has an implicit operand as the type precision
3731      of op1.  Need to check to make sure they are the same.  */
3732   if (vno1->opcode == BIT_INSERT_EXPR
3733       && TREE_CODE (vno1->op[1]) == INTEGER_CST
3734       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3735          != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3741 /* Initialize VNO from the pieces provided.  */
3744 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3745                              enum tree_code code, tree type, tree *ops)
3748   vno->length = length;
3750   memcpy (&vno->op[0], ops, sizeof (tree) * length);
3753 /* Return the number of operands for a vn_nary ops structure from STMT.  */
3756 vn_nary_length_from_stmt (gimple *stmt)
3758   switch (gimple_assign_rhs_code (stmt))
3762     case VIEW_CONVERT_EXPR:
3769       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3772       return gimple_num_ops (stmt) - 1;
3776 /* Initialize VNO from STMT.  */
3779 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3783   vno->opcode = gimple_assign_rhs_code (stmt);
3784   vno->type = gimple_expr_type (stmt);
3785   switch (vno->opcode)
3789     case VIEW_CONVERT_EXPR:
3791       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3796       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3797       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3798       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3802       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3803       for (i = 0; i < vno->length; ++i)
3804         vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3808       gcc_checking_assert (!gimple_assign_single_p (stmt));
3809       vno->length = gimple_num_ops (stmt) - 1;
3810       for (i = 0; i < vno->length; ++i)
3811         vno->op[i] = gimple_op (stmt, i + 1);
3815 /* Compute the hashcode for VNO and look for it in the hash table;
3816    return the resulting value number if it exists in the hash table.
3817    Return NULL_TREE if it does not exist in the hash table or if the
3818    result field of the operation is NULL.  VNRESULT will contain the
3819    vn_nary_op_t from the hashtable if it exists.  */
3822 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3824   vn_nary_op_s **slot;
3829   vno->hashcode = vn_nary_op_compute_hash (vno);
3830   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3835   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3838 /* Lookup a n-ary operation by its pieces and return the resulting value
3839    number if it exists in the hash table.  Return NULL_TREE if it does
3840    not exist in the hash table or if the result field of the operation
3841    is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
3845 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3846                           tree type, tree *ops, vn_nary_op_t *vnresult)
3848   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3849                                   sizeof_vn_nary_op (length));
3850   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3851   return vn_nary_op_lookup_1 (vno1, vnresult);
3854 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3855    value number if it exists in the hash table.  Return NULL_TREE if
3856    it does not exist in the hash table.  VNRESULT will contain the
3857    vn_nary_op_t from the hashtable if it exists.  */
3860 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3863     = XALLOCAVAR (struct vn_nary_op_s,
3864                   sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3865   init_vn_nary_op_from_stmt (vno1, stmt);
3866   return vn_nary_op_lookup_1 (vno1, vnresult);
3869 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3872 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3874   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3877 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3881 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3883   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3885   vno1->value_id = value_id;
3886   vno1->length = length;
3887   vno1->predicated_values = 0;
3888   vno1->u.result = result;
3893 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3894 VNO->HASHCODE first. */
3897 vn_nary_op_insert_into (vn_nary_op_t vno
, vn_nary_op_table_type
*table
,
3900 vn_nary_op_s
**slot
;
3904 vno
->hashcode
= vn_nary_op_compute_hash (vno
);
3905 gcc_assert (! vno
->predicated_values
3906 || (! vno
->u
.values
->next
3907 && vno
->u
.values
->n
== 1));
3910 slot
= table
->find_slot_with_hash (vno
, vno
->hashcode
, INSERT
);
3911 vno
->unwind_to
= *slot
;
3914 /* Prefer non-predicated values.
3915 ??? Only if those are constant, otherwise, with constant predicated
3916 value, turn them into predicated values with entry-block validity
3917 (??? but we always find the first valid result currently). */
3918 if ((*slot
)->predicated_values
3919 && ! vno
->predicated_values
)
3921 /* ??? We cannot remove *slot from the unwind stack list.
3922 For the moment we deal with this by skipping not found
3923 entries but this isn't ideal ... */
3925 /* ??? Maintain a stack of states we can unwind in
3926 vn_nary_op_s? But how far do we unwind? In reality
3927 we need to push change records somewhere... Or not
3928 unwind vn_nary_op_s and linking them but instead
3929 unwind the results "list", linking that, which also
3930 doesn't move on hashtable resize. */
3931 /* We can also have a ->unwind_to recording *slot there.
3932 That way we can make u.values a fixed size array with
3933 recording the number of entries but of course we then
3934 have always N copies for each unwind_to-state. Or we
3935 make sure to only ever append and each unwinding will
3936 pop off one entry (but how to deal with predicated
3937 replaced with non-predicated here?) */
3938 vno
->next
= last_inserted_nary
;
3939 last_inserted_nary
= vno
;
3942 else if (vno
->predicated_values
3943 && ! (*slot
)->predicated_values
)
3945 else if (vno
->predicated_values
3946 && (*slot
)->predicated_values
)
3948       /* ??? Factor this all into an insert_single_predicated_value
->u
.values
->next
&& vno
->u
.values
->n
== 1);
3952 = BASIC_BLOCK_FOR_FN (cfun
, vno
->u
.values
->valid_dominated_by_p
[0]);
3953 vn_pval
*nval
= vno
->u
.values
;
3954 vn_pval
**next
= &vno
->u
.values
;
3956 for (vn_pval
*val
= (*slot
)->u
.values
; val
; val
= val
->next
)
3958 if (expressions_equal_p (val
->result
, vno
->u
.values
->result
))
3961 for (unsigned i
= 0; i
< val
->n
; ++i
)
3964 = BASIC_BLOCK_FOR_FN (cfun
,
3965 val
->valid_dominated_by_p
[i
]);
3966 if (dominated_by_p (CDI_DOMINATORS
, vno_bb
, val_bb
))
3967 /* Value registered with more generic predicate. */
3969 else if (dominated_by_p (CDI_DOMINATORS
, val_bb
, vno_bb
))
3970 /* Shouldn't happen, we insert in RPO order. */
3974 *next
= (vn_pval
*) obstack_alloc (&vn_tables_obstack
,
3976 + val
->n
* sizeof (int));
3977 (*next
)->next
= NULL
;
3978 (*next
)->result
= val
->result
;
3979 (*next
)->n
= val
->n
+ 1;
3980 memcpy ((*next
)->valid_dominated_by_p
,
3981 val
->valid_dominated_by_p
,
3982 val
->n
* sizeof (int));
3983 (*next
)->valid_dominated_by_p
[val
->n
] = vno_bb
->index
;
3984 next
= &(*next
)->next
;
3985 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3986 fprintf (dump_file
, "Appending predicate to value.\n");
3989 /* Copy other predicated values. */
3990 *next
= (vn_pval
*) obstack_alloc (&vn_tables_obstack
,
3992 + (val
->n
-1) * sizeof (int));
3993 memcpy (*next
, val
, sizeof (vn_pval
) + (val
->n
-1) * sizeof (int));
3994 (*next
)->next
= NULL
;
3995 next
= &(*next
)->next
;
4001 vno
->next
= last_inserted_nary
;
4002 last_inserted_nary
= vno
;
4006 /* While we do not want to insert things twice it's awkward to
4007 avoid it in the case where visit_nary_op pattern-matches stuff
4008 and ends up simplifying the replacement to itself. We then
4009 get two inserts, one from visit_nary_op and one from
4010 vn_nary_build_or_lookup.
4011 So allow inserts with the same value number. */
4012 if ((*slot
)->u
.result
== vno
->u
.result
)
4016   /* ??? There's also optimistic vs. previous committed state merging
4017      that is problematic for the case of unwinding.  */
4019 /* ??? We should return NULL if we do not use 'vno' and have the
4020 caller release it. */
4021 gcc_assert (!*slot
);
4024 vno
->next
= last_inserted_nary
;
4025 last_inserted_nary
= vno
;
4029 /* Insert a n-ary operation into the current hash table using its
4030    pieces.  Return the vn_nary_op_t structure we created and put in
4034 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4035                           tree type, tree *ops,
4036                           tree result, unsigned int value_id)
4038   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4039   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4040   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4044 vn_nary_op_insert_pieces_predicated (unsigned int length
, enum tree_code code
,
4045 tree type
, tree
*ops
,
4046 tree result
, unsigned int value_id
,
4049 /* ??? Currently tracking BBs. */
4050 if (! single_pred_p (pred_e
->dest
))
4052 /* Never record for backedges. */
4053 if (pred_e
->flags
& EDGE_DFS_BACK
)
4058 /* Ignore backedges. */
4059 FOR_EACH_EDGE (e
, ei
, pred_e
->dest
->preds
)
4060 if (! dominated_by_p (CDI_DOMINATORS
, e
->src
, e
->dest
))
4065 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
4066 /* ??? Fix dumping, but currently we only get comparisons. */
4067 && TREE_CODE_CLASS (code
) == tcc_comparison
)
4069 fprintf (dump_file
, "Recording on edge %d->%d ", pred_e
->src
->index
,
4070 pred_e
->dest
->index
);
4071 print_generic_expr (dump_file
, ops
[0], TDF_SLIM
);
4072 fprintf (dump_file
, " %s ", get_tree_code_name (code
));
4073 print_generic_expr (dump_file
, ops
[1], TDF_SLIM
);
4074 fprintf (dump_file
, " == %s\n",
4075 integer_zerop (result
) ? "false" : "true");
4077 vn_nary_op_t vno1
= alloc_vn_nary_op (length
, NULL_TREE
, value_id
);
4078 init_vn_nary_op_from_pieces (vno1
, length
, code
, type
, ops
);
4079 vno1
->predicated_values
= 1;
4080 vno1
->u
.values
= (vn_pval
*) obstack_alloc (&vn_tables_obstack
,
4082 vno1
->u
.values
->next
= NULL
;
4083 vno1
->u
.values
->result
= result
;
4084 vno1
->u
.values
->n
= 1;
4085 vno1
->u
.values
->valid_dominated_by_p
[0] = pred_e
->dest
->index
;
4086 return vn_nary_op_insert_into (vno1
, valid_info
->nary
, true);
4090 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
4093 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4095   if (! vno->predicated_values)
4096     return vno->u.result;
4097   for (vn_pval *val = vno->u.values; val; val = val->next)
4098     for (unsigned i = 0; i < val->n; ++i)
4099       if (dominated_by_p_w_unex (bb,
4101                                  (cfun, val->valid_dominated_by_p[i])))
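/* For example, a value recorded for the edge taken when a_1 != 0 holds is
   stored with the index of that edge's destination block in
   valid_dominated_by_p[]; the walk above yields the recorded result only
   in blocks dominated (in the dominated_by_p_w_unex sense) by one of
   those blocks.  */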
4106 /* Insert the rhs of STMT into the current hash table with a value number of
4110 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4113     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4114                         result, VN_INFO (result)->value_id);
4115   init_vn_nary_op_from_stmt (vno1, stmt);
4116   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4119 /* Compute a hashcode for PHI operation VP1 and return it.  */
4121 static inline hashval_t
4122 vn_phi_compute_hash (vn_phi_t vp1)
4124   inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
4125                         ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
4131   /* If all PHI arguments are constants we need to distinguish
4132      the PHI node via its type.  */
4134   hstate.merge_hash (vn_hash_type (type));
4136   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4138       /* Don't hash backedge values they need to be handled as VN_TOP
4139          for optimistic value-numbering.  */
4140       if (e->flags & EDGE_DFS_BACK)
4143       phi1op = vp1->phiargs[e->dest_idx];
4144       if (phi1op == VN_TOP)
4146       inchash::add_expr (phi1op, hstate);
4149   return hstate.end ();
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
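/* Illustrative note (not part of the original code): for example
   a_1 < b_2 matches b_2 > a_1 directly after swapping the operands,
   and matches a_1 >= b_2 with *INVERTED_P set, provided honoring NaNs
   does not forbid the inversion for floating-point operands.  */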
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
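/* Illustrative note (not part of the original code): the two-predecessor
   case above allows CSEing PHIs from different blocks, e.g. two diamonds
   that are controlled by the same (possibly swapped or inverted)
   condition a_1 < b_2 and whose arguments on the true and false edges
   match up.  */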
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (prede)
	{
	  bb1 = prede->src;

	  /* Re-do the dominance check with changed bb1.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
	if (succe)
	  {
	    succe = NULL;
	    break;
	  }
	succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare us the dominator
	 check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
	{
	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
	    if (e != succe
		&& (e->flags & EDGE_EXECUTABLE))
	      {
		succe = NULL;
		break;
	      }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
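/* Illustrative note (not part of the original code): if the CFG dominator
   info says BB2 does not dominate BB1 only because of a path through an
   edge that value numbering already marked not executable, the walk above
   can still answer true by stepping over the single executable predecessor
   resp. successor.  */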
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
	 for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
			&& ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
	  && !curr_invariant
	  && !curr_undefined
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (currval != VN_TOP
	       && !curr_undefined
	       && TREE_CODE (to) == SSA_NAME
	       && ssa_undefined_value_p (to, false))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-undefined) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (undefined)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(curr_undefined
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (to != from
	  && currval != VN_TOP
	  && !curr_undefined
	  /* We do not want to allow lattice transitions from one value
	     to another since that may lead to not terminating iteration
	     (see PR95049).  Since there's no convenient way to check
	     for the allowed transition of VAL -> PHI (loop entry value,
	     same on two PHIs, to same PHI result) we restrict the check
	     to invariants.  */
	  && curr_invariant
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " forced VARYING");
	  to = from;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  rhs = SSA_VAL (rhs);
  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is widened to WIDE_TYPE.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
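/* Illustrative note (not part of the original code): for a short OP_1 and
   WIDE_TYPE int this returns an existing value for (int) op_1 if one is in
   the hash table, op_2 if op_1 was defined as a truncation
   op_1 = (short) op_2 from an int value, or a directly extended
   constant.  */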
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes, zero-extension -> & mask or
	     sign-extension if we know the inner operation doesn't
	     overflow.  */
	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def));
	      if (ops[0])
		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def));
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0]
		      /* If the leader is a wrapping operation we can
			 insert it for code hoisting w/o introducing
			 undefined overflow.  If it is not it has to
			 be available.  See PR86554.  */
		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
			  || (rpo_avail && vn_context_bb
			      && rpo_avail->eliminate_avail (vn_context_bb,
							     ops[0]))))
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec
			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			      (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR, type,
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    case BIT_AND_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
	  && default_vn_walk_kind != VN_NOWALK
	  && CHAR_BIT == 8
	  && BITS_PER_UNIT == 8
	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
	{
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
						 default_vn_walk_kind,
						 NULL, true, &last_vuse,
						 gimple_assign_rhs2 (stmt));
	      if (result
		  && useless_type_conversion_p (TREE_TYPE (result),
						TREE_TYPE (op)))
		return set_ssa_val_to (lhs, result);
	    }
	}
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
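/* Illustrative note for the conversion case above (not part of the
   original code): given

     short c_3 = a_1 + b_2;
     int   d_4 = (int) c_3;

   and an already value-numbered wider computation

     int t_5 = (int) a_1;
     int t_6 = (int) b_2;
     int t_7 = t_5 + t_6;

   d_4 can re-use t_7 when the precisions agree or the narrow operation
   cannot overflow, and t_7 & 0xffff for the zero-extension case.  */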
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
	/* If the call was discovered to be pure or const reflect
	   that as far as possible.  */
	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered the function of an indirect call to
	     one not clobbering memory, value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE)))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree result;
  tree last_vuse;

  last_vuse = gimple_vuse (stmt);
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Avoid the type punning in case the result mode has padding where
	 the op we lookup has not.  */
      if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
	result = NULL_TREE;
      else
	{
	  /* We will be setting the value number of lhs to the value number
	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	     So first simplify and lookup this expression to see if it
	     is already available.  */
	  gimple_match_op res_op (gimple_match_cond::UNCOND,
				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
	  result = vn_nary_build_or_lookup (&res_op);
	  /* When building the conversion fails avoid inserting the reference
	     again.  */
	  if (! result)
	    return set_ssa_val_to (lhs, lhs);
	}
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
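/* Illustrative note (not part of the original code): value-numbering loads
   by offset and size means that for  union { int i; float f; } u;  a read
   of u.f after a store to u.i finds the stored value with the wrong type;
   the VIEW_CONVERT_EXPR built above papers over that type mismatch.  */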
4958 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4959 and return true if the value number of the LHS has changed as a result. */
4962 visit_reference_op_store (tree lhs
, tree op
, gimple
*stmt
)
4964 bool changed
= false;
4965 vn_reference_t vnresult
= NULL
;
4967 bool resultsame
= false;
4968 tree vuse
= gimple_vuse (stmt
);
4969 tree vdef
= gimple_vdef (stmt
);
4971 if (TREE_CODE (op
) == SSA_NAME
)
4974 /* First we want to lookup using the *vuses* from the store and see
4975 if there the last store to this location with the same address
4978 The vuses represent the memory state before the store. If the
4979 memory state, address, and value of the store is the same as the
4980 last store to this location, then this store will produce the
4981 same memory state as that store.
4983 In this case the vdef versions for this store are value numbered to those
4984 vuse versions, since they represent the same memory state after
4987 Otherwise, the vdefs for the store are used when inserting into
4988 the table, since the store generates a new memory state. */
4990 vn_reference_lookup (lhs
, vuse
, VN_NOWALK
, &vnresult
, false);
4992 && vnresult
->result
)
4994 tree result
= vnresult
->result
;
4995 gcc_checking_assert (TREE_CODE (result
) != SSA_NAME
4996 || result
== SSA_VAL (result
));
4997 resultsame
= expressions_equal_p (result
, op
);
5000 /* If the TBAA state isn't compatible for downstream reads
5001 we cannot value-number the VDEFs the same. */
5003 ao_ref_init (&lhs_ref
, lhs
);
5004 alias_set_type set
= ao_ref_alias_set (&lhs_ref
);
5005 alias_set_type base_set
= ao_ref_base_alias_set (&lhs_ref
);
5006 if ((vnresult
->set
!= set
5007 && ! alias_set_subset_of (set
, vnresult
->set
))
5008 || (vnresult
->base_set
!= base_set
5009 && ! alias_set_subset_of (base_set
, vnresult
->base_set
)))
5016 /* Only perform the following when being called from PRE
5017 which embeds tail merging. */
5018 if (default_vn_walk_kind
== VN_WALK
)
5020 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
5021 vn_reference_lookup (assign
, vuse
, VN_NOWALK
, &vnresult
, false);
5024 VN_INFO (vdef
)->visited
= true;
5025 return set_ssa_val_to (vdef
, vnresult
->result_vdef
);
5029 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5031 fprintf (dump_file
, "No store match\n");
5032 fprintf (dump_file
, "Value numbering store ");
5033 print_generic_expr (dump_file
, lhs
);
5034 fprintf (dump_file
, " to ");
5035 print_generic_expr (dump_file
, op
);
5036 fprintf (dump_file
, "\n");
5038 /* Have to set value numbers before insert, since insert is
5039 going to valueize the references in-place. */
5041 changed
|= set_ssa_val_to (vdef
, vdef
);
5043 /* Do not insert structure copies into the tables. */
5044 if (is_gimple_min_invariant (op
)
5045 || is_gimple_reg (op
))
5046 vn_reference_insert (lhs
, op
, vdef
, NULL
);
5048 /* Only perform the following when being called from PRE
5049 which embeds tail merging. */
5050 if (default_vn_walk_kind
== VN_WALK
)
5052 assign
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, op
);
5053 vn_reference_insert (assign
, lhs
, vuse
, vdef
);
5058 /* We had a match, so value number the vdef to have the value
5059 number of the vuse it came from. */
5061 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5062 fprintf (dump_file
, "Store matched earlier value, "
5063 "value numbering store vdefs to matching vuses.\n");
5065 changed
|= set_ssa_val_to (vdef
, SSA_VAL (vuse
));
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */
5078 visit_phi (gimple
*phi
, bool *inserted
, bool backedges_varying_p
)
5080 tree result
, sameval
= VN_TOP
, seen_undef
= NULL_TREE
;
5081 tree backedge_val
= NULL_TREE
;
5082 bool seen_non_backedge
= false;
5083 tree sameval_base
= NULL_TREE
;
5084 poly_int64 soff
, doff
;
5085 unsigned n_executable
= 0;
5089 /* TODO: We could check for this in initialization, and replace this
5090 with a gcc_assert. */
5091 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi
)))
5092 return set_ssa_val_to (PHI_RESULT (phi
), PHI_RESULT (phi
));
5094 /* We track whether a PHI was CSEd to to avoid excessive iterations
5095 that would be necessary only because the PHI changed arguments
5098 gimple_set_plf (phi
, GF_PLF_1
, false);
5100 /* See if all non-TOP arguments have the same value. TOP is
5101 equivalent to everything, so we can ignore it. */
5102 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
5103 if (e
->flags
& EDGE_EXECUTABLE
)
5105 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
5108 if (TREE_CODE (def
) == SSA_NAME
)
5110 if (!backedges_varying_p
|| !(e
->flags
& EDGE_DFS_BACK
))
5111 def
= SSA_VAL (def
);
5112 if (e
->flags
& EDGE_DFS_BACK
)
5115 if (!(e
->flags
& EDGE_DFS_BACK
))
5116 seen_non_backedge
= true;
5119 /* Ignore undefined defs for sameval but record one. */
5120 else if (TREE_CODE (def
) == SSA_NAME
5121 && ! virtual_operand_p (def
)
5122 && ssa_undefined_value_p (def
, false))
5124 else if (sameval
== VN_TOP
)
5126 else if (!expressions_equal_p (def
, sameval
))
5128 /* We know we're arriving only with invariant addresses here,
5129 try harder comparing them. We can do some caching here
5130 which we cannot do in expressions_equal_p. */
5131 if (TREE_CODE (def
) == ADDR_EXPR
5132 && TREE_CODE (sameval
) == ADDR_EXPR
5133 && sameval_base
!= (void *)-1)
5136 sameval_base
= get_addr_base_and_unit_offset
5137 (TREE_OPERAND (sameval
, 0), &soff
);
5139 sameval_base
= (tree
)(void *)-1;
5140 else if ((get_addr_base_and_unit_offset
5141 (TREE_OPERAND (def
, 0), &doff
) == sameval_base
)
5142 && known_eq (soff
, doff
))
5145 sameval
= NULL_TREE
;
  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
5159 && !seen_non_backedge
5160 && TREE_CODE (backedge_val
) == SSA_NAME
5161 && sameval
== backedge_val
5162 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val
)
5163 || SSA_VAL (backedge_val
) != backedge_val
))
5164 /* Do not value-number a virtual operand to sth not visited though
5165 given that allows us to escape a region in alias walking. */
5167 && TREE_CODE (sameval
) == SSA_NAME
5168 && !SSA_NAME_IS_DEFAULT_DEF (sameval
)
5169 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval
)
5170 && (SSA_VAL (sameval
, &visited_p
), !visited_p
)))
5171 /* Note this just drops to VARYING without inserting the PHI into
5173 result
= PHI_RESULT (phi
);
5174 /* If none of the edges was executable keep the value-number at VN_TOP,
5175 if only a single edge is exectuable use its value. */
5176 else if (n_executable
<= 1)
5177 result
= seen_undef
? seen_undef
: sameval
;
5178 /* If we saw only undefined values and VN_TOP use one of the
5179 undefined values. */
5180 else if (sameval
== VN_TOP
)
5181 result
= seen_undef
? seen_undef
: sameval
;
5182 /* First see if it is equivalent to a phi node in this block. We prefer
5183 this as it allows IV elimination - see PRs 66502 and 67167. */
5184 else if ((result
= vn_phi_lookup (phi
, backedges_varying_p
)))
5187 && TREE_CODE (result
) == SSA_NAME
5188 && gimple_code (SSA_NAME_DEF_STMT (result
)) == GIMPLE_PHI
)
5190 gimple_set_plf (SSA_NAME_DEF_STMT (result
), GF_PLF_1
, true);
5191 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5193 fprintf (dump_file
, "Marking CSEd to PHI node ");
5194 print_gimple_expr (dump_file
, SSA_NAME_DEF_STMT (result
),
5196 fprintf (dump_file
, "\n");
5200 /* If all values are the same use that, unless we've seen undefined
5201 values as well and the value isn't constant.
5202 CCP/copyprop have the same restriction to not remove uninit warnings. */
5204 && (! seen_undef
|| is_gimple_min_invariant (sameval
)))
5208 result
= PHI_RESULT (phi
);
5209 /* Only insert PHIs that are varying, for constant value numbers
5210 we mess up equivalences otherwise as we are only comparing
5211 the immediate controlling predicates. */
5212 vn_phi_insert (phi
, result
, backedges_varying_p
);
5217 return set_ssa_val_to (PHI_RESULT (phi
), result
);
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
5245 /* Visit and value number STMT, return true if the value number
5249 visit_stmt (gimple
*stmt
, bool backedges_varying_p
= false)
5251 bool changed
= false;
5253 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5255 fprintf (dump_file
, "Value numbering stmt = ");
5256 print_gimple_stmt (dump_file
, stmt
, 0);
5259 if (gimple_code (stmt
) == GIMPLE_PHI
)
5260 changed
= visit_phi (stmt
, NULL
, backedges_varying_p
);
5261 else if (gimple_has_volatile_ops (stmt
))
5262 changed
= defs_to_varying (stmt
);
5263 else if (gassign
*ass
= dyn_cast
<gassign
*> (stmt
))
5265 enum tree_code code
= gimple_assign_rhs_code (ass
);
5266 tree lhs
= gimple_assign_lhs (ass
);
5267 tree rhs1
= gimple_assign_rhs1 (ass
);
5270 /* Shortcut for copies. Simplifying copies is pointless,
5271 since we copy the expression and value they represent. */
5272 if (code
== SSA_NAME
5273 && TREE_CODE (lhs
) == SSA_NAME
)
5275 changed
= visit_copy (lhs
, rhs1
);
5278 simplified
= try_to_simplify (ass
);
5281 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5283 fprintf (dump_file
, "RHS ");
5284 print_gimple_expr (dump_file
, ass
, 0);
5285 fprintf (dump_file
, " simplified to ");
5286 print_generic_expr (dump_file
, simplified
);
5287 fprintf (dump_file
, "\n");
5290 /* Setting value numbers to constants will occasionally
5291 screw up phi congruence because constants are not
5292 uniquely associated with a single ssa name that can be
5295 && is_gimple_min_invariant (simplified
)
5296 && TREE_CODE (lhs
) == SSA_NAME
)
5298 changed
= set_ssa_val_to (lhs
, simplified
);
5302 && TREE_CODE (simplified
) == SSA_NAME
5303 && TREE_CODE (lhs
) == SSA_NAME
)
5305 changed
= visit_copy (lhs
, simplified
);
5309 if ((TREE_CODE (lhs
) == SSA_NAME
5310 /* We can substitute SSA_NAMEs that are live over
5311 abnormal edges with their constant value. */
5312 && !(gimple_assign_copy_p (ass
)
5313 && is_gimple_min_invariant (rhs1
))
5315 && is_gimple_min_invariant (simplified
))
5316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
5317 /* Stores or copies from SSA_NAMEs that are live over
5318 abnormal edges are a problem. */
5319 || (code
== SSA_NAME
5320 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1
)))
5321 changed
= defs_to_varying (ass
);
5322 else if (REFERENCE_CLASS_P (lhs
)
5324 changed
= visit_reference_op_store (lhs
, rhs1
, ass
);
5325 else if (TREE_CODE (lhs
) == SSA_NAME
)
5327 if ((gimple_assign_copy_p (ass
)
5328 && is_gimple_min_invariant (rhs1
))
5330 && is_gimple_min_invariant (simplified
)))
5333 changed
= set_ssa_val_to (lhs
, simplified
);
5335 changed
= set_ssa_val_to (lhs
, rhs1
);
5339 /* Visit the original statement. */
5340 switch (vn_get_stmt_kind (ass
))
5343 changed
= visit_nary_op (lhs
, ass
);
5346 changed
= visit_reference_op_load (lhs
, rhs1
, ass
);
5349 changed
= defs_to_varying (ass
);
5355 changed
= defs_to_varying (ass
);
5357 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
5359 tree lhs
= gimple_call_lhs (call_stmt
);
5360 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5362 /* Try constant folding based on our current lattice. */
5363 tree simplified
= gimple_fold_stmt_to_constant_1 (call_stmt
,
5367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5369 fprintf (dump_file
, "call ");
5370 print_gimple_expr (dump_file
, call_stmt
, 0);
5371 fprintf (dump_file
, " simplified to ");
5372 print_generic_expr (dump_file
, simplified
);
5373 fprintf (dump_file
, "\n");
5376 /* Setting value numbers to constants will occasionally
5377 screw up phi congruence because constants are not
5378 uniquely associated with a single ssa name that can be
5381 && is_gimple_min_invariant (simplified
))
5383 changed
= set_ssa_val_to (lhs
, simplified
);
5384 if (gimple_vdef (call_stmt
))
5385 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
5386 SSA_VAL (gimple_vuse (call_stmt
)));
5390 && TREE_CODE (simplified
) == SSA_NAME
)
5392 changed
= visit_copy (lhs
, simplified
);
5393 if (gimple_vdef (call_stmt
))
5394 changed
|= set_ssa_val_to (gimple_vdef (call_stmt
),
5395 SSA_VAL (gimple_vuse (call_stmt
)));
5398 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
5400 changed
= defs_to_varying (call_stmt
);
5405 /* Pick up flags from a devirtualization target. */
5406 tree fn
= gimple_call_fn (stmt
);
5407 int extra_fnflags
= 0;
5408 if (fn
&& TREE_CODE (fn
) == SSA_NAME
)
5411 if (TREE_CODE (fn
) == ADDR_EXPR
5412 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
)
5413 extra_fnflags
= flags_from_decl_or_type (TREE_OPERAND (fn
, 0));
5415 if (!gimple_call_internal_p (call_stmt
)
5416 && (/* Calls to the same function with the same vuse
5417 and the same operands do not necessarily return the same
5418 value, unless they're pure or const. */
5419 ((gimple_call_flags (call_stmt
) | extra_fnflags
)
5420 & (ECF_PURE
| ECF_CONST
))
5421 /* If calls have a vdef, subsequent calls won't have
5422 the same incoming vuse. So, if 2 calls with vdef have the
5423 same vuse, we know they're not subsequent.
5424 We can value number 2 calls to the same function with the
5425 same vuse and the same operands which are not subsequent
5426 the same, because there is no code in the program that can
5427 compare the 2 values... */
5428 || (gimple_vdef (call_stmt
)
5429 /* ... unless the call returns a pointer which does
5430 not alias with anything else. In which case the
5431 information that the values are distinct are encoded
5433 && !(gimple_call_return_flags (call_stmt
) & ERF_NOALIAS
)
5434 /* Only perform the following when being called from PRE
5435 which embeds tail merging. */
5436 && default_vn_walk_kind
== VN_WALK
)))
5437 changed
= visit_reference_op_call (lhs
, call_stmt
);
5439 changed
= defs_to_varying (call_stmt
);
5442 changed
= defs_to_varying (stmt
);
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv, honor_nans, honor_snans,
				       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
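/* Illustrative note (not part of the original code): e.g. a floating-point
   comparison may trap when signaling NaNs are honored, and a signed
   integer addition may trap with -ftrapv; operation_could_trap_helper_p
   makes that call based on the flags gathered above.  */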
/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	  /* The following are the address bases.  */
	case SSA_NAME:
	  return false;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
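/* Illustrative note (not part of the original code): the previous leader
   (or OP itself the first time) is pushed on avail_stack so the
   availability state can be unwound again when the domwalk leaves the
   dominated region that made OP available.  */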
5707 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5708 the leader for the expression if insertion was successful. */
5711 eliminate_dom_walker::eliminate_insert (basic_block bb
,
5712 gimple_stmt_iterator
*gsi
, tree val
)
5714 /* We can insert a sequence with a single assignment only. */
5715 gimple_seq stmts
= VN_INFO (val
)->expr
;
5716 if (!gimple_seq_singleton_p (stmts
))
5718 gassign
*stmt
= dyn_cast
<gassign
*> (gimple_seq_first_stmt (stmts
));
5720 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
5721 && gimple_assign_rhs_code (stmt
) != VIEW_CONVERT_EXPR
5722 && gimple_assign_rhs_code (stmt
) != BIT_FIELD_REF
5723 && (gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
5724 || TREE_CODE (gimple_assign_rhs2 (stmt
)) != INTEGER_CST
)))
5727 tree op
= gimple_assign_rhs1 (stmt
);
5728 if (gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
5729 || gimple_assign_rhs_code (stmt
) == BIT_FIELD_REF
)
5730 op
= TREE_OPERAND (op
, 0);
5731 tree leader
= TREE_CODE (op
) == SSA_NAME
? eliminate_avail (bb
, op
) : op
;
5737 if (gimple_assign_rhs_code (stmt
) == BIT_FIELD_REF
)
5738 res
= gimple_build (&stmts
, BIT_FIELD_REF
,
5739 TREE_TYPE (val
), leader
,
5740 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 1),
5741 TREE_OPERAND (gimple_assign_rhs1 (stmt
), 2));
5742 else if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
)
5743 res
= gimple_build (&stmts
, BIT_AND_EXPR
,
5744 TREE_TYPE (val
), leader
, gimple_assign_rhs2 (stmt
));
5746 res
= gimple_build (&stmts
, gimple_assign_rhs_code (stmt
),
5747 TREE_TYPE (val
), leader
);
5748 if (TREE_CODE (res
) != SSA_NAME
5749 || SSA_NAME_IS_DEFAULT_DEF (res
)
5750 || gimple_bb (SSA_NAME_DEF_STMT (res
)))
5752 gimple_seq_discard (stmts
);
5754 /* During propagation we have to treat SSA info conservatively
5755 and thus we can end up simplifying the inserted expression
5756 at elimination time to sth not defined in stmts. */
5757 /* But then this is a redundancy we failed to detect. Which means
5758 res now has two values. That doesn't play well with how
5759 we track availability here, so give up. */
5760 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5762 if (TREE_CODE (res
) == SSA_NAME
)
5763 res
= eliminate_avail (bb
, res
);
5766 fprintf (dump_file
, "Failed to insert expression for value ");
5767 print_generic_expr (dump_file
, val
);
5768 fprintf (dump_file
, " which is really fully redundant to ");
5769 print_generic_expr (dump_file
, res
);
5770 fprintf (dump_file
, "\n");
5778 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
5779 VN_INFO (res
)->valnum
= val
;
5780 VN_INFO (res
)->visited
= true;
5784 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5786 fprintf (dump_file
, "Inserted ");
5787 print_gimple_stmt (dump_file
, SSA_NAME_DEF_STMT (res
), 0);
5794 eliminate_dom_walker::eliminate_stmt (basic_block b
, gimple_stmt_iterator
*gsi
)
5796 tree sprime
= NULL_TREE
;
5797 gimple
*stmt
= gsi_stmt (*gsi
);
5798 tree lhs
= gimple_get_lhs (stmt
);
5799 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
5800 && !gimple_has_volatile_ops (stmt
)
      /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in a register.
	 ??? The fix isn't effective here.  This should instead
	 be ensured by not value-numbering them the same but treating
	 them like volatiles?  */
5808 && !(gimple_assign_single_p (stmt
)
5809 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == VAR_DECL
5810 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt
))
5811 && is_global_var (gimple_assign_rhs1 (stmt
)))))
5813 sprime
= eliminate_avail (b
, lhs
);
5816 /* If there is no existing usable leader but SCCVN thinks
5817 it has an expression it wants to use as replacement,
5819 tree val
= VN_INFO (lhs
)->valnum
;
5821 && TREE_CODE (val
) == SSA_NAME
5822 && VN_INFO (val
)->needs_insertion
5823 && VN_INFO (val
)->expr
!= NULL
5824 && (sprime
= eliminate_insert (b
, gsi
, val
)) != NULL_TREE
)
5825 eliminate_push_avail (b
, sprime
);
5828 /* If this now constitutes a copy duplicate points-to
5829 and range info appropriately. This is especially
5830 important for inserted code. See tree-ssa-copy.c
5831 for similar code. */
5833 && TREE_CODE (sprime
) == SSA_NAME
)
5835 basic_block sprime_b
= gimple_bb (SSA_NAME_DEF_STMT (sprime
));
5836 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
5837 && SSA_NAME_PTR_INFO (lhs
)
5838 && ! SSA_NAME_PTR_INFO (sprime
))
5840 duplicate_ssa_name_ptr_info (sprime
,
5841 SSA_NAME_PTR_INFO (lhs
));
5843 mark_ptr_info_alignment_unknown
5844 (SSA_NAME_PTR_INFO (sprime
));
5846 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
5847 && SSA_NAME_RANGE_INFO (lhs
)
5848 && ! SSA_NAME_RANGE_INFO (sprime
)
5850 duplicate_ssa_name_range_info (sprime
,
5851 SSA_NAME_RANGE_TYPE (lhs
),
5852 SSA_NAME_RANGE_INFO (lhs
));
5855 /* Inhibit the use of an inserted PHI on a loop header when
5856 the address of the memory reference is a simple induction
5857 variable. In other cases the vectorizer won't do anything
5858 anyway (either it's loop invariant or a complicated
5861 && TREE_CODE (sprime
) == SSA_NAME
5863 && (flag_tree_loop_vectorize
|| flag_tree_parallelize_loops
> 1)
5864 && loop_outer (b
->loop_father
)
5865 && has_zero_uses (sprime
)
5866 && bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))
5867 && gimple_assign_load_p (stmt
))
5869 gimple
*def_stmt
= SSA_NAME_DEF_STMT (sprime
);
5870 basic_block def_bb
= gimple_bb (def_stmt
);
5871 if (gimple_code (def_stmt
) == GIMPLE_PHI
5872 && def_bb
->loop_father
->header
== def_bb
)
5874 loop_p loop
= def_bb
->loop_father
;
5878 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5881 def_bb
= gimple_bb (SSA_NAME_DEF_STMT (op
));
5883 && flow_bb_inside_loop_p (loop
, def_bb
)
5884 && simple_iv (loop
, loop
, op
, &iv
, true))
5892 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5894 fprintf (dump_file
, "Not replacing ");
5895 print_gimple_expr (dump_file
, stmt
, 0);
5896 fprintf (dump_file
, " with ");
5897 print_generic_expr (dump_file
, sprime
);
5898 fprintf (dump_file
, " which would add a loop"
5899 " carried dependence to loop %d\n",
5902 /* Don't keep sprime available. */
5910 /* If we can propagate the value computed for LHS into
5911 all uses don't bother doing anything with this stmt. */
5912 if (may_propagate_copy (lhs
, sprime
))
5914 /* Mark it for removal. */
5915 to_remove
.safe_push (stmt
);
5917 /* ??? Don't count copy/constant propagations. */
5918 if (gimple_assign_single_p (stmt
)
5919 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
5920 || gimple_assign_rhs1 (stmt
) == sprime
))
5923 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5925 fprintf (dump_file
, "Replaced ");
5926 print_gimple_expr (dump_file
, stmt
, 0);
5927 fprintf (dump_file
, " with ");
5928 print_generic_expr (dump_file
, sprime
);
5929 fprintf (dump_file
, " in all uses of ");
5930 print_gimple_stmt (dump_file
, stmt
, 0);
5937 /* If this is an assignment from our leader (which
5938 happens in the case the value-number is a constant)
5939 then there is nothing to do. Likewise if we run into
5940 inserted code that needed a conversion because of
5941 our type-agnostic value-numbering of loads. */
5942 if ((gimple_assign_single_p (stmt
)
5943 || (is_gimple_assign (stmt
)
5944 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
5945 || gimple_assign_rhs_code (stmt
) == VIEW_CONVERT_EXPR
)))
5946 && sprime
== gimple_assign_rhs1 (stmt
))
5949 /* Else replace its RHS. */
5950 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5952 fprintf (dump_file
, "Replaced ");
5953 print_gimple_expr (dump_file
, stmt
, 0);
5954 fprintf (dump_file
, " with ");
5955 print_generic_expr (dump_file
, sprime
);
5956 fprintf (dump_file
, " in ");
5957 print_gimple_stmt (dump_file
, stmt
, 0);
5961 bool can_make_abnormal_goto
= (is_gimple_call (stmt
)
5962 && stmt_can_make_abnormal_goto (stmt
));
5963 gimple
*orig_stmt
= stmt
;
5964 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
5965 TREE_TYPE (sprime
)))
5967 /* We preserve conversions to but not from function or method
5968 types. This asymmetry makes it necessary to re-instantiate
5969 conversions here. */
5970 if (POINTER_TYPE_P (TREE_TYPE (lhs
))
5971 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs
))))
5972 sprime
= fold_convert (TREE_TYPE (lhs
), sprime
);
5976 tree vdef
= gimple_vdef (stmt
);
5977 tree vuse
= gimple_vuse (stmt
);
5978 propagate_tree_value_into_stmt (gsi
, sprime
);
5979 stmt
= gsi_stmt (*gsi
);
5981 /* In case the VDEF on the original stmt was released, value-number
5982 it to the VUSE. This is to make vuse_ssa_val able to skip
5983 released virtual operands. */
5984 if (vdef
!= gimple_vdef (stmt
))
5986 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef
));
5987 VN_INFO (vdef
)->valnum
= vuse
;
5990 /* If we removed EH side-effects from the statement, clean
5991 its EH information. */
5992 if (maybe_clean_or_replace_eh_stmt (orig_stmt
, stmt
))
5994 bitmap_set_bit (need_eh_cleanup
,
5995 gimple_bb (stmt
)->index
);
5996 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5997 fprintf (dump_file
, " Removed EH side-effects.\n");
6000 /* Likewise for AB side-effects. */
6001 if (can_make_abnormal_goto
6002 && !stmt_can_make_abnormal_goto (stmt
))
6004 bitmap_set_bit (need_ab_cleanup
,
6005 gimple_bb (stmt
)->index
);
6006 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6007 fprintf (dump_file
, " Removed AB side-effects.\n");
6014 /* If the statement is a scalar store, see if the expression
6015 has the same value number as its rhs. If so, the store is
6017 if (gimple_assign_single_p (stmt
)
6018 && !gimple_has_volatile_ops (stmt
)
6019 && !is_gimple_reg (gimple_assign_lhs (stmt
))
6020 && (TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
6021 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt
))))
6023 tree rhs
= gimple_assign_rhs1 (stmt
);
6024 vn_reference_t vnresult
;
6025 /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
6026 typed load of a byte known to be 0x11 as 1 so a store of
6027 a boolean 1 is detected as redundant. Because of this we
6028 have to make sure to lookup with a ref where its size
6029 matches the precision. */
6030 tree lookup_lhs
= lhs
;
6031 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
6032 && (TREE_CODE (lhs
) != COMPONENT_REF
6033 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs
, 1)))
6034 && !type_has_mode_precision_p (TREE_TYPE (lhs
)))
6036 if (TREE_CODE (lhs
) == COMPONENT_REF
6037 || TREE_CODE (lhs
) == MEM_REF
)
6039 tree ltype
= build_nonstandard_integer_type
6040 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs
))),
6041 TYPE_UNSIGNED (TREE_TYPE (lhs
)));
6042 if (TREE_CODE (lhs
) == COMPONENT_REF
)
6044 tree foff
= component_ref_field_offset (lhs
);
6045 tree f
= TREE_OPERAND (lhs
, 1);
6046 if (!poly_int_tree_p (foff
))
6047 lookup_lhs
= NULL_TREE
;
6049 lookup_lhs
= build3 (BIT_FIELD_REF
, ltype
,
6050 TREE_OPERAND (lhs
, 0),
6051 TYPE_SIZE (TREE_TYPE (lhs
)),
6053 (foff
, DECL_FIELD_BIT_OFFSET (f
)));
6056 lookup_lhs
= build2 (MEM_REF
, ltype
,
6057 TREE_OPERAND (lhs
, 0),
6058 TREE_OPERAND (lhs
, 1));
6061 lookup_lhs
= NULL_TREE
;
6063 tree val
= NULL_TREE
;
6065 val
= vn_reference_lookup (lookup_lhs
, gimple_vuse (stmt
),
6066 VN_WALKREWRITE
, &vnresult
, false);
6067 if (TREE_CODE (rhs
) == SSA_NAME
)
6068 rhs
= VN_INFO (rhs
)->valnum
;
6070 && (operand_equal_p (val
, rhs
, 0)
6071 /* Due to the bitfield lookups above we can get bit
6072 interpretations of the same RHS as values here. Those
6073 are redundant as well. */
6074 || (TREE_CODE (val
) == SSA_NAME
6075 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val
))
6076 && (val
= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val
)))
6077 && TREE_CODE (val
) == VIEW_CONVERT_EXPR
6078 && TREE_OPERAND (val
, 0) == rhs
)))
6080 /* We can only remove the later store if the former aliases
6081 at least all accesses the later one does or if the store
6082 was to readonly memory storing the same value. */
6084 ao_ref_init (&lhs_ref
, lhs
);
6085 alias_set_type set
= ao_ref_alias_set (&lhs_ref
);
6086 alias_set_type base_set
= ao_ref_base_alias_set (&lhs_ref
);
6088 || ((vnresult
->set
== set
6089 || alias_set_subset_of (set
, vnresult
->set
))
6090 && (vnresult
->base_set
== base_set
6091 || alias_set_subset_of (base_set
, vnresult
->base_set
))))
6093 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6095 fprintf (dump_file
, "Deleted redundant store ");
6096 print_gimple_stmt (dump_file
, stmt
, 0);
6099 /* Queue stmt for removal. */
6100 to_remove
.safe_push (stmt
);
6106 /* If this is a control statement value numbering left edges
6107 unexecuted on force the condition in a way consistent with
6109 if (gcond
*cond
= dyn_cast
<gcond
*> (stmt
))
6111 if ((EDGE_SUCC (b
, 0)->flags
& EDGE_EXECUTABLE
)
6112 ^ (EDGE_SUCC (b
, 1)->flags
& EDGE_EXECUTABLE
))
6114 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6116 fprintf (dump_file
, "Removing unexecutable edge from ");
6117 print_gimple_stmt (dump_file
, stmt
, 0);
6119 if (((EDGE_SUCC (b
, 0)->flags
& EDGE_TRUE_VALUE
) != 0)
6120 == ((EDGE_SUCC (b
, 0)->flags
& EDGE_EXECUTABLE
) != 0))
6121 gimple_cond_make_true (cond
);
6123 gimple_cond_make_false (cond
);
6125 el_todo
|= TODO_cleanup_cfg
;
6130 bool can_make_abnormal_goto
= stmt_can_make_abnormal_goto (stmt
);
6131 bool was_noreturn
= (is_gimple_call (stmt
)
6132 && gimple_call_noreturn_p (stmt
));
6133 tree vdef
= gimple_vdef (stmt
);
6134 tree vuse
= gimple_vuse (stmt
);
6136 /* If we didn't replace the whole stmt (or propagate the result
6137 into all uses), replace all uses on this stmt with their
6139 bool modified
= false;
6140 use_operand_p use_p
;
6142 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
6144 tree use
= USE_FROM_PTR (use_p
);
6145 /* ??? The call code above leaves stmt operands un-updated. */
6146 if (TREE_CODE (use
) != SSA_NAME
)
6149 if (SSA_NAME_IS_DEFAULT_DEF (use
))
6150 /* ??? For default defs BB shouldn't matter, but we have to
6151 solve the inconsistency between rpo eliminate and
6152 dom eliminate avail valueization first. */
6153 sprime
= eliminate_avail (b
, use
);
	  /* Look for sth available at the definition block of the argument.
	     This avoids inconsistencies between availability there which
	     decides if the stmt can be removed and availability at the
	     use site.  The SSA property ensures that things available
	     at the definition are also available at uses.  */
6160 sprime
= eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use
)), use
);
6161 if (sprime
&& sprime
!= use
6162 && may_propagate_copy (use
, sprime
)
6163 /* We substitute into debug stmts to avoid excessive
6164 debug temporaries created by removed stmts, but we need
6165 to avoid doing so for inserted sprimes as we never want
6166 to create debug temporaries for them. */
6168 || TREE_CODE (sprime
) != SSA_NAME
6169 || !is_gimple_debug (stmt
)
6170 || !bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (sprime
))))
6172 propagate_value (use_p
, sprime
);
  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
         invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
        {
          /* fold_stmt may have created new stmts in between
             the previous stmt and the folded stmt.  Mark
             all defs created there as varying to not confuse
             the SCCVN machinery as we're using that even during
             elimination.  */
          if (gsi_end_p (prev))
            prev = gsi_start_bb (b);
          else
            gsi_next (&prev);
          if (gsi_stmt (prev) != gsi_stmt (*gsi))
            do
              {
                tree def;
                ssa_op_iter dit;
                FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
                                           dit, SSA_OP_ALL_DEFS)
                  /* As existing DEFs may move between stmts
                     only process new ones.  */
                  if (! has_VN_INFO (def))
                    {
                      VN_INFO (def)->valnum = def;
                      VN_INFO (def)->visited = true;
                    }
                if (gsi_stmt (prev) == gsi_stmt (*gsi))
                  break;
                gsi_next (&prev);
              }
            while (1);
        }
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
        to_remove.safe_push (stmt);
    }
  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
          && flag_devirtualize
          && virtual_method_call_p (fn))
        {
          tree otr_type = obj_type_ref_class (fn);
          unsigned HOST_WIDE_INT otr_tok
            = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
          tree instance;
          ipa_polymorphic_call_context context (current_function_decl,
                                                fn, stmt, &instance);
          context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                    otr_type, stmt, NULL);
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                 otr_tok, context, &final);
          if (dump_file)
            dump_possible_polymorphic_call_targets (dump_file,
                                                    obj_type_ref_class (fn),
                                                    otr_tok, context);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              if (targets.length () == 1)
                fn = targets[0]->decl;
              else
                fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                   "converting indirect call to "
                                   "function %s\n",
                                   lang_hooks.decl_printable_name (fn, 2));
                }
              gimple_call_set_fndecl (call_stmt, fn);
              /* If changing the call to __builtin_unreachable
                 or similar noreturn function, adjust gimple_call_fntype
                 too.  */
              if (gimple_call_noreturn_p (call_stmt)
                  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                  && TYPE_ARG_TYPES (TREE_TYPE (fn))
                  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                      == void_type_node))
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
              maybe_remove_unused_call_args (cfun, call_stmt);
              modified = true;
            }
        }
    }
  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
         is needed to fix up the noreturn call.  */
      if (!was_noreturn
          && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
        to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
         edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
           && (gimple_cond_true_p (as_a <gcond *> (stmt))
               || gimple_cond_false_p (as_a <gcond *> (stmt))))
          || (gimple_code (stmt) == GIMPLE_SWITCH
              && TREE_CODE (gimple_switch_index
                              (as_a <gswitch *> (stmt))) == INTEGER_CST))
        el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
         its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " Removed EH side-effects.\n");
        }
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
          && !stmt_can_make_abnormal_goto (stmt))
        {
          bitmap_set_bit (need_ab_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " Removed AB side-effects.\n");
        }
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
         it to the VUSE.  This is to make vuse_ssa_val able to skip
         released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
        VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
        {
          gsi_next (&gsi);
          continue;
        }

      tree sprime = eliminate_avail (b, res);
      if (sprime
          && sprime != res)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, "\n");
            }

          /* If we inserted this PHI node ourself, it's not an elimination.  */
          if (! inserted_exprs
              || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            eliminations++;

          /* If we will propagate into all uses don't bother to do
             anything.  */
          if (may_propagate_copy (res, sprime))
            {
              /* Mark the PHI for removal.  */
              to_remove.safe_push (phi);
              gsi_next (&gsi);
              continue;
            }

          remove_phi_node (&gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          gimple *stmt = gimple_build_assign (res, sprime);
          gimple_stmt_iterator gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          continue;
        }

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (b, arg);
          if (sprime && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
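/* Note the NULL_TREE pushed in before_dom_children acts as a per-block
   marker on avail_stack; popping back to it here undoes exactly the
   availability entries registered while walking the just-finished
   dominator tree node and its children.  */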
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
         be present so we have to make sure to deal with uses outside of the
         region of stmts that we thought are eliminated.
         ??? Note we may be confused by uses in dead regions we didn't run
         elimination on.  Rather than checking individual uses we accept
         dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
         contains such example).  */
      if (region_p)
        {
          if (gphi *phi = dyn_cast <gphi *> (stmt))
            {
              tree lhs = gimple_phi_result (phi);
              if (!has_zero_uses (lhs))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Keeping eliminated stmt live "
                             "as copy because of out-of-region uses\n");
                  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                  gimple *copy = gimple_build_assign (lhs, sprime);
                  gimple_stmt_iterator gsi
                    = gsi_after_labels (gimple_bb (stmt));
                  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                  do_release_defs = false;
                }
            }
          else if (tree lhs = gimple_get_lhs (stmt))
            if (TREE_CODE (lhs) == SSA_NAME
                && !has_zero_uses (lhs))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Keeping eliminated stmt live "
                           "as copy because of out-of-region uses\n");
                tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                if (is_gimple_assign (stmt))
                  {
                    gimple_assign_set_rhs_from_tree (&gsi, sprime);
                    stmt = gsi_stmt (gsi);
                    update_stmt (stmt);
                    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                    continue;
                  }
                else
                  {
                    gimple *copy = gimple_build_assign (lhs, sprime);
                    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                    do_release_defs = false;
                  }
              }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, do_release_defs);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (need_ab_cleanup, bb->index);
          if (do_release_defs)
            release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  eliminate_dom_walker *saved_rpo_avail = rpo_avail;
  rpo_avail = &walker;
  walker.walk (cfun->cfg->x_entry_block_ptr);
  rpo_avail = saved_rpo_avail;

  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate);
void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
          || info->valnum == VN_TOP)
        info->valnum = name;
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          if (VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name));
              fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
            }
        }
    }
}

/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_t info;
  vn_ssa_aux_iterator_type it;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
         and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
        ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
                                       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      vn_avail *av = VN_INFO (valnum)->avail;
      if (!av)
        return NULL_TREE;
      if (av->location == bb->index)
        /* On tramp3d 90% of the cases are here.  */
        return ssa_name (av->leader);
      do
        {
          basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
          /* ???  During elimination we have to use availability at the
             definition site of a use we try to replace.  This
             is required to not run into inconsistencies because
             of dominated_by_p_w_unex behavior and removing a definition
             while not replacing all uses.
             ???  We could try to consistently walk dominators
             ignoring non-executable regions.  The nearest common
             dominator of bb and abb is where we can stop walking.  We
             may also be able to "pre-compute" (bits of) the next immediate
             (non-)dominator during the RPO walk when marking edges as
             executable.  */
          if (dominated_by_p_w_unex (bb, abb))
            {
              tree leader = ssa_name (av->leader);
              /* Prevent eliminations that break loop-closed SSA.  */
              if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
                  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
                  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
                                                           (leader))->loop_father,
                                              bb))
                return NULL_TREE;
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  print_generic_expr (dump_file, leader);
                  fprintf (dump_file, " is available for ");
                  print_generic_expr (dump_file, valnum);
                  fprintf (dump_file, "\n");
                }
              /* On tramp3d 99% of the _remaining_ cases succeed at
                 the first enqueued leader.  */
              return leader;
            }
          /* ???  Can we somehow skip to the immediate dominator
             RPO index (bb_to_rpo)?  Again, maybe not worth, on
             tramp3d the worst number of elements in the vector is 9.  */
          av = av->next;
        }
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
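/* Availability is thus a per-value chain of vn_avail records, most
   recently recorded block first; the fast path above matches the current
   block index and the loop otherwise walks the chain checking dominance
   of the recording block over BB.  */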
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  value->avail = av;
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
        {
          tree tem = val->valnum;
          if (tem != VN_TOP && tem != name)
            {
              if (TREE_CODE (tem) != SSA_NAME)
                return tem;
              /* For all values we only valueize to an available leader
                 which means we can use SSA name info without restriction.  */
              tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
              if (tem)
                return tem;
            }
        }
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case NE_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
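/* For example, for the true edge of "if (a_1 < b_2)" the calls above record
   a_1 != b_2 == true, a_1 <= b_2 == true, a_1 > b_2 == false and
   a_1 == b_2 == false, so a later condition dominated by that edge that
   value-numbers to one of these forms simplifies to a constant.  */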
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
            bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
            bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge e;
  edge_iterator ei;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
          && flow_loop_nested_p (e->dest->loop_father,
                                 e->src->loop_father))
        {
          lc_phi_nodes = true;
          break;
        }

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
        bb->loop_father->nb_iterations
          = simplify_replace_tree (bb->loop_father->nb_iterations,
                                   NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
    }

  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
         gsi_next (&gsi))
      {
        gphi *phi = gsi.phi ();
        tree res = PHI_RESULT (phi);
        vn_ssa_aux_t res_info = VN_INFO (res);
        if (!bb_visited)
          {
            gcc_assert (!res_info->visited);
            res_info->valnum = VN_TOP;
            res_info->visited = true;
          }

        /* When not iterating force backedge values to varying.  */
        visit_stmt (phi, !iterate_phis);
        if (virtual_operand_p (res))
          continue;

        /* Eliminate.  */
        /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
           how we handle backedges and availability.
           And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
        tree val = res_info->valnum;
        if (res != val && !iterate && eliminate)
          {
            if (tree leader = avail.eliminate_avail (bb, res))
              {
                if (leader != res
                    /* Preserve loop-closed SSA form.  */
                    && (! lc_phi_nodes
                        || is_gimple_min_invariant (leader)))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Replaced redundant PHI node "
                                 "defining ");
                        print_generic_expr (dump_file, res);
                        fprintf (dump_file, " with ");
                        print_generic_expr (dump_file, leader);
                        fprintf (dump_file, "\n");
                      }
                    avail.eliminations++;

                    if (may_propagate_copy (res, leader))
                      {
                        /* Schedule for removal.  */
                        avail.to_remove.safe_push (phi);
                        continue;
                      }
                    /* ???  Else generate a copy stmt.  */
                  }
              }
          }
        /* Only make defs available that not already are.  But make
           sure loop-closed SSA PHI node defs are picked up for
           downstream uses.  */
        if (lc_phi_nodes
            || ! avail.eliminate_avail (bb, res))
          avail.eliminate_push_avail (bb, res);
      }
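  /* Note that with !iterate_phis the visit above treats PHI arguments
     flowing in over backedges as varying, which is what makes the cheap
     non-iterating mode (used at -O[1g], see pass_fre::execute) a single
     RPO sweep.  */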
  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking outgoing edge %d -> %d executable\n",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
        {
          FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
            {
              vn_ssa_aux_t op_info = VN_INFO (op);
              gcc_assert (!op_info->visited);
              op_info->valnum = VN_TOP;
              op_info->visited = true;
            }

          /* We somehow have to deal with uses that are not defined
             in the processed region.  Forcing unvisited uses to
             varying here doesn't play well with def-use following during
             expression simplification, so we deal with this by checking
             the visited flag in SSA_VAL.  */
        }

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
        {
        case GIMPLE_SWITCH:
          e = find_taken_edge (bb, vn_valueize (gimple_switch_index
                                                  (as_a <gswitch *> (last))));
          break;
        case GIMPLE_COND:
          {
            tree lhs = vn_valueize (gimple_cond_lhs (last));
            tree rhs = vn_valueize (gimple_cond_rhs (last));
            tree val = gimple_simplify (gimple_cond_code (last),
                                        boolean_type_node, lhs, rhs,
                                        NULL, vn_valueize);
            /* If the condition didn't simplify see if we have recorded
               an expression from so far taken edges.  */
            if (! val || TREE_CODE (val) != INTEGER_CST)
              {
                vn_nary_op_t vnresult;
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
                                                boolean_type_node, ops,
                                                &vnresult);
                /* Did we get a predicated value?  */
                if (! val && vnresult && vnresult->predicated_values)
                  {
                    val = vn_nary_op_get_predicated_value (vnresult, bb);
                    if (val && dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Got predicated value ");
                        print_generic_expr (dump_file, val, TDF_NONE);
                        fprintf (dump_file, " for ");
                        print_gimple_stmt (dump_file, last, TDF_SLIM);
                      }
                  }
              }
            if (val)
              e = find_taken_edge (bb, val);
            if (! e)
              {
                /* If we didn't manage to compute the taken edge then
                   push predicated expressions for the condition itself
                   and related conditions to the hashtables.  This allows
                   simplification of redundant conditions which is
                   important as early cleanup.  */
                edge true_e, false_e;
                extract_true_false_edges_from_block (bb, &true_e, &false_e);
                enum tree_code code = gimple_cond_code (last);
                enum tree_code icode
                  = invert_tree_comparison (code, HONOR_NANS (lhs));
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                if (do_region
                    && bitmap_bit_p (exit_bbs, true_e->dest->index))
                  true_e = NULL;
                if (do_region
                    && bitmap_bit_p (exit_bbs, false_e->dest->index))
                  false_e = NULL;
                if (true_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_true_node, 0, true_e);
                if (false_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_false_node, 0, false_e);
                if (icode != ERROR_MARK)
                  {
                    if (true_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_false_node, 0, true_e);
                    if (false_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_true_node, 0, false_e);
                  }
                /* Relax for non-integers, inverted condition handled
                   above.  */
                if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
                  {
                    if (true_e)
                      insert_related_predicates_on_edge (code, ops, true_e);
                    if (false_e)
                      insert_related_predicates_on_edge (icode, ops, false_e);
                  }
              }
            break;
          }
        case GIMPLE_GOTO:
          e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
          break;
        default:
          e = NULL;
        }
      if (e)
        {
          todo = TODO_cleanup_cfg;
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking known outgoing %sedge %d -> %d executable\n",
                         e->flags & EDGE_DFS_BACK ? "back-" : "",
                         e->src->index, e->dest->index);
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
          else if (!(e->dest->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking destination block %d reachable\n",
                         e->dest->index);
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
      else if (gsi_one_before_end_p (gsi))
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (!(e->flags & EDGE_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking outgoing edge %d -> %d executable\n",
                             e->src->index, e->dest->index);
                  e->flags |= EDGE_EXECUTABLE;
                  e->dest->flags |= BB_EXECUTABLE;
                }
              else if (!(e->dest->flags & BB_EXECUTABLE))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             "marking destination block %d reachable\n",
                             e->dest->index);
                  e->dest->flags |= BB_EXECUTABLE;
                }
            }
        }
      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
        avail.eliminate_stmt (bb, &gsi);
      else
        /* If not eliminating, make all not already available defs
           available.  */
        FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
          if (! avail.eliminate_avail (bb, op))
            avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     elimination.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime;
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              sprime = SSA_VAL (arg);
              gcc_assert (TREE_CODE (sprime) != SSA_NAME
                          || SSA_NAME_IS_DEFAULT_DEF (sprime));
            }
          else
            /* Look for sth available at the definition block of the argument.
               This avoids inconsistencies between availability there which
               decides if the stmt can be removed and availability at the
               use site.  The SSA property ensures that things available
               at the definition are also available at uses.  */
            sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
                                            arg);
          if (sprime
              && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
        (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
        *slot = (*slot)->unwind_to;
      else
        valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
        (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
        (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  /* ???  This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      while ((*i)->avail)
        {
          if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
            break;
          vn_avail *av = (*i)->avail;
          (*i)->avail = (*i)->avail->next;
          av->next = avail.m_avail_freelist;
          avail.m_avail_freelist = av;
        }
    }
}
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
     re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);
  if (!do_region)
    BITMAP_FREE (exit_bbs);

  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies w/o the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
        && !(e->flags & EDGE_DFS_BACK))
      break;
  bool skip_entry_phis = e != NULL;
  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
             "the entry block, skipping its PHIs.\n");

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  rpo_avail = &avail;

  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags |= bb_in_region;
        }
      /* We can't merge the first two loops because we cannot rely
         on EDGE_DFS_BACK for edges not within the region.  But if
         we decide to always have the bb_in_region flag we can
         do the checking during the RPO walk itself (but then it's
         also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (e == entry
                        || (skip_entry_phis && bb == entry->dest)
                        || (e->src->flags & bb_in_region));
        }
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags &= ~bb_in_region;
        }
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
                          / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;

  /* Initialize the unwind state and edge/BB executable state.  */
  bool need_max_rpo_iterate = false;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->flags & EDGE_DFS_BACK)
            has_backedges = true;
          e->flags &= ~EDGE_EXECUTABLE;
          if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
            continue;
          if (bb_to_rpo[e->src->index] > i)
            {
              rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
                                          bb_to_rpo[e->src->index]);
              need_max_rpo_iterate = true;
            }
          else
            rpo_state[i].max_rpo
              = MAX (rpo_state[i].max_rpo,
                     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
        }
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
  /* When there are irreducible regions the simplistic max_rpo computation
     above for the case of backedges doesn't work and we need to iterate
     until there are no more changes.  */
  unsigned nit = 0;
  while (need_max_rpo_iterate)
    {
      nit++;
      need_max_rpo_iterate = false;
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            {
              if (e == entry || (skip_entry_phis && bb == entry->dest))
                continue;
              int max_rpo = MAX (rpo_state[i].max_rpo,
                                 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
              if (rpo_state[i].max_rpo != max_rpo)
                {
                  rpo_state[i].max_rpo = max_rpo;
                  need_max_rpo_iterate = true;
                }
            }
        }
    }
  statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);

  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS,
                                         e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index, loop->num);
                        non_latch_backedge = true;
                      }
                    else
                      e->flags |= EDGE_EXECUTABLE;
                  }
              rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
            }
    }
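  /* Net effect of the heuristic above: headers of loops nested deeper than
     param_rpo_vn_max_loop_depth get their latch backedges marked executable
     up front and rpo_state[].iterate cleared, so the walk below treats
     their backedge PHI values conservatively instead of iterating them.  */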
  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    {
      /* Go and process all blocks, iterating as necessary.  */
      do
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

          /* If the block has incoming backedges remember unwind state.  This
             is required even for non-executable blocks since in irreducible
             regions we might reach them via the backedge and re-start iterating
             from there.
             Note we can individually mark blocks with incoming backedges to
             not iterate where we then handle PHIs conservatively.  We do that
             heuristically to reduce compile-time for degenerate cases.  */
          if (rpo_state[idx].iterate)
            {
              rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
              rpo_state[idx].ref_top = last_inserted_ref;
              rpo_state[idx].phi_top = last_inserted_phi;
              rpo_state[idx].nary_top = last_inserted_nary;
            }

          if (!(bb->flags & BB_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Block %d: BB%d found not executable\n",
                         idx, bb->index);
              idx++;
              continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
          nblk++;
          todo |= process_bb (avail, bb,
                              rpo_state[idx].visited != 0,
                              rpo_state[idx].iterate,
                              iterate, eliminate, do_region, exit_bbs, false);
          rpo_state[idx].visited++;

          /* Verify if changed values flow over executable outgoing backedges
             and those change destination PHI values (that's the thing we
             can easily verify).  Reduce over all such edges to the farthest
             away PHI.  */
          int iterate_to = -1;
          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
                == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
                && rpo_state[bb_to_rpo[e->dest->index]].iterate)
              {
                int destidx = bb_to_rpo[e->dest->index];
                if (!rpo_state[destidx].visited)
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Unvisited destination %d\n",
                               e->dest->index);
                    if (iterate_to == -1 || destidx < iterate_to)
                      iterate_to = destidx;
                    continue;
                  }
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Looking for changed values of backedge"
                           " %d->%d destination PHIs\n",
                           e->src->index, e->dest->index);
                vn_context_bb = e->dest;
                gphi_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi); gsi_next (&gsi))
                  {
                    bool inserted = false;
                    /* While we'd ideally just iterate on value changes
                       we CSE PHIs and do that even across basic-block
                       boundaries.  So even hashtable state changes can
                       be important (which is roughly equivalent to
                       PHI argument value changes).  To not excessively
                       iterate because of that we track whether a PHI
                       was CSEd to with GF_PLF_1.  */
                    bool phival_changed;
                    if ((phival_changed = visit_phi (gsi.phi (),
                                                     &inserted, false))
                        || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                      {
                        if (!phival_changed
                            && dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "PHI was CSEd and hashtable "
                                   "state (changed)\n");
                        if (iterate_to == -1 || destidx < iterate_to)
                          iterate_to = destidx;
                        break;
                      }
                  }
                vn_context_bb = NULL;
              }
          if (iterate_to != -1)
            {
              do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
              idx = iterate_to;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Iterating to %d BB%d\n",
                         iterate_to, rpo[iterate_to]);
              continue;
            }

          idx++;
        }
      while (idx < n);
    }
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_first_set_bit (worklist);
          bitmap_clear_bit (worklist, idx);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

          /* When we run into predecessor edges where we cannot trust its
             executable state mark them executable so PHI processing will
             be conservative.
             ???  Do we need to force arguments flowing over that edge
             to be varying or will they even always be?  */
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && (bb == entry->dest
                    || (!rpo_state[bb_to_rpo[e->src->index]].visited
                        && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
                            >= (int)idx))))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs,
                              skip_entry_phis && bb == entry->dest);
          rpo_state[idx].visited++;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }
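  /* Because bitmap_first_set_bit always returns the smallest pending RPO
     index, the worklist above visits reachable blocks in RPO order even
     though successors are queued out of order.  */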
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table <vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
        {
          navail++;
          av = av->next;
        }
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             as well.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE