tree-ssa-pre.c: Include alias.h.
author Richard Biener <rguenther@suse.de>
Thu, 7 Jul 2016 07:43:35 +0000 (07:43 +0000)
committer Richard Biener <rguenth@gcc.gnu.org>
Thu, 7 Jul 2016 07:43:35 +0000 (07:43 +0000)
2016-07-07  Richard Biener  <rguenther@suse.de>

* tree-ssa-pre.c: Include alias.h.
(compute_avail): If we have multiple VN_REFERENCEs with the
same hashtable entry adjust that to make it a valid replacement
for all of them with respect to alignment and aliasing
when doing insertion.
* tree-ssa-sccvn.h (vn_reference_operands_for_lookup): Declare.
* tree-ssa-sccvn.c (vn_reference_operands_for_lookup): New function.

From-SVN: r238078
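
For context, a hedged illustration of the problem the compute_avail change below addresses (an assumed reconstruction, not the actual gcc.dg/torture/pr65270-1.c testcase): two loads of the same memory through differently aligned types, or through types with different alias sets, can be value-numbered into one shared VN_REFERENCE hashtable entry, and PRE must not insert the stricter variant's expression at a point where only the weaker access is valid.

    /* Illustrative C sketch (assumption, not the PR testcase).  Both loads
       read the same bytes, but through views with different alignment; if
       value numbering gives them a single shared VN_REFERENCE entry and PRE
       hoists using the 4-byte-aligned expression, the inserted load can
       fault on strict-alignment targets.  The analogous TBAA concern is
       that the shared entry's alias set must cover both accesses, or a
       hoisted load could be moved past a conflicting store.  */

    struct __attribute__((packed)) u { int i; };

    int
    sum_twice (char *p, int flag)
    {
      int s = 0;
      if (flag)
        s += ((struct u *) p)->i;  /* 1-byte-aligned view of the int.  */
      s += *(int *) p;             /* 4-byte-aligned view of the same int.  */
      return s;
    }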

gcc/ChangeLog
gcc/tree-ssa-pre.c
gcc/tree-ssa-sccvn.c
gcc/tree-ssa-sccvn.h

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index cbb4dcf49365a16d03d96a39ac88c65a58ed0d3f..4fa2c4dcee4d1414df8cc5735ed90768cb9c24d1 100644 (file)
@@ -1,3 +1,13 @@
+2016-07-07  Richard Biener  <rguenther@suse.de>
+
+       * tree-ssa-pre.c: Include alias.h.
+       (compute_avail): If we have multiple VN_REFERENCEs with the
+       same hashtable entry adjust that to make it a valid replacement
+       for all of them with respect to alignment and aliasing
+       when doing insertion.
+       * tree-ssa-sccvn.h (vn_reference_operands_for_lookup): Declare.
+       * tree-ssa-sccvn.c (vn_reference_operands_for_lookup): New function.
+
 2016-07-06  Segher Boessenkool  <segher@kernel.crashing.org>
 
        PR target/70098
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 3ce87d9d23f4865541c360b996cb79e475c2d193..0c97f4fbcce68ffd2efb3b66f605bb58087d9888 100644 (file)
@@ -53,6 +53,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ipa-utils.h"
 #include "tree-cfgcleanup.h"
 #include "langhooks.h"
+#include "alias.h"
 
 /* TODO:
 
@@ -3724,12 +3725,19 @@ compute_avail (void)
 
                  case VN_REFERENCE:
                    {
+                     tree rhs1 = gimple_assign_rhs1 (stmt);
+                     alias_set_type set = get_alias_set (rhs1);
+                     vec<vn_reference_op_s> operands
+                       = vn_reference_operands_for_lookup (rhs1);
                      vn_reference_t ref;
-                     vn_reference_lookup (gimple_assign_rhs1 (stmt),
-                                          gimple_vuse (stmt),
-                                          VN_WALK, &ref, true);
+                     vn_reference_lookup_pieces (gimple_vuse (stmt), set,
+                                                 TREE_TYPE (rhs1),
+                                                 operands, &ref, VN_WALK);
                      if (!ref)
-                       continue;
+                       {
+                         operands.release ();
+                         continue;
+                       }
 
                      /* If the value of the reference is not invalidated in
                         this block until it is computed, add the expression
@@ -3753,7 +3761,68 @@ compute_avail (void)
                                = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
                            }
                          if (!ok)
-                           continue;
+                           {
+                             operands.release ();
+                             continue;
+                           }
+                       }
+
+                     /* If the load was value-numbered to another
+                        load make sure we do not use its expression
+                        for insertion if it wouldn't be a valid
+                        replacement.  */
+                     /* At the moment we have a testcase
+                        for hoist insertion of aligned vs. misaligned
+                        variants in gcc.dg/torture/pr65270-1.c thus
+                        with just alignment to be considered we can
+                        simply replace the expression in the hashtable
+                        with the most conservative one.  */
+                     vn_reference_op_t ref1 = &ref->operands.last ();
+                     while (ref1->opcode != TARGET_MEM_REF
+                            && ref1->opcode != MEM_REF
+                            && ref1 != &ref->operands[0])
+                       --ref1;
+                     vn_reference_op_t ref2 = &operands.last ();
+                     while (ref2->opcode != TARGET_MEM_REF
+                            && ref2->opcode != MEM_REF
+                            && ref2 != &operands[0])
+                       --ref2;
+                     if ((ref1->opcode == TARGET_MEM_REF
+                          || ref1->opcode == MEM_REF)
+                         && (TYPE_ALIGN (ref1->type)
+                             > TYPE_ALIGN (ref2->type)))
+                       {
+                         ref->operands.release ();
+                         ref->operands = operands;
+                         ref1 = ref2;
+                       }
+                     else
+                       operands.release ();
+                     /* TBAA behavior is an obvious part so make sure
+                        that the hashtable one covers this as well
+                        by adjusting the ref alias set and its base.  */
+                     if (ref->set == set
+                         || alias_set_subset_of (set, ref->set))
+                       ;
+                     else if (alias_set_subset_of (ref->set, set))
+                       {
+                         ref->set = set;
+                         if (ref1->opcode == MEM_REF)
+                           ref1->op0 = fold_convert (TREE_TYPE (ref2->op0),
+                                                     ref1->op0);
+                         else
+                           ref1->op2 = fold_convert (TREE_TYPE (ref2->op2),
+                                                     ref1->op2);
+                       }
+                     else
+                       {
+                         ref->set = 0;
+                         if (ref1->opcode == MEM_REF)
+                           ref1->op0 = fold_convert (ptr_type_node,
+                                                     ref1->op0);
+                         else
+                           ref1->op2 = fold_convert (ptr_type_node,
+                                                     ref1->op2);
                        }
 
                      result = pre_expr_pool.allocate ();
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 0cbd2cd56f24bba09f95fe9d6ae50a62535cca6d..e9e18526a0b36aff01b1281b531c6c901657af61 100644 (file)
@@ -2285,6 +2285,17 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
   return (void *)-1;
 }
 
+/* Return a reference op vector from OP that can be used for
+   vn_reference_lookup_pieces.  The caller is responsible for releasing
+   the vector.  */
+
+vec<vn_reference_op_s>
+vn_reference_operands_for_lookup (tree op)
+{
+  bool valueized;
+  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
+}
+
 /* Lookup a reference operation by its parts, in the current hash table.
    Returns the resulting value number if it exists in the hash table,
    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index 1f6af4043faf02ff21f09fa6f2b9924d4c5b44eb..069590a0b01c2c2469daf369e8872017dd21a2bb 100644 (file)
@@ -214,6 +214,7 @@ vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
                                       tree, tree *, tree, unsigned int);
 bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
                                    vec<vn_reference_op_s> );
+vec<vn_reference_op_s> vn_reference_operands_for_lookup (tree);
 tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
                                 vec<vn_reference_op_s> ,
                                 vn_reference_t *, vn_lookup_kind);
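
For readers of the new interface, a minimal usage sketch of vn_reference_operands_for_lookup together with vn_reference_lookup_pieces, mirroring the compute_avail hunk above (it assumes GCC's internal headers such as tree-ssa-sccvn.h, alias.h and gimple.h plus a surrounding gimple assignment stmt, so it is a sketch rather than standalone code):

    /* Look up a load by its pieces so the operand vector stays available
       for later comparison and adjustment, as compute_avail now does.  */
    tree rhs1 = gimple_assign_rhs1 (stmt);
    alias_set_type set = get_alias_set (rhs1);
    vec<vn_reference_op_s> operands
      = vn_reference_operands_for_lookup (rhs1);  /* Caller must release.  */
    vn_reference_t ref;
    vn_reference_lookup_pieces (gimple_vuse (stmt), set, TREE_TYPE (rhs1),
                                operands, &ref, VN_WALK);
    if (!ref)
      operands.release ();  /* Nothing adopted the vector.  */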