git.ipfire.org Git - thirdparty/gcc.git/commitdiff
backport: [multiple changes]
author: Richard Biener <rguenther@suse.de>
Fri, 17 May 2019 08:10:58 +0000 (08:10 +0000)
committer: Richard Biener <rguenth@gcc.gnu.org>
Fri, 17 May 2019 08:10:58 +0000 (08:10 +0000)
2019-05-17  Richard Biener  <rguenther@suse.de>

Backport from mainline
2019-05-07  Richard Biener  <rguenther@suse.de>

PR tree-optimization/90316
* tree-ssa-alias.h (get_continuation_for_phi): Take walking
limit by reference.
(walk_non_aliased_vuses): Take walking limit argument.
* tree-ssa-alias.c (maybe_skip_until): Take limit and abort
walking if it is reached instead of just counting.
(get_continuation_for_phi): Likewise.
(walk_non_aliased_vuses): Likewise, instead of leaving counter
limiting to the callback.
* tree-ssa-sccvn.c (vn_reference_lookup_2): Adjust.
(vn_reference_lookup_3): Likewise.
(vn_reference_lookup_pieces): Likewise.
(vn_reference_lookup): Likewise.
* tree-ssa-pre.c (translate_vuse_through_block): Limit walking.
* tree-ssa-scopedtables.c (vuse_eq): Adjust.
(avail_exprs_stack::lookup_avail_expr): Likewise.

2019-05-06  Richard Biener  <rguenther@suse.de>

PR tree-optimization/90316
* tree-ssa-alias.c (maybe_skip_until): Pass in target BB,
compute target on demand.
(get_continuation_for_phi): Remove code walking stmts to
get to a target virtual operand which could end up being
quadratic.

From-SVN: r271314

gcc/ChangeLog
gcc/tree-ssa-alias.c
gcc/tree-ssa-alias.h
gcc/tree-ssa-pre.c
gcc/tree-ssa-sccvn.c
gcc/tree-ssa-scopedtables.c

index 20a80ecab263544464a3e37ff117ad8cbdc1af29..85a51841830e612c4c0147df95ef50903f3628aa 100644 (file)
@@ -1,3 +1,34 @@
+2019-05-17  Richard Biener  <rguenther@suse.de>
+
+       Backport from mainline
+       2019-05-07  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/90316
+       * tree-ssa-alias.h (get_continuation_for_phi): Take walking
+       limit by reference.
+       (walk_non_aliased_vuses): Take walking limit argument.
+       * tree-ssa-alias.c (maybe_skip_until): Take limit and abort
+       walking if it is reached instead of just counting.
+       (get_continuation_for_phi): Likewise.
+       (walk_non_aliased_vuses): Likewise, instead of leaving counter
+       limiting to the callback.
+       * tree-ssa-sccvn.c (vn_reference_lookup_2): Adjust.
+       (vn_reference_lookup_3): Likewise.
+       (vn_reference_lookup_pieces): Likewise.
+       (vn_reference_lookup): Likewise.
+       * tree-ssa-pre.c (translate_vuse_through_block): Limit walking.
+       * tree-ssa-scopedtables.c (vuse_eq): Adjust.
+       (avail_exprs_stack::lookup_avail_expr): Likewise.
+
+       2019-05-06  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/90316
+       * tree-ssa-alias.c (maybe_skip_until): Pass in target BB,
+       compute target on demand.
+       (get_continuation_for_phi): Remove code walking stmts to
+       get to a target virtual operand which could end up being
+       quadratic.
+
 2019-05-15  Li Jia He  <helijia@linux.ibm.com>
 
        Backport from mainline.
index c0f67d1e17ab209d49f86c8c1780e4b3ebcf7743..aa0398bf2485a5a33a1e3e125c47e748c24a8e9f 100644 (file)
@@ -2598,8 +2598,8 @@ stmt_kills_ref_p (gimple *stmt, tree ref)
    case false is returned.  The walk starts with VUSE, one argument of PHI.  */
 
 static bool
-maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
-                 tree vuse, unsigned int *cnt, bitmap *visited,
+maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
+                 ao_ref *ref, tree vuse, unsigned int &limit, bitmap *visited,
                  bool abort_on_visited,
                  void *(*translate)(ao_ref *, tree, void *, bool *),
                  void *data)
@@ -2615,13 +2615,26 @@ maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
   while (vuse != target)
     {
       gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
+      /* If we are searching for the target VUSE by walking up to
+         TARGET_BB dominating the original PHI we are finished once
+        we reach a default def or a definition in a block dominating
+        that block.  Update TARGET and return.  */
+      if (!target
+         && (gimple_nop_p (def_stmt)
+             || dominated_by_p (CDI_DOMINATORS,
+                                target_bb, gimple_bb (def_stmt))))
+       {
+         target = vuse;
+         return true;
+       }
+
       /* Recurse for PHI nodes.  */
       if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
-         vuse = get_continuation_for_phi (def_stmt, ref, cnt,
+         vuse = get_continuation_for_phi (def_stmt, ref, limit,
                                           visited, abort_on_visited,
                                           translate, data);
          if (!vuse)
@@ -2633,7 +2646,9 @@ maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
       else
        {
          /* A clobbering statement or the end of the IL ends it failing.  */
-         ++*cnt;
+         if ((int)limit <= 0)
+           return false;
+         --limit;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              bool disambiguate_only = true;
@@ -2661,12 +2676,13 @@ maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
 /* Starting from a PHI node for the virtual operand of the memory reference
    REF find a continuation virtual operand that allows to continue walking
    statements dominating PHI skipping only statements that cannot possibly
-   clobber REF.  Increments *CNT for each alias disambiguation done.
+   clobber REF.  Decrements LIMIT for each alias disambiguation done
+   and aborts the walk, returning NULL_TREE if it reaches zero.
    Returns NULL_TREE if no suitable virtual operand can be found.  */
 
 tree
 get_continuation_for_phi (gimple *phi, ao_ref *ref,
-                         unsigned int *cnt, bitmap *visited,
+                         unsigned int &limit, bitmap *visited,
                          bool abort_on_visited,
                          void *(*translate)(ao_ref *, tree, void *, bool *),
                          void *data)
@@ -2698,49 +2714,17 @@ get_continuation_for_phi (gimple *phi, ao_ref *ref,
       arg0 = NULL_TREE;
     }
   /* If not, look if we can reach such candidate by walking defs
-     of a PHI arg without crossing other PHIs.  */
-  if (! arg0)
-    for (i = 0; i < nargs; ++i)
-      {
-       arg0 = PHI_ARG_DEF (phi, i);
-       gimple *def = SSA_NAME_DEF_STMT (arg0);
-       /* Backedges can't work.  */
-       if (dominated_by_p (CDI_DOMINATORS,
-                           gimple_bb (def), phi_bb))
-         continue;
-       /* See below.  */
-       if (gimple_code (def) == GIMPLE_PHI)
-         continue;
-       while (! dominated_by_p (CDI_DOMINATORS,
-                                phi_bb, gimple_bb (def)))
-         {
-           arg0 = gimple_vuse (def);
-           if (SSA_NAME_IS_DEFAULT_DEF (arg0))
-             break;
-           def = SSA_NAME_DEF_STMT (arg0);
-           if (gimple_code (def) == GIMPLE_PHI)
-             {
-               /* Do not try to look through arbitrarily complicated
-                  CFGs.  For those looking for the first VUSE starting
-                  from the end of the immediate dominator of phi_bb
-                  is likely faster.  */
-               arg0 = NULL_TREE;
-               goto next;
-             }
-         }
-       break;
-next:;
-      }
-  if (! arg0)
-    return NULL_TREE;
+     until we hit the immediate dominator.  maybe_skip_until will
+     do that for us.  */
+  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
 
-  /* Then check against the found candidate.  */
+  /* Then check against the (to be) found candidate.  */
   for (i = 0; i < nargs; ++i)
     {
       arg1 = PHI_ARG_DEF (phi, i);
       if (arg1 == arg0)
        ;
-      else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited,
+      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, limit, visited,
                                   abort_on_visited,
                                   /* Do not translate when walking over
                                      backedges.  */
@@ -2776,18 +2760,22 @@ next:;
    implement optimistic value-numbering for example.  Note that the
    VUSE argument is assumed to be valueized already.
 
+   LIMIT specifies the number of alias queries we are allowed to do,
+   the walk stops when it reaches zero and NULL is returned.  LIMIT
+   is decremented by the number of alias queries (plus adjustments
+   done by the callbacks) upon return.
+
    TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
 
 void *
 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
-                       void *(*walker)(ao_ref *, tree, unsigned int, void *),
+                       void *(*walker)(ao_ref *, tree, void *),
                        void *(*translate)(ao_ref *, tree, void *, bool *),
                        tree (*valueize)(tree),
-                       void *data)
+                       unsigned &limit, void *data)
 {
   bitmap visited = NULL;
   void *res;
-  unsigned int cnt = 0;
   bool translated = false;
 
   timevar_push (TV_ALIAS_STMT_WALK);
@@ -2797,7 +2785,7 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
       gimple *def_stmt;
 
       /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
-      res = (*walker) (ref, vuse, cnt, data);
+      res = (*walker) (ref, vuse, data);
       /* Abort walk.  */
       if (res == (void *)-1)
        {
@@ -2821,11 +2809,15 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
       if (gimple_nop_p (def_stmt))
        break;
       else if (gimple_code (def_stmt) == GIMPLE_PHI)
-       vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
+       vuse = get_continuation_for_phi (def_stmt, ref, limit,
                                         &visited, translated, translate, data);
       else
        {
-         cnt++;
+         if ((int)limit <= 0)
+           {
+             res = NULL;
+             break;
+           }
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (!translate)
index a5293cdea89dc75030982b2291812cddca8fc9eb..cee844973d9c960733b8093b6e7fa25c2545d333 100644 (file)
@@ -132,15 +132,13 @@ extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
 extern bool stmt_kills_ref_p (gimple *, tree);
 extern bool stmt_kills_ref_p (gimple *, ao_ref *);
 extern tree get_continuation_for_phi (gimple *, ao_ref *,
-                                     unsigned int *, bitmap *, bool,
+                                     unsigned int &, bitmap *, bool,
                                      void *(*)(ao_ref *, tree, void *, bool *),
                                      void *);
 extern void *walk_non_aliased_vuses (ao_ref *, tree,
-                                    void *(*)(ao_ref *, tree,
-                                              unsigned int, void *),
+                                    void *(*)(ao_ref *, tree, void *),
                                     void *(*)(ao_ref *, tree, void *, bool *),
-                                    tree (*)(tree),
-                                    void *);
+                                    tree (*)(tree), unsigned &, void *);
 extern int walk_aliased_vdefs (ao_ref *, tree,
                               bool (*)(ao_ref *, tree, void *),
                               void *, bitmap *,
index e1c75f8e90642e9190907734412cc40ef95859de..646feb6085f0b07949a9a150039ba06b117a2ff4 100644 (file)
@@ -1151,6 +1151,7 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
   if (gimple_bb (phi) != phiblock)
     return vuse;
 
+  unsigned int cnt = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
   use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
 
   /* Use the alias-oracle to find either the PHI node in this block,
@@ -1159,8 +1160,10 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
   if (gimple_code (phi) == GIMPLE_PHI)
     e = find_edge (block, phiblock);
   else if (use_oracle)
-    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
+    while (cnt > 0
+          && !stmt_may_clobber_ref_p_1 (phi, &ref))
       {
+       --cnt;
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
@@ -1179,10 +1182,9 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
       if (use_oracle)
        {
          bitmap visited = NULL;
-         unsigned int cnt;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
-         vuse = get_continuation_for_phi (phi, &ref, &cnt, &visited, false,
+         vuse = get_continuation_for_phi (phi, &ref, cnt, &visited, false,
                                           NULL, NULL);
          if (visited)
            BITMAP_FREE (visited);
index 9d51573920ed8661b075dc6b355865c4bd0ef45b..c3ca49bd8678d338a355ccc7bc1b6bd3dec4b2d5 100644 (file)
@@ -1671,19 +1671,12 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
    with the current VUSE and performs the expression lookup.  */
 
 static void *
-vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
-                      unsigned int cnt, void *vr_)
+vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
 {
   vn_reference_t vr = (vn_reference_t)vr_;
   vn_reference_s **slot;
   hashval_t hash;
 
-  /* This bounds the stmt walks we perform on reference lookups
-     to O(1) instead of O(N) where N is the number of dominating
-     stores.  */
-  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
-    return (void *)-1;
-
   if (last_vuse_ptr)
     *last_vuse_ptr = vuse;
 
@@ -2023,8 +2016,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
          last_vuse_ptr = NULL;
          tree saved_vuse = vr->vuse;
          hashval_t saved_hashcode = vr->hashcode;
-         void *res = vn_reference_lookup_2 (ref,
-                                            gimple_vuse (def_stmt), 0, vr);
+         void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), vr);
          /* Need to restore vr->vuse and vr->hashcode.  */
          vr->vuse = saved_vuse;
          vr->hashcode = saved_hashcode;
@@ -2671,13 +2663,14 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
       && vr1.vuse)
     {
       ao_ref r;
+      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
       vn_walk_kind = kind;
       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3,
-                                                 vuse_valueize, &vr1);
+                                                 vuse_valueize, limit, &vr1);
       gcc_checking_assert (vr1.operands == shared_lookup_references);
     }
 
@@ -2720,6 +2713,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
     {
       vn_reference_t wvnresult;
       ao_ref r;
+      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
       /* Make sure to use a valueized reference if we valueized anything.
          Otherwise preserve the full reference for advanced TBAA.  */
       if (!valuezied_anything
@@ -2733,7 +2727,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3,
-                                               vuse_valueize, &vr1);
+                                               vuse_valueize, limit, &vr1);
       gcc_checking_assert (vr1.operands == shared_lookup_references);
       if (wvnresult)
        {
index 2f3ba18d9852b84e816f3e8c3b1b2edbc7302d41..0614afc3be8dbc520d76014b8fbed79542f11d20 100644 (file)
@@ -100,19 +100,12 @@ avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
    the desired memory state.  */
 
 static void *
-vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
+vuse_eq (ao_ref *, tree vuse1, void *data)
 {
   tree vuse2 = (tree) data;
   if (vuse1 == vuse2)
     return data;
 
-  /* This bounds the stmt walks we perform on reference lookups
-     to O(1) instead of O(N) where N is the number of dominating
-     stores leading to a candidate.  We re-use the SCCVN param
-     for this as it is basically the same complexity.  */
-  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
-    return (void *)-1;
-
   return NULL;
 }
 
@@ -299,13 +292,14 @@ avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
       ao_ref ref;
+      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
       if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
-           && walk_non_aliased_vuses (&ref, vuse2,
-                                      vuse_eq, NULL, NULL, vuse1) != NULL))
+           && walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
+                                      limit, vuse1) != NULL))
        {
          if (insert)
            {