re PR tree-optimization/90911 (456.hmmer regression with r272239)
author    Richard Biener <rguenther@suse.de>
          Thu, 4 Jul 2019 13:55:15 +0000 (13:55 +0000)
committer Richard Biener <rguenth@gcc.gnu.org>
          Thu, 4 Jul 2019 13:55:15 +0000 (13:55 +0000)
2019-07-04  Richard Biener  <rguenther@suse.de>

PR tree-optimization/90911
* tree-vectorizer.h (_loop_vec_info::scalar_loop_scaling): New field.
(LOOP_VINFO_SCALAR_LOOP_SCALING): new.
* tree-vect-loop.c (_loop_vec_info::_loop_vec_info): Initialize
scalar_loop_scaling.
(vect_transform_loop): Scale scalar loop profile if needed.
* tree-vect-loop-manip.c (vect_loop_versioning): When re-using
the loop copy from if-conversion adjust edge probabilities
and scale the vectorized loop body profile, queue the scalar
profile for updating after peeling.

From-SVN: r273082
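
The effect of the patch can be illustrated outside of GCC. Below is a toy sketch (not GCC code; the toy_* types, the 0.9 probability and the counts are invented for illustration): loop versioning duplicates the loop, so both copies start with the original profile; the vectorized copy is scaled by the versioning probability immediately, while the scalar copy's factor is only queued and applied after peeling, which still re-uses that copy.

#include <cstdio>

/* Toy stand-ins for GCC's profile_probability and loop structures; all
   toy_* names and the example counts are made up for illustration.  */
struct toy_profile_probability
{
  double p;            /* probability in [0, 1] */
  bool initialized;
  toy_profile_probability invert () const { return { 1.0 - p, true }; }
  bool initialized_p () const { return initialized; }
};

struct toy_loop
{
  double header_count; /* execution count of the loop header */
};

/* Multiply every recorded count of LOOP by PROB, in the spirit of
   scale_loop_frequencies.  */
static void
toy_scale_loop_frequencies (toy_loop *loop, toy_profile_probability prob)
{
  loop->header_count *= prob.p;
}

int
main ()
{
  /* Loop versioning copies the loop, so both copies start out with the
     original profile.  */
  toy_loop vector_copy = { 1000.0 };
  toy_loop scalar_copy = { 1000.0 };

  /* Probability of taking the vectorized path at the versioning check
     (the "prob" of the patch: likely () rather than always ()).  */
  toy_profile_probability prob = { 0.9, true };

  /* The vectorized copy can be scaled right away.  */
  toy_scale_loop_frequencies (&vector_copy, prob);

  /* The scalar copy is still needed by peeling, so only remember its
     scaling factor now ...  */
  toy_profile_probability scalar_loop_scaling = prob.invert ();

  /* ... and apply it once peeling is done, as vect_transform_loop does.  */
  if (scalar_loop_scaling.initialized_p ())
    toy_scale_loop_frequencies (&scalar_copy, scalar_loop_scaling);

  printf ("vector copy header count: %.0f\n", vector_copy.header_count); /* 900 */
  printf ("scalar copy header count: %.0f\n", scalar_copy.header_count); /* 100 */
  return 0;
}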

gcc/ChangeLog
gcc/tree-vect-loop-manip.c
gcc/tree-vect-loop.c
gcc/tree-vectorizer.h

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 5f889096dac2bf4ec3d2c2e9bdda274cdbd0aced..591e2972a16984a8e49b2fbe8ed089fda8a2cb3c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,16 @@
+2019-07-04  Richard Biener  <rguenther@suse.de>
+
+       PR tree-optimization/90911
+       * tree-vectorizer.h (_loop_vec_info::scalar_loop_scaling): New field.
+       (LOOP_VINFO_SCALAR_LOOP_SCALING): new.
+       * tree-vect-loop.c (_loop_vec_info::_loop_vec_info): Initialize
+       scalar_loop_scaling.
+       (vect_transform_loop): Scale scalar loop profile if needed.
+       * tree-vect-loop-manip.c (vect_loop_versioning): When re-using
+       the loop copy from if-conversion adjust edge probabilities
+       and scale the vectorized loop body profile, queue the scalar
+       profile for updating after peeling.
+
 2019-07-04  Jan Hubicka  <jh@suse.cz>
 
        * tree-ssa-alias.c (decl_refs_may_alias_p): Add size1 and size2
diff --git a/gcc/tree-vect-loop-manip.c b/gcc/tree-vect-loop-manip.c
index c5cabc8633b6d3e293046256d284366f491672b1..bd8fffb1704787d0a611fc02ee29054422596cbb 100644
--- a/gcc/tree-vect-loop-manip.c
+++ b/gcc/tree-vect-loop-manip.c
@@ -3114,8 +3114,17 @@ vect_loop_versioning (loop_vec_info loop_vinfo,
                                 GSI_SAME_STMT);
        }
 
-      /* ???  if-conversion uses profile_probability::always () but
-         prob below is profile_probability::likely ().  */
+      /* if-conversion uses profile_probability::always () for both paths,
+        reset the paths probabilities appropriately.  */
+      edge te, fe;
+      extract_true_false_edges_from_block (condition_bb, &te, &fe);
+      te->probability = prob;
+      fe->probability = prob.invert ();
+      /* We can scale loops counts immediately but have to postpone
+         scaling the scalar loop because we re-use it during peeling.  */
+      scale_loop_frequencies (loop_to_version, te->probability);
+      LOOP_VINFO_SCALAR_LOOP_SCALING (loop_vinfo) = fe->probability;
+
       nloop = scalar_loop;
       if (dump_enabled_p ())
        dump_printf_loc (MSG_NOTE, vect_location,
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index c46cd9d56f68208d3380dd1295a24c965fe1be83..b49ab152012a5c7fe9cc0564e58d296447f9ffb1 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -833,6 +833,7 @@ _loop_vec_info::_loop_vec_info (struct loop *loop_in, vec_info_shared *shared)
     operands_swapped (false),
     no_data_dependencies (false),
     has_mask_store (false),
+    scalar_loop_scaling (profile_probability::uninitialized ()),
     scalar_loop (NULL),
     orig_loop_info (NULL)
 {
@@ -8557,6 +8558,10 @@ vect_transform_loop (loop_vec_info loop_vinfo)
   epilogue = vect_do_peeling (loop_vinfo, niters, nitersm1, &niters_vector,
                              &step_vector, &niters_vector_mult_vf, th,
                              check_profitability, niters_no_overflow);
+  if (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)
+      && LOOP_VINFO_SCALAR_LOOP_SCALING (loop_vinfo).initialized_p ())
+    scale_loop_frequencies (LOOP_VINFO_SCALAR_LOOP (loop_vinfo),
+                           LOOP_VINFO_SCALAR_LOOP_SCALING (loop_vinfo));
 
   if (niters_vector == NULL_TREE)
     {
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index 0dd29f851207439eb0d476fbbbd3c5ce1e5ca90e..f7432f0584762fd28d54f2978dc59f2df443e991 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -548,6 +548,9 @@ typedef struct _loop_vec_info : public vec_info {
   /* Mark loops having masked stores.  */
   bool has_mask_store;
 
+  /* Queued scaling factor for the scalar loop.  */
+  profile_probability scalar_loop_scaling;
+
   /* If if-conversion versioned this loop before conversion, this is the
      loop version without if-conversion.  */
   struct loop *scalar_loop;
@@ -603,6 +606,7 @@ typedef struct _loop_vec_info : public vec_info {
 #define LOOP_VINFO_PEELING_FOR_NITER(L)    (L)->peeling_for_niter
 #define LOOP_VINFO_NO_DATA_DEPENDENCIES(L) (L)->no_data_dependencies
 #define LOOP_VINFO_SCALAR_LOOP(L)         (L)->scalar_loop
+#define LOOP_VINFO_SCALAR_LOOP_SCALING(L)  (L)->scalar_loop_scaling
 #define LOOP_VINFO_HAS_MASK_STORE(L)       (L)->has_mask_store
 #define LOOP_VINFO_SCALAR_ITERATION_COST(L) (L)->scalar_cost_vec
 #define LOOP_VINFO_SINGLE_SCALAR_ITERATION_COST(L) (L)->single_scalar_iteration_cost