make inverted_post_order_compute() operate on a vec
author    Trevor Saunders <tbsaunde+gcc@tbsaunde.org>  Sun, 14 May 2017 00:39:23 +0000 (00:39 +0000)
committer Trevor Saunders <tbsaunde@gcc.gnu.org>       Sun, 14 May 2017 00:39:23 +0000 (00:39 +0000)
gcc/ChangeLog:

2017-05-13  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

* cfganal.c (inverted_post_order_compute): Change argument type
to vec *.
* cfganal.h (inverted_post_order_compute): Adjust prototype.
* df-core.c (rest_of_handle_df_initialize): Adjust.
(rest_of_handle_df_finish): Likewise.
(df_analyze_1): Likewise.
(df_analyze): Likewise.
(loop_inverted_post_order_compute): Change argument to be a vec *.
(df_analyze_loop): Adjust.
(df_get_n_blocks): Likewise.
(df_get_postorder): Likewise.
* df.h (struct df_d): Change field to be a vec.
* lcm.c (compute_laterin): Adjust.
(compute_available): Likewise.
* lra-lives.c (lra_create_live_ranges_1): Likewise.
* tree-ssa-dce.c (remove_dead_stmt): Likewise.
* tree-ssa-pre.c (compute_antic): Likewise.

From-SVN: r248027
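
In short, inverted_post_order_compute () no longer fills a caller-allocated
int array and returns a count: callers now pass a vec<int> by pointer, and
the vec carries its own length.  A minimal before/after sketch of a typical
call site (illustrative only; it is condensed from the lcm.c hunks below,
and process () is just a placeholder for the per-block work):

    /* Before: the caller sizes a raw buffer and tracks the count.  */
    int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
    int postorder_num = inverted_post_order_compute (postorder);
    for (int i = 0; i < postorder_num; ++i)
      process (postorder[i]);
    free (postorder);

    /* After: the vec grows as needed and knows its own length, so the
       separate count and the explicit free disappear.  */
    auto_vec<int, 20> postorder;
    inverted_post_order_compute (&postorder);
    for (unsigned int i = 0; i < postorder.length (); ++i)
      process (postorder[i]);

The auto_vec<int, 20> at the call sites keeps a small inline buffer, so the
common case avoids a heap allocation, while the function itself now calls
reserve_exact (n_basic_blocks_for_fn (cfun)) up front so its quick_push
calls cannot overflow.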

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index c94ddb92744d3cf93eef2e784b3208b5984f1a84..b8bff6a17c51ac153705c666d58adba604894bb1 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,23 @@
+2017-05-13  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
+
+       * cfganal.c (inverted_post_order_compute): Change argument type
+       to vec *.
+       * cfganal.h (inverted_post_order_compute): Adjust prototype.
+       * df-core.c (rest_of_handle_df_initialize): Adjust.
+       (rest_of_handle_df_finish): Likewise.
+       (df_analyze_1): Likewise.
+       (df_analyze): Likewise.
+       (loop_inverted_post_order_compute): Change argument to be a vec *.
+       (df_analyze_loop): Adjust.
+       (df_get_n_blocks): Likewise.
+       (df_get_postorder): Likewise.
+       * df.h (struct df_d): Change field to be a vec.
+       * lcm.c (compute_laterin): Adjust.
+       (compute_available): Likewise.
+       * lra-lives.c (lra_create_live_ranges_1): Likewise.
+       * tree-ssa-dce.c (remove_dead_stmt): Likewise.
+       * tree-ssa-pre.c (compute_antic): Likewise.
+
 2017-05-13  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
 
        * cfganal.c (connect_infinite_loops_to_exit): Adjust.
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 27b453ca3f7b8ad1fb9497f598e9dbb4d3194ccc..a3a6ea869941f9204f383ecc3e2d6b5e984c1f6e 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -790,12 +790,12 @@ dfs_find_deadend (basic_block bb)
    and start looking for a "dead end" from that block
    and do another inverted traversal from that block.  */
 
-int
-inverted_post_order_compute (int *post_order,
+void
+inverted_post_order_compute (vec<int> *post_order,
                             sbitmap *start_points)
 {
   basic_block bb;
-  int post_order_num = 0;
+  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));
 
   if (flag_checking)
     verify_no_unreachable_blocks ();
@@ -863,13 +863,13 @@ inverted_post_order_compute (int *post_order,
                    time, check its predecessors.  */
                stack.quick_push (ei_start (pred->preds));
               else
-                post_order[post_order_num++] = pred->index;
+               post_order->quick_push (pred->index);
             }
           else
             {
              if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && ei_one_before_end_p (ei))
-                post_order[post_order_num++] = bb->index;
+               post_order->quick_push (bb->index);
 
               if (!ei_one_before_end_p (ei))
                ei_next (&stack.last ());
@@ -927,9 +927,7 @@ inverted_post_order_compute (int *post_order,
   while (!stack.is_empty ());
 
   /* EXIT_BLOCK is always included.  */
-  post_order[post_order_num++] = EXIT_BLOCK;
-
-  return post_order_num;
+  post_order->quick_push (EXIT_BLOCK);
 }
 
 /* Compute the depth first search order of FN and store in the array
diff --git a/gcc/cfganal.h b/gcc/cfganal.h
index 7df484b84419323c8e0713f16dc6ec0cda806de9..39bb5e547a53fca66a8b7718d87e789973600db4 100644
--- a/gcc/cfganal.h
+++ b/gcc/cfganal.h
@@ -63,7 +63,7 @@ extern void add_noreturn_fake_exit_edges (void);
 extern void connect_infinite_loops_to_exit (void);
 extern int post_order_compute (int *, bool, bool);
 extern basic_block dfs_find_deadend (basic_block);
-extern int inverted_post_order_compute (int *, sbitmap *start_points = 0);
+extern void inverted_post_order_compute (vec<int> *postorder, sbitmap *start_points = 0);
 extern int pre_and_rev_post_order_compute_fn (struct function *,
                                              int *, int *, bool);
 extern int pre_and_rev_post_order_compute (int *, int *, bool);
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 1b270d417aa652988a199f5a9dacf62e422bb1e0..1e84d4d948f4404df36bdb6ff9557a19cfae1b7c 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -702,10 +702,9 @@ rest_of_handle_df_initialize (void)
     df_live_add_problem ();
 
   df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
-  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
-  gcc_assert (df->n_blocks == df->n_blocks_inverted);
+  inverted_post_order_compute (&df->postorder_inverted);
+  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
 
   df->hard_regs_live_count = XCNEWVEC (unsigned int, FIRST_PSEUDO_REGISTER);
 
@@ -816,7 +815,7 @@ rest_of_handle_df_finish (void)
     }
 
   free (df->postorder);
-  free (df->postorder_inverted);
+  df->postorder_inverted.release ();
   free (df->hard_regs_live_count);
   free (df);
   df = NULL;
@@ -1198,7 +1197,7 @@ df_analyze_1 (void)
   int i;
 
   /* These should be the same.  */
-  gcc_assert (df->n_blocks == df->n_blocks_inverted);
+  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
 
   /* We need to do this before the df_verify_all because this is
      not kept incrementally up to date.  */
@@ -1222,8 +1221,8 @@ df_analyze_1 (void)
           if (dflow->problem->dir == DF_FORWARD)
             df_analyze_problem (dflow,
                                 df->blocks_to_analyze,
-                                df->postorder_inverted,
-                                df->n_blocks_inverted);
+                               df->postorder_inverted.address (),
+                               df->postorder_inverted.length ());
           else
             df_analyze_problem (dflow,
                                 df->blocks_to_analyze,
@@ -1249,23 +1248,21 @@ void
 df_analyze (void)
 {
   bitmap current_all_blocks = BITMAP_ALLOC (&df_bitmap_obstack);
-  int i;
 
   free (df->postorder);
-  free (df->postorder_inverted);
   df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
-  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
+  df->postorder_inverted.truncate (0);
+  inverted_post_order_compute (&df->postorder_inverted);
 
-  for (i = 0; i < df->n_blocks; i++)
+  for (int i = 0; i < df->n_blocks; i++)
     bitmap_set_bit (current_all_blocks, df->postorder[i]);
 
   if (flag_checking)
     {
       /* Verify that POSTORDER_INVERTED only contains blocks reachable from
         the ENTRY block.  */
-      for (i = 0; i < df->n_blocks_inverted; i++)
+      for (unsigned int i = 0; i < df->postorder_inverted.length (); i++)
        gcc_assert (bitmap_bit_p (current_all_blocks,
                                  df->postorder_inverted[i]));
     }
@@ -1277,9 +1274,10 @@ df_analyze (void)
       bitmap_and_into (df->blocks_to_analyze, current_all_blocks);
       df->n_blocks = df_prune_to_subcfg (df->postorder,
                                         df->n_blocks, df->blocks_to_analyze);
-      df->n_blocks_inverted = df_prune_to_subcfg (df->postorder_inverted,
-                                                 df->n_blocks_inverted,
+      unsigned int newlen = df_prune_to_subcfg (df->postorder_inverted.address (),
+                                               df->postorder_inverted.length (),
                                                  df->blocks_to_analyze);
+      df->postorder_inverted.truncate (newlen);
       BITMAP_FREE (current_all_blocks);
     }
   else
@@ -1355,13 +1353,14 @@ loop_post_order_compute (int *post_order, struct loop *loop)
 /* Compute the reverse top sort order of the inverted sub-CFG specified
    by LOOP.  Returns the number of blocks which is always loop->num_nodes.  */
 
-static int
-loop_inverted_post_order_compute (int *post_order, struct loop *loop)
+static void
+loop_inverted_post_order_compute (vec<int> *post_order, struct loop *loop)
 {
   basic_block bb;
   edge_iterator *stack;
   int sp;
-  int post_order_num = 0;
+
+  post_order->reserve_exact (loop->num_nodes);
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
@@ -1398,13 +1397,13 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
               time, check its predecessors.  */
            stack[sp++] = ei_start (pred->preds);
          else
-           post_order[post_order_num++] = pred->index;
+           post_order->quick_push (pred->index);
        }
       else
        {
          if (flow_bb_inside_loop_p (loop, bb)
              && ei_one_before_end_p (ei))
-           post_order[post_order_num++] = bb->index;
+           post_order->quick_push (bb->index);
 
          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
@@ -1414,7 +1413,6 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
     }
 
   free (stack);
-  return post_order_num;
 }
 
 
@@ -1424,15 +1422,13 @@ void
 df_analyze_loop (struct loop *loop)
 {
   free (df->postorder);
-  free (df->postorder_inverted);
 
   df->postorder = XNEWVEC (int, loop->num_nodes);
-  df->postorder_inverted = XNEWVEC (int, loop->num_nodes);
+  df->postorder_inverted.truncate (0);
   df->n_blocks = loop_post_order_compute (df->postorder, loop);
-  df->n_blocks_inverted
-    = loop_inverted_post_order_compute (df->postorder_inverted, loop);
+    loop_inverted_post_order_compute (&df->postorder_inverted, loop);
   gcc_assert ((unsigned) df->n_blocks == loop->num_nodes);
-  gcc_assert ((unsigned) df->n_blocks_inverted == loop->num_nodes);
+  gcc_assert (df->postorder_inverted.length () == loop->num_nodes);
 
   bitmap blocks = BITMAP_ALLOC (&df_bitmap_obstack);
   for (int i = 0; i < df->n_blocks; ++i)
@@ -1453,8 +1449,8 @@ df_get_n_blocks (enum df_flow_dir dir)
 
   if (dir == DF_FORWARD)
     {
-      gcc_assert (df->postorder_inverted);
-      return df->n_blocks_inverted;
+      gcc_assert (df->postorder_inverted.length ());
+      return df->postorder_inverted.length ();
     }
 
   gcc_assert (df->postorder);
@@ -1473,8 +1469,8 @@ df_get_postorder (enum df_flow_dir dir)
 
   if (dir == DF_FORWARD)
     {
-      gcc_assert (df->postorder_inverted);
-      return df->postorder_inverted;
+      gcc_assert (df->postorder_inverted.length ());
+      return df->postorder_inverted.address ();
     }
   gcc_assert (df->postorder);
   return df->postorder;
diff --git a/gcc/df.h b/gcc/df.h
index 681ff32098e96b32cdd809b75f58c442fe7df78f..07fd3345d9d002d6e9bf73c2157e980225625660 100644
--- a/gcc/df.h
+++ b/gcc/df.h
@@ -582,11 +582,9 @@ struct df_d
   bitmap_head insns_to_notes_rescan;
   int *postorder;                /* The current set of basic blocks
                                     in reverse postorder.  */
-  int *postorder_inverted;       /* The current set of basic blocks
+  vec<int> postorder_inverted;       /* The current set of basic blocks
                                     in reverse postorder of inverted CFG.  */
   int n_blocks;                  /* The number of blocks in reverse postorder.  */
-  int n_blocks_inverted;         /* The number of blocks
-                                    in reverse postorder of inverted CFG.  */
 
   /* An array [FIRST_PSEUDO_REGISTER], indexed by regno, of the number
      of refs that qualify as being real hard regs uses.  Artificial
diff --git a/gcc/lcm.c b/gcc/lcm.c
index edc86b5700999a703e4d06ed1ffd169604cbeb25..e86662742111907c2bc4e34bcc66979a1e4ff499 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -270,9 +270,9 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
 
   /* Add all the blocks to the worklist.  This prevents an early exit from
      the loop given our optimistic initialization of LATER above.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
-  for (int i = 0; i < postorder_num; ++i)
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
+  for (unsigned int i = 0; i < postorder.length (); ++i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
@@ -281,7 +281,6 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
       *qin++ = bb;
       bb->aux = bb;
     }
-  free (postorder);
 
   /* Note that we do not use the last allocated element for our queue,
      as EXIT_BLOCK is never inserted into it. */
@@ -512,9 +511,9 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of AVOUT above.  Use inverted postorder
      to make the dataflow problem require less iterations.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
-  for (int i = 0; i < postorder_num; ++i)
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
+  for (unsigned int i = 0; i < postorder.length (); ++i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
@@ -523,7 +522,6 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
       *qin++ = bb;
       bb->aux = bb;
     }
-  free (postorder);
 
   qin = worklist;
   qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
index 5d4015b5ab9a65a33b3198507506761caeb0487c..e728e348215fc9f58736ac34762a37428f200ebd 100644
--- a/gcc/lra-lives.c
+++ b/gcc/lra-lives.c
@@ -1287,11 +1287,11 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
   point_freq_vec.truncate (0);
   point_freq_vec.reserve_exact (new_length);
   lra_point_freq = point_freq_vec.address ();
-  int *post_order_rev_cfg = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  int n_blocks_inverted = inverted_post_order_compute (post_order_rev_cfg);
-  lra_assert (n_blocks_inverted == n_basic_blocks_for_fn (cfun));
+  auto_vec<int, 20> post_order_rev_cfg;
+  inverted_post_order_compute (&post_order_rev_cfg);
+  lra_assert (post_order_rev_cfg.length () == (unsigned) n_basic_blocks_for_fn (cfun));
   bb_live_change_p = false;
-  for (i = n_blocks_inverted - 1; i >= 0; --i)
+  for (i = post_order_rev_cfg.length () - 1; i >= 0; --i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, post_order_rev_cfg[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
           == ENTRY_BLOCK_PTR_FOR_FN (cfun))
@@ -1338,7 +1338,6 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
            }
        }
     }
-  free (post_order_rev_cfg);
   lra_live_max_point = curr_point;
   if (lra_dump_file != NULL)
     print_live_ranges (lra_dump_file);
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 4225c3cc1a11d9e954fe29cb55449a5f8cdab127..428dc814d933adc4f284568b7f6a505bb582a95f 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -1040,14 +1040,12 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
        {
          if (!bb_postorder)
            {
-             int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-             int postorder_num
-                = inverted_post_order_compute (postorder,
-                                               &bb_contains_live_stmts);
+             auto_vec<int, 20> postorder;
+                inverted_post_order_compute (&postorder,
+                                             &bb_contains_live_stmts);
              bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-             for (int i = 0; i < postorder_num; ++i)
+             for (unsigned int i = 0; i < postorder.length (); ++i)
                 bb_postorder[postorder[i]] = i;
-             free (postorder);
            }
           FOR_EACH_EDGE (e2, ei, bb->succs)
            if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 0d9295c2d117b4eb33beb22a57d259048f239034..f3e5eff9a76d03a3e4a5aa37e92e94a516074c84 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2375,8 +2375,8 @@ compute_antic (void)
   /* For ANTIC computation we need a postorder that also guarantees that
      a block with a single successor is visited after its successor.
      RPO on the inverted CFG has this property.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
 
   auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
   bitmap_ones (worklist);
@@ -2390,7 +2390,7 @@ compute_antic (void)
         for PA ANTIC computation.  */
       num_iterations++;
       changed = false;
-      for (i = postorder_num - 1; i >= 0; i--)
+      for (i = postorder.length () - 1; i >= 0; i--)
        {
          if (bitmap_bit_p (worklist, postorder[i]))
            {
@@ -2417,7 +2417,7 @@ compute_antic (void)
     {
       /* For partial antic we ignore backedges and thus we do not need
          to perform any iteration when we process blocks in postorder.  */
-      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
+      int postorder_num = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
       for (i = postorder_num - 1 ; i >= 0; i--)
        {
          basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
@@ -2428,7 +2428,6 @@ compute_antic (void)
     }
 
   sbitmap_free (has_abnormal_preds);
-  free (postorder);
 }
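
For readers unfamiliar with the traversal all of these passes share: as the
compute_antic comment above notes, reverse postorder on the inverted CFG
guarantees that a block with a single successor is visited after that
successor, which is exactly the order a backward dataflow problem wants.  A
standalone toy sketch of the idea, deliberately independent of GCC's types
(the graph, the recursive walk, and every name here are illustrative only;
the real inverted_post_order_compute is iterative and also copes with
blocks that cannot reach the exit):

    #include <stdio.h>

    enum { N = 5 };                    /* toy CFG: 0 = entry, 4 = exit  */
    /* Edges: 0->1, 1->2, 1->3, 2->4, 3->4, stored as predecessor lists
       because the inverted traversal walks edges backward from exit.  */
    static const int npred[N]   = { 0, 1, 1, 1, 2 };
    static const int pred[N][2] = { {0}, {0}, {1}, {1}, {2, 3} };

    static int order[N], nout, seen[N];

    /* Emit a block only after every block it is reached from, i.e. a
       postorder of the inverted graph.  */
    static void dfs (int b)
    {
      seen[b] = 1;
      for (int i = 0; i < npred[b]; i++)
        if (!seen[pred[b][i]])
          dfs (pred[b][i]);
      order[nout++] = b;
    }

    int main (void)
    {
      dfs (4);                             /* start at the exit block  */
      for (int i = nout - 1; i >= 0; i--)  /* reverse postorder        */
        printf ("%d ", order[i]);
      printf ("\n");                       /* prints: 4 3 2 1 0        */
      return 0;
    }

Here block 2's only successor is 4, and 4 indeed precedes 2 in the printed
order, so a backward problem sees each block only after its successors.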