git.ipfire.org Git - thirdparty/gcc.git/commitdiff
Eliminate profile_status macro.
author: dmalcolm <dmalcolm@138bc75d-0d04-0410-961f-82ee72b054a4>
Mon, 9 Dec 2013 20:38:21 +0000 (20:38 +0000)
committer: dmalcolm <dmalcolm@138bc75d-0d04-0410-961f-82ee72b054a4>
Mon, 9 Dec 2013 20:38:21 +0000 (20:38 +0000)
gcc/
* basic-block.h (profile_status): Eliminate macro.

* cfgbuild.c (find_many_sub_basic_blocks): Eliminate use of
profile_status macro in favor of profile_status_for_fn, making
use of cfun explicit.
* cfghooks.c (account_profile_record): Likewise.
* cfgloopanal.c (single_likely_exit): Likewise.
* cfgrtl.c (rtl_verify_edges, rtl_account_profile_record): Likewise.
* graphite.c (graphite_finalize): Likewise.
* internal-fn.c (ubsan_expand_si_overflow_addsub_check,
ubsan_expand_si_overflow_neg_check,
ubsan_expand_si_overflow_mul_check): Likewise.
* ipa-split.c (consider_split, execute_split_functions): Likewise.
* loop-unroll.c (decide_peel_simple): Likewise.
* optabs.c (emit_cmp_and_jump_insn_1): Likewise.
* predict.c (maybe_hot_edge_p, probably_never_executed,
predictable_edge_p, probability_reliable_p, gimple_predict_edge,
tree_estimate_probability_driver, estimate_bb_frequencies,
compute_function_frequency, rebuild_frequencies): Likewise.
* profile.c (compute_branch_probabilities): Likewise.
* tree-cfg.c (gimple_account_profile_record): Likewise.
* tree-inline.c (optimize_inline_calls): Likewise.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205825 138bc75d-0d04-0410-961f-82ee72b054a4

15 files changed:
gcc/ChangeLog
gcc/basic-block.h
gcc/cfgbuild.c
gcc/cfghooks.c
gcc/cfgloopanal.c
gcc/cfgrtl.c
gcc/graphite.c
gcc/internal-fn.c
gcc/ipa-split.c
gcc/loop-unroll.c
gcc/optabs.c
gcc/predict.c
gcc/profile.c
gcc/tree-cfg.c
gcc/tree-inline.c

index 502ded7e7c0ce9bc1fdbfb0b8f8362076bf17eb0..543a3f081e193e031efddb8d839252685b1b34bb 100644 (file)
@@ -1,3 +1,28 @@
+2013-12-09  David Malcolm  <dmalcolm@redhat.com>
+
+       * basic-block.h (profile_status): Eliminate macro.
+
+       * cfgbuild.c (find_many_sub_basic_blocks): Eliminate use of
+       profile_status macro in favor of profile_status_for_fn, making
+       use of cfun explicit.
+       * cfghooks.c (account_profile_record): Likewise.
+       * cfgloopanal.c (single_likely_exit):
+       * cfgrtl.c (rtl_verify_edges, rtl_account_profile_record): Likewise.
+       * graphite.c (graphite_finalize):
+       * internal-fn.c (ubsan_expand_si_overflow_addsub_check,
+       ubsan_expand_si_overflow_neg_check,
+       ubsan_expand_si_overflow_mul_check): Likewise.
+       * ipa-split.c (consider_split, execute_split_functions):
+       * loop-unroll.c (decide_peel_simple):
+       * optabs.c (emit_cmp_and_jump_insn_1):
+       * predict.c (maybe_hot_edge_p, probably_never_executed,
+       predictable_edge_p, probability_reliable_p, gimple_predict_edge,
+       tree_estimate_probability_driver, estimate_bb_frequencies,
+       compute_function_frequency, rebuild_frequencies): Likewise.
+       * profile.c (compute_branch_probabilities): Likewise.
+       * tree-cfg.c (gimple_account_profile_record): Likewise.
+       * tree-inline.c (optimize_inline_calls): Likewise.
+
 2013-12-09  David Malcolm  <dmalcolm@redhat.com>
 
        * basic-block.h (label_to_block_map): Eliminate macro.
index 4ab8289050051f62bee3f6ebd3c045489fc0e35b..d000a432522d9210e53a8a2a23d88ca729164793 100644 (file)
@@ -328,7 +328,6 @@ struct GTY(()) control_flow_graph {
 
 /* Defines for textual backward source compatibility.  */
 #define last_basic_block       (cfun->cfg->x_last_basic_block)
-#define profile_status         (cfun->cfg->x_profile_status)
 
 /* For iterating over basic blocks.  */
 #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
index 08534d4bdde18fe2e2eb80e24b17204c0d19e26e..a0c2c66a72e3be6f7b34f6234fe792be2096f791 100644 (file)
@@ -618,7 +618,7 @@ find_many_sub_basic_blocks (sbitmap blocks)
 
   /* Update branch probabilities.  Expect only (un)conditional jumps
      to be created with only the forward edges.  */
-  if (profile_status != PROFILE_ABSENT)
+  if (profile_status_for_fn (cfun) != PROFILE_ABSENT)
     FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
       {
        edge e;
index 0cd6af0f01276ae06643891509e56bf8b7d4182e..ab1c15fb2c433f9363fab3ef78620c9c1d84763c 100644 (file)
@@ -1411,7 +1411,7 @@ account_profile_record (struct profile_record *record, int after_pass)
   FOR_ALL_BB (bb)
    {
       if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
-         && profile_status != PROFILE_ABSENT)
+         && profile_status_for_fn (cfun) != PROFILE_ABSENT)
        {
          sum = 0;
          FOR_EACH_EDGE (e, ei, bb->succs)
@@ -1426,7 +1426,7 @@ account_profile_record (struct profile_record *record, int after_pass)
            record->num_mismatched_count_out[after_pass]++;
        }
       if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
-         && profile_status != PROFILE_ABSENT)
+         && profile_status_for_fn (cfun) != PROFILE_ABSENT)
        {
          sum = 0;
          FOR_EACH_EDGE (e, ei, bb->preds)
index 0cee6c68b28bca1dc4d149b4bd41498c941cf194..2260f4b4baa96fe8b80aa8fd3a78c63db7d69fb0 100644 (file)
@@ -470,7 +470,7 @@ single_likely_exit (struct loop *loop)
         ruled out by this test.  The static branch prediction algorithm
          will not assign such a low probability to conditionals for usual
          reasons.  */
-      if (profile_status != PROFILE_ABSENT
+      if (profile_status_for_fn (cfun) != PROFILE_ABSENT
          && ex->probability < 5 && !ex->count)
        continue;
       if (!found)
index 772d939a26b9f9ab2cfb3cccf4d1cdbd59492d94..34fe4f37846ad7f77fd1fc46078648676c8a65c7 100644 (file)
@@ -2420,7 +2420,7 @@ rtl_verify_edges (void)
          && any_condjump_p (BB_END (bb)))
        {
          if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
-             && profile_status != PROFILE_ABSENT)
+             && profile_status_for_fn (cfun) != PROFILE_ABSENT)
            {
              error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
                     XINT (note, 0), BRANCH_EDGE (bb)->probability);
@@ -5011,10 +5011,10 @@ rtl_account_profile_record (basic_block bb, int after_pass,
       {
        record->size[after_pass]
          += insn_rtx_cost (PATTERN (insn), false);
-       if (profile_status == PROFILE_READ)
+       if (profile_status_for_fn (cfun) == PROFILE_READ)
          record->time[after_pass]
            += insn_rtx_cost (PATTERN (insn), true) * bb->count;
-       else if (profile_status == PROFILE_GUESSED)
+       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
          record->time[after_pass]
            += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
       }
index e46710ca9398ddf7f556307653abbe31555028f6..a573ea74d1900a89a0d6cb51011b2b265c89d252 100644 (file)
@@ -245,7 +245,7 @@ graphite_finalize (bool need_cfg_cleanup_p)
     {
       scev_reset ();
       cleanup_tree_cfg ();
-      profile_status = PROFILE_ABSENT;
+      profile_status_for_fn (cfun) = PROFILE_ABSENT;
       release_recorded_exits ();
       tree_estimate_probability ();
     }
index fb1e5784b15bb472b71bc948cd6ad83d0f379440..8c54d987a626e9b739f71bf4b3f28509f6a5adb6 100644 (file)
@@ -194,7 +194,7 @@ ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
       if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
-         if (profile_status != PROFILE_ABSENT
+         if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
@@ -285,7 +285,7 @@ ubsan_expand_si_overflow_neg_check (gimple stmt)
       if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
-         if (profile_status != PROFILE_ABSENT
+         if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
@@ -364,7 +364,7 @@ ubsan_expand_si_overflow_mul_check (gimple stmt)
       if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
-         if (profile_status != PROFILE_ABSENT
+         if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
index eca86dab14a5c2ba06ba32e8199713276324e40e..f8fa0eee2e5e1d1ceb4088291f099358ede07fbf 100644 (file)
@@ -411,7 +411,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
         a loop, enable splitting since inlining code skipping the loop
         is likely noticeable win.  */
       if (back_edge
-         && profile_status != PROFILE_READ
+         && profile_status_for_fn (cfun) != PROFILE_READ
          && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1585,7 +1585,7 @@ execute_split_functions (void)
 
   /* We enforce splitting after loop headers when profile info is not
      available.  */
-  if (profile_status != PROFILE_READ)
+  if (profile_status_for_fn (cfun) != PROFILE_READ)
     mark_dfs_back_edges ();
 
   /* Initialize bitmap to track forbidden calls.  */
index 9910b4e1b439e4c4608a39f6870f6f04c17b6745..d1c7b9cdfb35aed632f4f07387857b4e4bdbdb78 100644 (file)
@@ -1371,7 +1371,7 @@ decide_peel_simple (struct loop *loop, int flags)
      also branch from branch prediction POV (and probably better reason
      to not unroll/peel).  */
   if (num_loop_branches (loop) > 1
-      && profile_status != PROFILE_READ)
+      && profile_status_for_fn (cfun) != PROFILE_READ)
     {
       if (dump_file)
        fprintf (dump_file, ";; Not peeling, contains branches\n");
index e035af18c612e0a280e4e44e3884de929705808b..5172bd41666b1cb876c626bf8d0dac7233f581db 100644 (file)
@@ -4286,7 +4286,7 @@ emit_cmp_and_jump_insn_1 (rtx test, enum machine_mode mode, rtx label, int prob)
   insn = emit_jump_insn (GEN_FCN (icode) (test, XEXP (test, 0),
                                           XEXP (test, 1), label));
   if (prob != -1
-      && profile_status != PROFILE_ABSENT
+      && profile_status_for_fn (cfun) != PROFILE_ABSENT
       && insn
       && JUMP_P (insn)
       && any_condjump_p (insn)
index 1dec4dc92c7658670d9cc54eb2e991b77691c1ac..6bb1b2cfef13a713737ea6491f16f5acca5512a9 100644 (file)
@@ -224,7 +224,7 @@ cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
 bool
 maybe_hot_edge_p (edge e)
 {
-  if (profile_status == PROFILE_READ)
+  if (profile_status_for_fn (cfun) == PROFILE_READ)
     return maybe_hot_count_p (cfun, e->count);
   return maybe_hot_frequency_p (cfun, EDGE_FREQUENCY (e));
 }
@@ -239,7 +239,7 @@ probably_never_executed (struct function *fun,
                          gcov_type count, int frequency)
 {
   gcc_checking_assert (fun);
-  if (profile_status_for_fn (fun) == PROFILE_READ)
+  if (profile_status_for_fn (cfun) == PROFILE_READ)
     {
       int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
       if (count * unlikely_count_fraction >= profile_info->runs)
@@ -438,7 +438,7 @@ optimize_loop_nest_for_size_p (struct loop *loop)
 bool
 predictable_edge_p (edge e)
 {
-  if (profile_status == PROFILE_ABSENT)
+  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
     return false;
   if ((e->probability
        <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
@@ -539,8 +539,8 @@ gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
 static bool
 probability_reliable_p (int prob)
 {
-  return (profile_status == PROFILE_READ
-         || (profile_status == PROFILE_GUESSED
+  return (profile_status_for_fn (cfun) == PROFILE_READ
+         || (profile_status_for_fn (cfun) == PROFILE_GUESSED
              && (prob <= HITRATE (1) || prob >= HITRATE (99))));
 }
 
@@ -610,7 +610,7 @@ rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
 void
 gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
 {
-  gcc_assert (profile_status != PROFILE_GUESSED);
+  gcc_assert (profile_status_for_fn (cfun) != PROFILE_GUESSED);
   if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && EDGE_COUNT (e->src->succs) >
        1)
       && flag_guess_branch_prob && optimize)
@@ -2443,8 +2443,8 @@ tree_estimate_probability_driver (void)
   loop_optimizer_finalize ();
   if (dump_file && (dump_flags & TDF_DETAILS))
     gimple_dump_cfg (dump_file, dump_flags);
-  if (profile_status == PROFILE_ABSENT)
-    profile_status = PROFILE_GUESSED;
+  if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
+    profile_status_for_fn (cfun) = PROFILE_GUESSED;
   return 0;
 }
 \f
@@ -2954,7 +2954,7 @@ estimate_bb_frequencies (bool force)
   basic_block bb;
   sreal freq_max;
 
-  if (force || profile_status != PROFILE_READ || !counts_to_freqs ())
+  if (force || profile_status_for_fn (cfun) != PROFILE_READ || !counts_to_freqs ())
     {
       static int real_values_initialized = 0;
 
@@ -3030,7 +3030,7 @@ compute_function_frequency (void)
   if (DECL_STATIC_DESTRUCTOR (current_function_decl))
     node->only_called_at_exit = true;
 
-  if (profile_status != PROFILE_READ)
+  if (profile_status_for_fn (cfun) != PROFILE_READ)
     {
       int flags = flags_from_decl_or_type (current_function_decl);
       if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
@@ -3189,8 +3189,8 @@ rebuild_frequencies (void)
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     count_max = MAX (bb->count, count_max);
 
-  if (profile_status == PROFILE_GUESSED
-      || (profile_status == PROFILE_READ && count_max < REG_BR_PROB_BASE/10))
+  if (profile_status_for_fn (cfun) == PROFILE_GUESSED
+      || (profile_status_for_fn (cfun) == PROFILE_READ && count_max < REG_BR_PROB_BASE/10))
     {
       loop_optimizer_init (0);
       add_noreturn_fake_exit_edges ();
@@ -3200,7 +3200,7 @@ rebuild_frequencies (void)
       remove_fake_exit_edges ();
       loop_optimizer_finalize ();
     }
-  else if (profile_status == PROFILE_READ)
+  else if (profile_status_for_fn (cfun) == PROFILE_READ)
     counts_to_freqs ();
   else
     gcc_unreachable ();
index 9aec3cb06b3f8894ace34433f391181becde5738..24c16aa5b955100663f27b1cd2ce0f0a4e75594d 100644 (file)
@@ -797,7 +797,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
         give all abnormals frequency of 0, otherwise distribute the
         frequency over abnormals (this is the case of noreturn
         calls).  */
-      else if (profile_status == PROFILE_ABSENT)
+      else if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
        {
          int total = 0;
 
@@ -825,7 +825,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
        }
     }
   counts_to_freqs ();
-  profile_status = PROFILE_READ;
+  profile_status_for_fn (cfun) = PROFILE_READ;
   compute_function_frequency ();
 
   if (dump_file)
index f384b04eb5496f1bbf9b2874bd781a532145fb5a..57d648755f0fbc0d7161c879337b8409fe6695e8 100644 (file)
@@ -7875,11 +7875,11 @@ gimple_account_profile_record (basic_block bb, int after_pass,
     {
       record->size[after_pass]
        += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
-      if (profile_status == PROFILE_READ)
+      if (profile_status_for_fn (cfun) == PROFILE_READ)
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->count;
-      else if (profile_status == PROFILE_GUESSED)
+      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
                                 &eni_time_weights) * bb->frequency;
index 1d1bc1eacb1eac7ca9bf4dba780952567bcc249a..fd7eedb8fb0cd4f114152a4547024c924e0d7aa0 100644 (file)
@@ -4612,7 +4612,8 @@ optimize_inline_calls (tree fn)
          | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
-         | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
+         | (profile_status_for_fn (cfun) != PROFILE_ABSENT
+            ? TODO_rebuild_frequencies : 0));
 }
 
 /* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */