git.ipfire.org Git - thirdparty/gcc.git/commitdiff
i386.c (make_resolver_func): Update.
author Jan Hubicka <hubicka@ucw.cz>
Sun, 4 Jun 2017 18:55:08 +0000 (20:55 +0200)
committer Jan Hubicka <hubicka@gcc.gnu.org>
Sun, 4 Jun 2017 18:55:08 +0000 (18:55 +0000)
2017-05-23  Jan Hubicka  <hubicka@ucw.cz>

* config/i386/i386.c (make_resolver_func): Update.
* Makefile.in: Add profile-count.h and profile-count.o
* auto-profile.c (afdo_indirect_call): Update to new API.
(afdo_set_bb_count): Update.
(afdo_propagate_edge): Update.
(afdo_propagate_circuit): Update.
(afdo_calculate_branch_prob): Update.
(afdo_annotate_cfg): Update.
* basic-block.h: Include profile-count.h
(struct edge_def): Turn count to profile_count.
(struct basic_block_def): Likewise.
(REG_BR_PROB_BASE): Move to profile-count.h
(RDIV): Move to profile-count.h
* bb-reorder.c (max_entry_count): Turn to profile_count.
(find_traces): Update.
(rotate_loop): Update.
(connect_traces): Update.
(sanitize_hot_paths): Update.
* bt-load.c (migrate_btr_defs): Update.
* cfg.c (RDIV): Remove.
(init_flow): Use alloc_block.
(alloc_block): Uninitialize count.
(unchecked_make_edge): Uninitialize count.
(check_bb_profile): Update.
(dump_edge_info): Update.
(dump_bb_info): Update.
(update_bb_profile_for_threading): Update.
(scale_bbs_frequencies_int): Update.
(scale_bbs_frequencies_gcov_type): Update.
(scale_bbs_frequencies_profile_count): New.
* cfg.h (update_bb_profile_for_threading): Update.
(scale_bbs_frequencies_profile_count): Declare.
* cfgbuild.c (compute_outgoing_frequencies): Update.
(find_many_sub_basic_blocks): Update.
* cfgcleanup.c (try_forward_edges): Update.
(try_crossjump_to_edge): Update.
* cfgexpand.c (expand_gimple_tailcall): Update.
(construct_exit_block): Update.
* cfghooks.c (verify_flow_info): Update.
(dump_bb_for_graph): Update.
(split_edge): Update.
(make_forwarder_block): Update.
(duplicate_block): Update.
(account_profile_record): Update.
* cfgloop.c (find_subloop_latch_edge_by_profile): Update.
(get_estimated_loop_iterations): Update.
* cfgloopanal.c (expected_loop_iterations_unbounded): Update.
(single_likely_exit): Update.
* cfgloopmanip.c (scale_loop_profile): Update.
(loopify): Update.
(set_zero_probability): Update.
(lv_adjust_loop_entry_edge): Update.
* cfgrtl.c (force_nonfallthru_and_redirect): Update.
(purge_dead_edges): Update.
(rtl_account_profile_record): Update.
* cgraph.c (cgraph_node::create): Uninitialize count.
(symbol_table::create_edge): Uninitialize count.
(cgraph_update_edges_for_call_stmt_node): Update.
(cgraph_edge::dump_edge_flags): Update.
(cgraph_node::dump): Update.
(cgraph_edge::maybe_hot_p): Update.
* cgraph.h: Include profile-count.h
(create_clone, create_edge, create_indirect_edge): Update.
(cgraph_node): Turn count to profile_count.
(cgraph_edge): Likewise.
(make_speculative, clone): Update.
(create_edge): Update.
(init_lowered_empty_function): Update.
* cgraphclones.c (cgraph_edge::clone): Update.
(duplicate_thunk_for_node): Update.
(cgraph_node::create_clone): Update.
* cgraphunit.c (cgraph_node::analyze): Update.
(cgraph_node::expand_thunk): Update.
* final.c (dump_basic_block_info): Update.
* gimple-streamer-in.c (input_bb): Update.
* gimple-streamer-out.c (output_bb): Update.
* graphite.c (print_global_statistics): Update.
(print_graphite_scop_statistics): Update.
* hsa-brig.c: Include basic-block.h.
* hsa-dump.c: Include basic-block.h.
* hsa-gen.c (T sum_slice): Update.
(convert_switch_statements): Update.
* hsa-regalloc.c: Include basic-block.h.
* ipa-chkp.c (chkp_produce_thunks): Update.
* ipa-cp.c (struct caller_statistics): Update.
(init_caller_stats): Update.
(gather_caller_stats): Update.
(ipcp_cloning_candidate_p): Update.
(good_cloning_opportunity_p): Update.
(get_info_about_necessary_edges): Update.
(dump_profile_updates): Update.
(update_profiling_info): Update.
(update_specialized_profile): Update.
(perhaps_add_new_callers): Update.
(decide_about_value): Update.
(ipa_cp_c_finalize): Update.
* ipa-devirt.c (struct odr_type_warn_count): Update.
(struct decl_warn_count): Update.
(struct final_warning_record): Update.
(possible_polymorphic_call_targets): Update.
(ipa_devirt): Update.
* ipa-fnsummary.c (redirect_to_unreachable): Update.
* ipa-icf.c (sem_function::merge): Update.
* ipa-inline-analysis.c (do_estimate_edge_time): Update.
* ipa-inline.c (compute_uninlined_call_time): Update.
(compute_inlined_call_time): Update.
(want_inline_small_function_p): Update.
(want_inline_self_recursive_call_p): Update.
(edge_badness): Update.
(lookup_recursive_calls): Update.
(recursive_inlining): Update.
(inline_small_functions): Update.
(dump_overall_stats): Update.
(dump_inline_stats): Update.
* ipa-profile.c (ipa_profile_generate_summary): Update.
(ipa_propagate_frequency): Update.
(ipa_profile): Update.
* ipa-prop.c (ipa_make_edge_direct_to_target): Update.
* ipa-utils.c (ipa_merge_profiles): Update.
* loop-doloop.c (doloop_modify): Update.
* loop-unroll.c (report_unroll): Update.
(unroll_loop_runtime_iterations): Update.
* lto-cgraph.c (lto_output_edge): Update.
(lto_output_node): Update.
(input_node): Update.
(input_edge): Update.
(merge_profile_summaries): Update.
* lto-streamer-in.c (input_cfg): Update.
* lto-streamer-out.c (output_cfg): Update.
* mcf.c (create_fixup_graph): Update.
(adjust_cfg_counts): Update.
(sum_edge_counts): Update.
* modulo-sched.c (sms_schedule): Update.
* postreload-gcse.c (eliminate_partially_redundant_load): Update.
* predict.c (maybe_hot_count_p): Update.
(probably_never_executed): Update.
(dump_prediction): Update.
(combine_predictions_for_bb): Update.
(propagate_freq): Update.
(handle_missing_profiles): Update.
(counts_to_freqs): Update.
(rebuild_frequencies): Update.
(force_edge_cold): Update.
* predict.h: Include profile-count.h
(maybe_hot_count_p, counts_to_freqs): Update.
* print-rtl-function.c: Do not include cfg.h
* print-rtl.c: Include basic-block.h
* profile-count.c: New file.
* profile-count.h: New file.
* profile.c (is_edge_inconsistent): Update.
(correct_negative_edge_counts): Update.
(is_inconsistent): Update.
(set_bb_counts): Update.
(read_profile_edge_counts): Update.
(compute_frequency_overlap): Update.
(compute_branch_probabilities): Update; Initialize and deinitialize
gcov_count tables.
(branch_prob): Update.
* profile.h (bb_gcov_counts, edge_gcov_counts): New.
(edge_gcov_count): New.
(bb_gcov_count): New.
* shrink-wrap.c (try_shrink_wrapping): Update.
* tracer.c (better_p): Update.
* trans-mem.c (expand_transaction): Update.
(ipa_tm_insert_irr_call): Update.
(ipa_tm_insert_gettmclone_call): Update.
* tree-call-cdce.c: Update.
* tree-cfg.c (gimple_duplicate_sese_region): Update.
(gimple_duplicate_sese_tail): Update.
(gimple_account_profile_record): Update.
(execute_fixup_cfg): Update.
* tree-inline.c (copy_bb): Update.
(copy_edges_for_bb): Update.
(initialize_cfun): Update.
(freqs_to_counts): Update.
(copy_cfg_body): Update.
(expand_call_inline): Update.
* tree-ssa-ifcombine.c (update_profile_after_ifcombine): Update.
* tree-ssa-loop-ivcanon.c (unloop_loops): Update.
(try_unroll_loop_completely): Update.
(try_peel_loop): Update.
* tree-ssa-loop-manip.c (tree_transform_and_unroll_loop): Update.
* tree-ssa-loop-niter.c (estimate_numbers_of_iterations_loop): Update.
* tree-ssa-loop-split.c (connect_loops): Update.
* tree-ssa-loop-unswitch.c (hoist_guard): Update.
* tree-ssa-reassoc.c (branch_fixup): Update.
* tree-ssa-tail-merge.c (replace_block_by): Update.
* tree-ssa-threadupdate.c (create_block_for_threading): Update.
(compute_path_counts): Update.
(update_profile): Update.
(recompute_probabilities): Update.
(update_joiner_offpath_counts): Update.
(estimated_freqs_path): Update.
(freqs_to_counts_path): Update.
(clear_counts_path): Update.
(ssa_fix_duplicate_block_edges): Update.
(duplicate_thread_path): Update.
* tree-switch-conversion.c (case_bit_test_cmp): Update.
(struct switch_conv_info): Update.
* tree-tailcall.c (decrease_profile): Update.
* tree-vect-loop-manip.c (slpeel_add_loop_guard): Update.
* tree-vect-loop.c (scale_profile_for_vect_loop): Update.
* value-prof.c (check_counter): Update.
(gimple_divmod_fixed_value): Update.
(gimple_mod_pow2): Update.
(gimple_mod_subtract): Update.
(gimple_ic_transform): Update.
(gimple_stringop_fixed_value): Update.
* value-prof.h (gimple_ic): Update.

* gcc.dg/tree-ssa/attr-hotcold-2.c: Update template.

From-SVN: r248863
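
The change running through every diff below is mechanical but easy to misread: raw gcov_type counters, where the value 0 ambiguously meant either "no profile data" or "known never executed", are replaced by a profile_count value class with distinct uninitialized () and zero () states and helpers such as initialized_p (), from_gcov_type (), to_gcov_type (), apply_probability () and apply_scale (). The fragment below is a simplified, self-contained stand-in written only to illustrate that distinction; it is not the real class added in gcc/profile-count.h, and the -1 encoding, the rounding, and the exact member set are assumptions of this sketch.

/* Simplified stand-in for the profile_count idea; NOT GCC's class.  */
#include <cassert>
#include <cstdint>

class profile_count
{
  int64_t m_val;   /* -1 encodes "uninitialized" in this sketch only.  */
  explicit profile_count (int64_t v) : m_val (v) {}
public:
  static profile_count uninitialized () { return profile_count (-1); }
  static profile_count zero () { return profile_count (0); }
  static profile_count from_gcov_type (int64_t v) { return profile_count (v); }

  bool initialized_p () const { return m_val >= 0; }
  int64_t to_gcov_type () const { assert (initialized_p ()); return m_val; }

  /* Scale by NUM/DEN, rounding to nearest (the old RDIV convention).  */
  profile_count apply_scale (int64_t num, int64_t den) const
  {
    if (!initialized_p ())
      return uninitialized ();
    return from_gcov_type ((m_val * num + den / 2) / den);
  }

  /* PROB is biased by REG_BR_PROB_BASE (10000), as in the pre-patch code.  */
  profile_count apply_probability (int prob) const
  {
    return apply_scale (prob, 10000);
  }
};

int main ()
{
  /* Mirrors e->count = b->count.apply_probability (e->probability)
     from the cfgbuild.c hunk below.  */
  profile_count bb = profile_count::from_gcov_type (1000);
  assert (bb.apply_probability (2500).to_gcov_type () == 250);

  /* Uninitialized counts propagate instead of silently becoming zero,
     which is why alloc_block () and unchecked_make_edge () in cfg.c now
     start counts as uninitialized rather than 0.  */
  assert (!profile_count::uninitialized ().apply_scale (3, 2).initialized_p ());
  return 0;
}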

79 files changed:
gcc/ChangeLog
gcc/Makefile.in
gcc/auto-profile.c
gcc/basic-block.h
gcc/bb-reorder.c
gcc/bt-load.c
gcc/cfg.c
gcc/cfg.h
gcc/cfgbuild.c
gcc/cfgcleanup.c
gcc/cfgexpand.c
gcc/cfghooks.c
gcc/cfgloop.c
gcc/cfgloopanal.c
gcc/cfgloopmanip.c
gcc/cfgrtl.c
gcc/cgraph.c
gcc/cgraph.h
gcc/cgraphclones.c
gcc/cgraphunit.c
gcc/config/i386/i386.c
gcc/final.c
gcc/fortran/expr.c
gcc/gimple-streamer-in.c
gcc/gimple-streamer-out.c
gcc/graphite.c
gcc/hsa-brig.c
gcc/hsa-dump.c
gcc/hsa-gen.c
gcc/hsa-regalloc.c
gcc/ipa-chkp.c
gcc/ipa-cp.c
gcc/ipa-devirt.c
gcc/ipa-fnsummary.c
gcc/ipa-icf.c
gcc/ipa-inline-analysis.c
gcc/ipa-inline.c
gcc/ipa-profile.c
gcc/ipa-prop.c
gcc/ipa-utils.c
gcc/loop-doloop.c
gcc/loop-unroll.c
gcc/lto-cgraph.c
gcc/lto-streamer-in.c
gcc/lto-streamer-out.c
gcc/mcf.c
gcc/modulo-sched.c
gcc/postreload-gcse.c
gcc/predict.c
gcc/predict.h
gcc/print-rtl-function.c
gcc/print-rtl.c
gcc/profile-count.c [new file with mode: 0644]
gcc/profile-count.h [new file with mode: 0644]
gcc/profile.c
gcc/profile.h
gcc/shrink-wrap.c
gcc/testsuite/ChangeLog
gcc/testsuite/gcc.dg/tree-ssa/attr-hotcold-2.c
gcc/tracer.c
gcc/trans-mem.c
gcc/tree-call-cdce.c
gcc/tree-cfg.c
gcc/tree-inline.c
gcc/tree-ssa-ifcombine.c
gcc/tree-ssa-loop-ivcanon.c
gcc/tree-ssa-loop-manip.c
gcc/tree-ssa-loop-niter.c
gcc/tree-ssa-loop-split.c
gcc/tree-ssa-loop-unswitch.c
gcc/tree-ssa-reassoc.c
gcc/tree-ssa-tail-merge.c
gcc/tree-ssa-threadupdate.c
gcc/tree-switch-conversion.c
gcc/tree-tailcall.c
gcc/tree-vect-loop-manip.c
gcc/tree-vect-loop.c
gcc/value-prof.c
gcc/value-prof.h

index dcb621615067c4fddd16bf36ccd52e76c314f700..037168e20c61e648880971921dfec714c7583919 100644 (file)
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,215 @@
+2017-05-23  Jan Hubicka  <hubicka@ucw.cz>
+
+       * config/i386/i386.c (make_resolver_func): Update.
+       * Makefile.in: Add profile-count.h and profile-count.o
+       * auto-profile.c (afdo_indirect_call): Update to new API.
+       (afdo_set_bb_count): Update.
+       (afdo_propagate_edge): Update.
+       (afdo_propagate_circuit): Update.
+       (afdo_calculate_branch_prob): Update.
+       (afdo_annotate_cfg): Update.
+       * basic-block.h: Include profile-count.h
+       (struct edge_def): Turn count to profile_count.
+       (struct basic_block_def): Likewise.
+       (REG_BR_PROB_BASE): Move to profile-count.h
+       (RDIV): Move to profile-count.h
+       * bb-reorder.c (max_entry_count): Turn to profile_count.
+       (find_traces): Update.
+       (rotate_loop): Update.
+       (connect_traces): Update.
+       (sanitize_hot_paths): Update.
+       * bt-load.c (migrate_btr_defs): Update.
+       * cfg.c (RDIV): Remove.
+       (init_flow): Use alloc_block.
+       (alloc_block): Uninitialize count.
+       (unchecked_make_edge): Uninitialize count.
+       (check_bb_profile): Update.
+       (dump_edge_info): Update.
+       (dump_bb_info): Update.
+       (update_bb_profile_for_threading): Update.
+       (scale_bbs_frequencies_int): Update.
+       (scale_bbs_frequencies_gcov_type): Update.
+       (scale_bbs_frequencies_profile_count): New.
+       * cfg.h (update_bb_profile_for_threading): Update.
+       (scale_bbs_frequencies_profile_count): Declare.
+       * cfgbuild.c (compute_outgoing_frequencies): Update.
+       (find_many_sub_basic_blocks): Update.
+       * cfgcleanup.c (try_forward_edges): Update.
+       (try_crossjump_to_edge): Update.
+       * cfgexpand.c (expand_gimple_tailcall): Update.
+       (construct_exit_block): Update.
+       * cfghooks.c (verify_flow_info): Update.
+       (dump_bb_for_graph): Update.
+       (split_edge): Update.
+       (make_forwarder_block): Update.
+       (duplicate_block): Update.
+       (account_profile_record): Update.
+       * cfgloop.c (find_subloop_latch_edge_by_profile): Update.
+       (get_estimated_loop_iterations): Update.
+       * cfgloopanal.c (expected_loop_iterations_unbounded): Update.
+       (single_likely_exit): Update.
+       * cfgloopmanip.c (scale_loop_profile): Update.
+       (loopify): Update.
+       (set_zero_probability): Update.
+       (lv_adjust_loop_entry_edge): Update.
+       * cfgrtl.c (force_nonfallthru_and_redirect): Update.
+       (purge_dead_edges): Update.
+       (rtl_account_profile_record): Update.
+       * cgraph.c (cgraph_node::create): Uninitialize count.
+       (symbol_table::create_edge): Uninitialize count.
+       (cgraph_update_edges_for_call_stmt_node): Update.
+       (cgraph_edge::dump_edge_flags): Update.
+       (cgraph_node::dump): Update.
+       (cgraph_edge::maybe_hot_p): Update.
+       * cgraph.h: Include profile-count.h
+       (create_clone, create_edge, create_indirect_edge): Update.
+       (cgraph_node): Turn count to profile_count.
+       (cgraph_edge): Likewise.
+       (make_speculative, clone): Update.
+       (create_edge): Update.
+       (init_lowered_empty_function): Update.
+       * cgraphclones.c (cgraph_edge::clone): Update.
+       (duplicate_thunk_for_node): Update.
+       (cgraph_node::create_clone): Update.
+       * cgraphunit.c (cgraph_node::analyze): Update.
+       (cgraph_node::expand_thunk): Update.
+       * final.c (dump_basic_block_info): Update.
+       * gimple-streamer-in.c (input_bb): Update.
+       * gimple-streamer-out.c (output_bb): Update.
+       * graphite.c (print_global_statistics): Update.
+       (print_graphite_scop_statistics): Update.
+       * hsa-brig.c: Include basic-block.h.
+       * hsa-dump.c: Include basic-block.h.
+       * hsa-gen.c (T sum_slice): Update.
+       (convert_switch_statements): Update.
+       * hsa-regalloc.c: Include basic-block.h.
+       * ipa-chkp.c (chkp_produce_thunks): Update.
+       * ipa-cp.c (struct caller_statistics): Update.
+       (init_caller_stats): Update.
+       (gather_caller_stats): Update.
+       (ipcp_cloning_candidate_p): Update.
+       (good_cloning_opportunity_p): Update.
+       (get_info_about_necessary_edges): Update.
+       (dump_profile_updates): Update.
+       (update_profiling_info): Update.
+       (update_specialized_profile): Update.
+       (perhaps_add_new_callers): Update.
+       (decide_about_value): Update.
+       (ipa_cp_c_finalize): Update.
+       * ipa-devirt.c (struct odr_type_warn_count): Update.
+       (struct decl_warn_count): Update.
+       (struct final_warning_record): Update.
+       (possible_polymorphic_call_targets): Update.
+       (ipa_devirt): Update.
+       * ipa-fnsummary.c (redirect_to_unreachable): Update.
+       * ipa-icf.c (sem_function::merge): Update.
+       * ipa-inline-analysis.c (do_estimate_edge_time): Update.
+       * ipa-inline.c (compute_uninlined_call_time): Update.
+       (compute_inlined_call_time): Update.
+       (want_inline_small_function_p): Update.
+       (want_inline_self_recursive_call_p): Update.
+       (edge_badness): Update.
+       (lookup_recursive_calls): Update.
+       (recursive_inlining): Update.
+       (inline_small_functions): Update.
+       (dump_overall_stats): Update.
+       (dump_inline_stats): Update.
+       * ipa-profile.c (ipa_profile_generate_summary): Update.
+       (ipa_propagate_frequency): Update.
+       (ipa_profile): Update.
+       * ipa-prop.c (ipa_make_edge_direct_to_target): Update.
+       * ipa-utils.c (ipa_merge_profiles): Update.
+       * loop-doloop.c (doloop_modify): Update.
+       * loop-unroll.c (report_unroll): Update.
+       (unroll_loop_runtime_iterations): Update.
+       * lto-cgraph.c (lto_output_edge): Update.
+       (lto_output_node): Update.
+       (input_node): Update.
+       (input_edge): Update.
+       (merge_profile_summaries): Update.
+       * lto-streamer-in.c (input_cfg): Update.
+       * lto-streamer-out.c (output_cfg): Update.
+       * mcf.c (create_fixup_graph): Update.
+       (adjust_cfg_counts): Update.
+       (sum_edge_counts): Update.
+       * modulo-sched.c (sms_schedule): Update.
+       * postreload-gcse.c (eliminate_partially_redundant_load): Update.
+       * predict.c (maybe_hot_count_p): Update.
+       (probably_never_executed): Update.
+       (dump_prediction): Update.
+       (combine_predictions_for_bb): Update.
+       (propagate_freq): Update.
+       (handle_missing_profiles): Update.
+       (counts_to_freqs): Update.
+       (rebuild_frequencies): Update.
+       (force_edge_cold): Update.
+       * predict.h: Include profile-count.h
+       (maybe_hot_count_p, counts_to_freqs): Update.
+       * print-rtl-function.c: Do not include cfg.h
+       * print-rtl.c: Include basic-block.h
+       * profile-count.c: New file.
+       * profile-count.h: New file.
+       * profile.c (is_edge_inconsistent): Update.
+       (correct_negative_edge_counts): Update.
+       (is_inconsistent): Update.
+       (set_bb_counts): Update.
+       (read_profile_edge_counts): Update.
+       (compute_frequency_overlap): Update.
+       (compute_branch_probabilities): Update; Initialize and deinitialize
+       gcov_count tables.
+       (branch_prob): Update.
+       * profile.h (bb_gcov_counts, edge_gcov_counts): New.
+       (edge_gcov_count): New.
+       (bb_gcov_count): New.
+       * shrink-wrap.c (try_shrink_wrapping): Update.
+       * tracer.c (better_p): Update.
+       * trans-mem.c (expand_transaction): Update.
+       (ipa_tm_insert_irr_call): Update.
+       (ipa_tm_insert_gettmclone_call): Update.
+       * tree-call-cdce.c: Update.
+       * tree-cfg.c (gimple_duplicate_sese_region): Update.
+       (gimple_duplicate_sese_tail): Update.
+       (gimple_account_profile_record): Update.
+       (execute_fixup_cfg): Update.
+       * tree-inline.c (copy_bb): Update.
+       (copy_edges_for_bb): Update.
+       (initialize_cfun): Update.
+       (freqs_to_counts): Update.
+       (copy_cfg_body): Update.
+       (expand_call_inline): Update.
+       * tree-ssa-ifcombine.c (update_profile_after_ifcombine): Update.
+       * tree-ssa-loop-ivcanon.c (unloop_loops): Update.
+       (try_unroll_loop_completely): Update.
+       (try_peel_loop): Update.
+       * tree-ssa-loop-manip.c (tree_transform_and_unroll_loop): Update.
+       * tree-ssa-loop-niter.c (estimate_numbers_of_iterations_loop): Update.
+       * tree-ssa-loop-split.c (connect_loops): Update.
+       * tree-ssa-loop-unswitch.c (hoist_guard): Update.
+       * tree-ssa-reassoc.c (branch_fixup): Update.
+       * tree-ssa-tail-merge.c (replace_block_by): Update.
+       * tree-ssa-threadupdate.c (create_block_for_threading): Update.
+       (compute_path_counts): Update.
+       (update_profile): Update.
+       (recompute_probabilities): Update.
+       (update_joiner_offpath_counts): Update.
+       (estimated_freqs_path): Update.
+       (freqs_to_counts_path): Update.
+       (clear_counts_path): Update.
+       (ssa_fix_duplicate_block_edges): Update.
+       (duplicate_thread_path): Update.
+       * tree-switch-conversion.c (case_bit_test_cmp): Update.
+       (struct switch_conv_info): Update.
+       * tree-tailcall.c (decrease_profile): Update.
+       * tree-vect-loop-manip.c (slpeel_add_loop_guard): Update.
+       * tree-vect-loop.c (scale_profile_for_vect_loop): Update.
+       * value-prof.c (check_counter): Update.
+       (gimple_divmod_fixed_value): Update.
+       (gimple_mod_pow2): Update.
+       (gimple_mod_subtract): Update.
+       (gimple_ic_transform): Update.
+       (gimple_stringop_fixed_value): Update.
+       * value-prof.h (gimple_ic): Update.
+
 2017-06-02  Carl Love  <cel@us.ibm.com>
 
        * config/rs6000/rs6000-c: Add support for built-in functions
index 6e0e55a1dbdf04a730bf89aa7ff1b5e767e15c5c..da98c8e864a374c032b750912dfd93fbaa0ce603 100644 (file)
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -928,7 +928,7 @@ TREE_CORE_H = tree-core.h coretypes.h all-tree.def tree.def \
 TREE_H = tree.h $(TREE_CORE_H)  tree-check.h
 REGSET_H = regset.h $(BITMAP_H) hard-reg-set.h
 BASIC_BLOCK_H = basic-block.h $(PREDICT_H) $(VEC_H) $(FUNCTION_H) \
-       cfg-flags.def cfghooks.h
+       cfg-flags.def cfghooks.h profile-count.h
 GIMPLE_H = gimple.h gimple.def gsstruct.def $(VEC_H) \
        $(GGC_H) $(BASIC_BLOCK_H) $(TREE_H) tree-ssa-operands.h \
        tree-ssa-alias.h $(INTERNAL_FN_H) $(HASH_TABLE_H) is-a.h
@@ -1417,6 +1417,7 @@ OBJS = \
        print-rtl-function.o \
        print-tree.o \
        profile.o \
+       profile-count.o \
        read-md.o \
        read-rtl.o \
        read-rtl-function.o \
@@ -2459,7 +2460,7 @@ GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
   $(srcdir)/libfuncs.h $(SYMTAB_H) \
   $(srcdir)/real.h $(srcdir)/function.h $(srcdir)/insn-addr.h $(srcdir)/hwint.h \
   $(srcdir)/fixed-value.h \
-  $(srcdir)/output.h $(srcdir)/cfgloop.h $(srcdir)/cfg.h \
+  $(srcdir)/output.h $(srcdir)/cfgloop.h $(srcdir)/cfg.h $(srcdir)/profile-count.h \
   $(srcdir)/cselib.h $(srcdir)/basic-block.h  $(srcdir)/ipa-ref.h $(srcdir)/cgraph.h \
   $(srcdir)/reload.h $(srcdir)/caller-save.c $(srcdir)/symtab.c \
   $(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
index 0de10402f10b99991480b4817499c9f5db692b40..973d7af37ff17f433e7bb8f44e14c37a6636b6a7 100644 (file)
--- a/gcc/auto-profile.c
+++ b/gcc/auto-profile.c
@@ -1058,8 +1058,10 @@ afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
       fprintf (dump_file, "\n");
     }
 
+  /* FIXME: Count should be initialized.  */
   struct cgraph_edge *new_edge
-      = indirect_edge->make_speculative (direct_call, 0, 0);
+      = indirect_edge->make_speculative (direct_call,
+                                        profile_count::uninitialized (), 0);
   new_edge->redirect_call_stmt_to_callee ();
   gimple_remove_histogram_value (cfun, stmt, hist);
   inline_call (new_edge, true, NULL, NULL, false);
@@ -1149,7 +1151,7 @@ afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
   FOR_EACH_EDGE (e, ei, bb->succs)
   afdo_source_profile->mark_annotated (e->goto_locus);
 
-  bb->count = max_count;
+  bb->count = profile_count::from_gcov_type (max_count);
   return true;
 }
 
@@ -1226,7 +1228,7 @@ afdo_propagate_edge (bool is_succ, bb_set *annotated_bb,
     edge e, unknown_edge = NULL;
     edge_iterator ei;
     int num_unknown_edge = 0;
-    gcov_type total_known_count = 0;
+    profile_count total_known_count = profile_count::zero ();
 
     FOR_EACH_EDGE (e, ei, is_succ ? bb->succs : bb->preds)
       if (!is_edge_annotated (e, *annotated_edge))
@@ -1249,10 +1251,7 @@ afdo_propagate_edge (bool is_succ, bb_set *annotated_bb,
       }
     else if (num_unknown_edge == 1 && is_bb_annotated (bb, *annotated_bb))
       {
-        if (bb->count >= total_known_count)
-          unknown_edge->count = bb->count - total_known_count;
-        else
-          unknown_edge->count = 0;
+        unknown_edge->count = bb->count - total_known_count;
         set_edge_annotated (unknown_edge, annotated_edge);
         changed = true;
       }
@@ -1350,7 +1349,7 @@ afdo_propagate_circuit (const bb_set &annotated_bb, edge_set *annotated_edge)
           if (e->probability == 0 && !is_edge_annotated (ep, *annotated_edge))
             {
               ep->probability = 0;
-              ep->count = 0;
+              ep->count = profile_count::zero ();
               set_edge_annotated (ep, annotated_edge);
             }
         }
@@ -1404,7 +1403,7 @@ afdo_calculate_branch_prob (bb_set *annotated_bb, edge_set *annotated_edge)
 
   FOR_EACH_BB_FN (bb, cfun)
   {
-    if (bb->count > 0)
+    if (bb->count > profile_count::zero ())
       {
        has_sample = true;
        break;
@@ -1426,7 +1425,7 @@ afdo_calculate_branch_prob (bb_set *annotated_bb, edge_set *annotated_edge)
     edge e;
     edge_iterator ei;
     int num_unknown_succ = 0;
-    gcov_type total_count = 0;
+    profile_count total_count = profile_count::zero ();
 
     FOR_EACH_EDGE (e, ei, bb->succs)
     {
@@ -1435,10 +1434,10 @@ afdo_calculate_branch_prob (bb_set *annotated_bb, edge_set *annotated_edge)
       else
         total_count += e->count;
     }
-    if (num_unknown_succ == 0 && total_count > 0)
+    if (num_unknown_succ == 0 && total_count > profile_count::zero ())
       {
         FOR_EACH_EDGE (e, ei, bb->succs)
-        e->probability = (double)e->count * REG_BR_PROB_BASE / total_count;
+        e->probability = e->count.probability_in (total_count);
       }
   }
   FOR_ALL_BB_FN (bb, cfun)
@@ -1447,7 +1446,7 @@ afdo_calculate_branch_prob (bb_set *annotated_bb, edge_set *annotated_edge)
     edge_iterator ei;
 
     FOR_EACH_EDGE (e, ei, bb->succs)
-      e->count = (double)bb->count * e->probability / REG_BR_PROB_BASE;
+      e->count = bb->count.apply_probability (e->probability);
     bb->aux = NULL;
   }
 
@@ -1536,18 +1535,20 @@ afdo_annotate_cfg (const stmt_set &promoted_stmts)
 
   if (s == NULL)
     return;
-  cgraph_node::get (current_function_decl)->count = s->head_count ();
-  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = s->head_count ();
-  gcov_type max_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
+  cgraph_node::get (current_function_decl)->count
+     = profile_count::from_gcov_type (s->head_count ());
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count
+     = profile_count::from_gcov_type (s->head_count ());
+  profile_count max_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
 
   FOR_EACH_BB_FN (bb, cfun)
   {
     edge e;
     edge_iterator ei;
 
-    bb->count = 0;
+    bb->count = profile_count::uninitialized ();
     FOR_EACH_EDGE (e, ei, bb->succs)
-      e->count = 0;
+      e->count = profile_count::uninitialized ();
 
     if (afdo_set_bb_count (bb, promoted_stmts))
       set_bb_annotated (bb, &annotated_bb);
@@ -1572,7 +1573,7 @@ afdo_annotate_cfg (const stmt_set &promoted_stmts)
       DECL_SOURCE_LOCATION (current_function_decl));
   afdo_source_profile->mark_annotated (cfun->function_start_locus);
   afdo_source_profile->mark_annotated (cfun->function_end_locus);
-  if (max_count > 0)
+  if (max_count > profile_count::zero ())
     {
       afdo_calculate_branch_prob (&annotated_bb, &annotated_edge);
       counts_to_freqs ();
@@ -1721,7 +1722,7 @@ afdo_callsite_hot_enough_for_early_inline (struct cgraph_edge *edge)
       /* At early inline stage, profile_info is not set yet. We need to
          temporarily set it to afdo_profile_info to calculate hotness.  */
       profile_info = autofdo::afdo_profile_info;
-      is_hot = maybe_hot_count_p (NULL, count);
+      is_hot = maybe_hot_count_p (NULL, profile_count::from_gcov_type (count));
       profile_info = saved_profile_info;
       return is_hot;
     }
index decd6dd2b054090483353cf1134cfddfbb7751b9..b08a14a151b17c89cce3e4a88657e79858d9da1a 100644 (file)
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -20,12 +20,7 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GCC_BASIC_BLOCK_H
 #define GCC_BASIC_BLOCK_H
 
-
-/* Use gcov_type to hold basic block counters.  Should be at least
-   64bit.  Although a counter cannot be negative, we use a signed
-   type, because erroneous negative counts can be generated when the
-   flow graph is manipulated by various optimizations.  A signed type
-   makes those easy to detect.  */
+#include <profile-count.h>
 
 /* Control flow edge information.  */
 struct GTY((user)) edge_def {
@@ -51,7 +46,7 @@ struct GTY((user)) edge_def {
 
   int flags;                   /* see cfg-flags.def */
   int probability;             /* biased by REG_BR_PROB_BASE */
-  gcov_type count;             /* Expected number of executions calculated
+  profile_count count;         /* Expected number of executions calculated
                                   in profile.c  */
 };
 
@@ -150,7 +145,7 @@ struct GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb"))) basic_block_d
   int index;
 
   /* Expected number of executions: calculated in profile.c.  */
-  gcov_type count;
+  profile_count count;
 
   /* Expected frequency.  Normalized to be in range 0 to BB_FREQ_MAX.  */
   int frequency;
@@ -278,9 +273,6 @@ enum cfg_bb_flags
 /* The two blocks that are always in the cfg.  */
 #define NUM_FIXED_BLOCKS (2)
 
-/* The base value for branch probability notes and edge probabilities.  */
-#define REG_BR_PROB_BASE  10000
-
 /* This is the value which indicates no edge is present.  */
 #define EDGE_INDEX_NO_EDGE     -1
 
@@ -307,7 +299,6 @@ enum cfg_bb_flags
 #define BRANCH_EDGE(bb)                        (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
                                         ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))
 
-#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
 /* Return expected execution frequency of the edge E.  */
 #define EDGE_FREQUENCY(e)              RDIV ((e)->src->frequency * (e)->probability, \
                                              REG_BR_PROB_BASE)
index a8d42564c4b59c6b27ff1b2da54a1ad630dc6064..c0386f4b37a504822a4f97afe715faa00ef1c02b 100644 (file)
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -196,7 +196,7 @@ struct trace
 
 /* Maximum frequency and count of one of the entry blocks.  */
 static int max_entry_frequency;
-static gcov_type max_entry_count;
+static profile_count max_entry_count;
 
 /* Local function prototypes.  */
 static void find_traces (int *, struct trace *);
@@ -286,14 +286,14 @@ find_traces (int *n_traces, struct trace *traces)
 
   /* Insert entry points of function into heap.  */
   max_entry_frequency = 0;
-  max_entry_count = 0;
+  max_entry_count = profile_count::zero ();
   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     {
       bbd[e->dest->index].heap = heap;
       bbd[e->dest->index].node = heap->insert (bb_to_key (e->dest), e->dest);
       if (e->dest->frequency > max_entry_frequency)
        max_entry_frequency = e->dest->frequency;
-      if (e->dest->count > max_entry_count)
+      if (e->dest->count.initialized_p () && e->dest->count > max_entry_count)
        max_entry_count = e->dest->count;
     }
 
@@ -306,9 +306,9 @@ find_traces (int *n_traces, struct trace *traces)
        fprintf (dump_file, "STC - round %d\n", i + 1);
 
       if (max_entry_count < INT_MAX / 1000)
-       count_threshold = max_entry_count * exec_threshold[i] / 1000;
+       count_threshold = max_entry_count.to_gcov_type () * exec_threshold[i] / 1000;
       else
-       count_threshold = max_entry_count / 1000 * exec_threshold[i];
+       count_threshold = max_entry_count.to_gcov_type () / 1000 * exec_threshold[i];
 
       find_traces_1_round (REG_BR_PROB_BASE * branch_threshold[i] / 1000,
                           max_entry_frequency * exec_threshold[i] / 1000,
@@ -346,7 +346,7 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
   basic_block best_bb = NULL;
   edge best_edge = NULL;
   int best_freq = -1;
-  gcov_type best_count = -1;
+  profile_count best_count = profile_count::uninitialized ();
   /* The best edge is preferred when its destination is not visited yet
      or is a start block of some trace.  */
   bool is_preferred = false;
@@ -375,7 +375,8 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
                  if (freq > best_freq || e->count > best_count)
                    {
                      best_freq = freq;
-                     best_count = e->count;
+                     if (e->count.initialized_p ())
+                       best_count = e->count;
                      best_edge = e;
                      best_bb = bb;
                    }
@@ -1068,10 +1069,10 @@ connect_traces (int n_traces, struct trace *traces)
   bool for_size = optimize_function_for_size_p (cfun);
 
   freq_threshold = max_entry_frequency * DUPLICATION_THRESHOLD / 1000;
-  if (max_entry_count < INT_MAX / 1000)
-    count_threshold = max_entry_count * DUPLICATION_THRESHOLD / 1000;
+  if (max_entry_count.to_gcov_type () < INT_MAX / 1000)
+    count_threshold = max_entry_count.to_gcov_type () * DUPLICATION_THRESHOLD / 1000;
   else
-    count_threshold = max_entry_count / 1000 * DUPLICATION_THRESHOLD;
+    count_threshold = max_entry_count.to_gcov_type () / 1000 * DUPLICATION_THRESHOLD;
 
   connected = XCNEWVEC (bool, n_traces);
   last_trace = -1;
@@ -1495,7 +1496,7 @@ sanitize_hot_paths (bool walk_up, unsigned int cold_bb_count,
       edge_iterator ei;
       int highest_probability = 0;
       int highest_freq = 0;
-      gcov_type highest_count = 0;
+      profile_count highest_count = profile_count::uninitialized ();
       bool found = false;
 
       /* Walk the preds/succs and check if there is at least one already
@@ -1540,7 +1541,7 @@ sanitize_hot_paths (bool walk_up, unsigned int cold_bb_count,
           /* Select the hottest edge using the edge count, if it is non-zero,
              then fallback to the edge frequency and finally the edge
              probability.  */
-          if (highest_count)
+          if (highest_count > 0)
             {
               if (e->count < highest_count)
                 continue;
index 32924e2ecc5d5fc3008ffeeabbc85d9512e6505e..1da0ad62f1e5f24184709abe46502ec2f7d9ac5a 100644 (file)
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -1391,10 +1391,10 @@ migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
       for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
-         fprintf (dump_file,
-                  "Basic block %d: count = %" PRId64
-                  " loop-depth = %d idom = %d\n",
-                  i, (int64_t) bb->count, bb_loop_depth (bb),
+         fprintf (dump_file, "Basic block %d: count = ", i);
+         bb->count.dump (dump_file);
+         fprintf (dump_file, " loop-depth = %d idom = %d\n",
+                  bb_loop_depth (bb),
                   get_immediate_dominator (CDI_DOMINATORS, bb)->index);
        }
     }
index aef053f5d22cd522fc6e068b481c247e93c1c937..3cbe684b743a53a4d047aef4decb1e88a797dff9 100644 (file)
--- a/gcc/cfg.c
+++ b/gcc/cfg.c
@@ -59,7 +59,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "dumpfile.h"
 
 \f
-#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
 
 /* Called once at initialization time.  */
 
@@ -70,10 +69,10 @@ init_flow (struct function *the_fun)
     the_fun->cfg = ggc_cleared_alloc<control_flow_graph> ();
   n_edges_for_fn (the_fun) = 0;
   ENTRY_BLOCK_PTR_FOR_FN (the_fun)
-    = ggc_cleared_alloc<basic_block_def> ();
+    = alloc_block ();
   ENTRY_BLOCK_PTR_FOR_FN (the_fun)->index = ENTRY_BLOCK;
   EXIT_BLOCK_PTR_FOR_FN (the_fun)
-    = ggc_cleared_alloc<basic_block_def> ();
+    = alloc_block ();
   EXIT_BLOCK_PTR_FOR_FN (the_fun)->index = EXIT_BLOCK;
   ENTRY_BLOCK_PTR_FOR_FN (the_fun)->next_bb
     = EXIT_BLOCK_PTR_FOR_FN (the_fun);
@@ -123,6 +122,7 @@ alloc_block (void)
 {
   basic_block bb;
   bb = ggc_cleared_alloc<basic_block_def> ();
+  bb->count = profile_count::uninitialized ();
   return bb;
 }
 
@@ -263,6 +263,7 @@ unchecked_make_edge (basic_block src, basic_block dst, int flags)
   e = ggc_cleared_alloc<edge_def> ();
   n_edges_for_fn (cfun)++;
 
+  e->count = profile_count::uninitialized ();
   e->src = src;
   e->dest = dst;
   e->flags = flags;
@@ -400,7 +401,6 @@ check_bb_profile (basic_block bb, FILE * file, int indent)
 {
   edge e;
   int sum = 0;
-  gcov_type lsum;
   edge_iterator ei;
   struct function *fun = DECL_STRUCT_FUNCTION (current_function_decl);
   char *s_indent = (char *) alloca ((size_t) indent + 1);
@@ -428,14 +428,18 @@ check_bb_profile (basic_block bb, FILE * file, int indent)
            fprintf (file,
                     ";; %sInvalid sum of outgoing probabilities %.1f%%\n",
                     s_indent, sum * 100.0 / REG_BR_PROB_BASE);
-         lsum = 0;
+         profile_count lsum = profile_count::zero ();
          FOR_EACH_EDGE (e, ei, bb->succs)
            lsum += e->count;
-         if (EDGE_COUNT (bb->succs)
-             && (lsum - bb->count > 100 || lsum - bb->count < -100))
-           fprintf (file,
-                    ";; %sInvalid sum of outgoing counts %i, should be %i\n",
-                    s_indent, (int) lsum, (int) bb->count);
+         if (EDGE_COUNT (bb->succs) && lsum.differs_from_p (bb->count))
+           {
+             fprintf (file, ";; %sInvalid sum of outgoing counts ",
+                      s_indent);
+             lsum.dump (file);
+             fprintf (file, ", should be ");
+             bb->count.dump (file);
+             fprintf (file, "\n");
+           }
        }
     }
     if (bb != ENTRY_BLOCK_PTR_FOR_FN (fun))
@@ -447,12 +451,18 @@ check_bb_profile (basic_block bb, FILE * file, int indent)
        fprintf (file,
                 ";; %sInvalid sum of incoming frequencies %i, should be %i\n",
                 s_indent, sum, bb->frequency);
-      lsum = 0;
+      profile_count lsum = profile_count::zero ();
       FOR_EACH_EDGE (e, ei, bb->preds)
        lsum += e->count;
-      if (lsum - bb->count > 100 || lsum - bb->count < -100)
-       fprintf (file, ";; %sInvalid sum of incoming counts %i, should be %i\n",
-                s_indent, (int) lsum, (int) bb->count);
+      if (lsum.differs_from_p (bb->count))
+       {
+         fprintf (file, ";; %sInvalid sum of incoming counts ",
+                  s_indent);
+         lsum.dump (file);
+         fprintf (file, ", should be ");
+         bb->count.dump (file);
+         fprintf (file, "\n");
+       }
     }
   if (BB_PARTITION (bb) == BB_COLD_PARTITION)
     {
@@ -491,10 +501,10 @@ dump_edge_info (FILE *file, edge e, dump_flags_t flags, int do_succ)
   if (e->probability && do_details)
     fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
 
-  if (e->count && do_details)
+  if (e->count.initialized_p () && do_details)
     {
       fputs (" count:", file);
-      fprintf (file, "%" PRId64, e->count);
+      e->count.dump (file);
     }
 
   if (e->flags && do_details)
@@ -741,8 +751,11 @@ dump_bb_info (FILE *outf, basic_block bb, int indent, dump_flags_t flags,
       if (flags & TDF_DETAILS)
        {
          struct function *fun = DECL_STRUCT_FUNCTION (current_function_decl);
-         fprintf (outf, ", count " "%" PRId64,
-                  (int64_t) bb->count);
+         if (bb->count.initialized_p ())
+           {
+             fputs (", count ", outf);
+             bb->count.dump (outf);
+           }
          fprintf (outf, ", freq %i", bb->frequency);
          if (maybe_hot_bb_p (fun, bb))
            fputs (", maybe hot", outf);
@@ -844,20 +857,19 @@ brief_dump_cfg (FILE *file, dump_flags_t flags)
    respectively.  */
 void
 update_bb_profile_for_threading (basic_block bb, int edge_frequency,
-                                gcov_type count, edge taken_edge)
+                                profile_count count, edge taken_edge)
 {
   edge c;
   int prob;
   edge_iterator ei;
 
-  bb->count -= count;
-  if (bb->count < 0)
+  if (bb->count < count)
     {
       if (dump_file)
        fprintf (dump_file, "bb %i count became negative after threading",
                 bb->index);
-      bb->count = 0;
     }
+  bb->count -= count;
 
   bb->frequency -= edge_frequency;
   if (bb->frequency < 0)
@@ -913,14 +925,13 @@ update_bb_profile_for_threading (basic_block bb, int edge_frequency,
     }
 
   gcc_assert (bb == taken_edge->src);
-  taken_edge->count -= count;
-  if (taken_edge->count < 0)
+  if (taken_edge->count < count)
     {
       if (dump_file)
        fprintf (dump_file, "edge %i->%i count became negative after threading",
                 taken_edge->src->index, taken_edge->dest->index);
-      taken_edge->count = 0;
     }
+  taken_edge->count -= count;
 }
 
 /* Multiply all frequencies of basic blocks in array BBS of length NBBS
@@ -954,9 +965,9 @@ scale_bbs_frequencies_int (basic_block *bbs, int nbbs, int num, int den)
       /* Make sure the frequencies do not grow over BB_FREQ_MAX.  */
       if (bbs[i]->frequency > BB_FREQ_MAX)
        bbs[i]->frequency = BB_FREQ_MAX;
-      bbs[i]->count = RDIV (bbs[i]->count * num, den);
+      bbs[i]->count = bbs[i]->count.apply_scale (num, den);
       FOR_EACH_EDGE (e, ei, bbs[i]->succs)
-       e->count = RDIV (e->count * num, den);
+       e->count = e->count.apply_scale (num, den);
     }
 }
 
@@ -983,14 +994,14 @@ scale_bbs_frequencies_gcov_type (basic_block *bbs, int nbbs, gcov_type num,
        edge_iterator ei;
        bbs[i]->frequency = RDIV (bbs[i]->frequency * num, den);
        if (bbs[i]->count <= MAX_SAFE_MULTIPLIER)
-         bbs[i]->count = RDIV (bbs[i]->count * num, den);
+         bbs[i]->count = bbs[i]->count.apply_scale (num, den);
        else
-         bbs[i]->count = RDIV (bbs[i]->count * fraction, 65536);
+         bbs[i]->count = bbs[i]->count.apply_scale (fraction, 65536);
        FOR_EACH_EDGE (e, ei, bbs[i]->succs)
          if (bbs[i]->count <= MAX_SAFE_MULTIPLIER)
-           e->count = RDIV (e->count * num, den);
+           e->count =  e->count.apply_scale (num, den);
          else
-           e->count = RDIV (e->count * fraction, 65536);
+           e->count = e->count.apply_scale (fraction, 65536);
       }
    else
     for (i = 0; i < nbbs; i++)
@@ -1000,12 +1011,33 @@ scale_bbs_frequencies_gcov_type (basic_block *bbs, int nbbs, gcov_type num,
          bbs[i]->frequency = RDIV (bbs[i]->frequency * num, den);
        else
          bbs[i]->frequency = RDIV (bbs[i]->frequency * fraction, 65536);
-       bbs[i]->count = RDIV (bbs[i]->count * fraction, 65536);
+       bbs[i]->count = bbs[i]->count.apply_scale (fraction, 65536);
        FOR_EACH_EDGE (e, ei, bbs[i]->succs)
-         e->count = RDIV (e->count * fraction, 65536);
+         e->count = e->count.apply_scale (fraction, 65536);
       }
 }
 
+/* Multiply all frequencies of basic blocks in array BBS of length NBBS
+   by NUM/DEN, in profile_count arithmetic.  More accurate than previous
+   function but considerably slower.  */
+void
+scale_bbs_frequencies_profile_count (basic_block *bbs, int nbbs,
+                                    profile_count num, profile_count den)
+{
+  int i;
+  edge e;
+
+  for (i = 0; i < nbbs; i++)
+    {
+      edge_iterator ei;
+      bbs[i]->frequency = RDIV (bbs[i]->frequency * num.to_gcov_type (),
+                               den.to_gcov_type ());
+      bbs[i]->count = bbs[i]->count.apply_scale (num, den);
+      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
+       e->count =  e->count.apply_scale (num, den);
+    }
+}
+
 /* Helper types for hash tables.  */
 
 struct htab_bb_copy_original_entry
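
The cfg.c hunks above rewrite the open-coded RDIV (count * num, den) scaling in scale_bbs_frequencies_int and scale_bbs_frequencies_gcov_type as count.apply_scale (num, den), and add scale_bbs_frequencies_profile_count for the case where both scaling factors are themselves counts. For readers unfamiliar with the RDIV macro that this patch removes from cfg.c (and moves from basic-block.h into profile-count.h): it divides with rounding to nearest rather than truncation, which is the convention the scaled counts rely on. A small stand-alone check of that convention follows; it is plain C++ and assumes nothing GCC-internal beyond the macro text quoted in the hunks above.

#include <cassert>
#include <cstdint>

#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))   /* as removed from cfg.c above */

int main ()
{
  int64_t count = 1001;
  assert (count / 3 == 333);        /* plain integer division truncates */
  assert (RDIV (count, 3) == 334);  /* RDIV rounds to nearest */
  /* RDIV (count * num, den) is the pattern this patch rewrites as
     count.apply_scale (num, den).  */
  assert (RDIV (count * 2, 3) == 667);
  return 0;
}
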
index f71c00e6df839712c349ec862e177f8ccd3fdab2..365a580a0db822dc4acf8518ca23ebfeeab2d434 100644 (file)
--- a/gcc/cfg.h
+++ b/gcc/cfg.h
@@ -103,10 +103,12 @@ extern void debug_bb (basic_block);
 extern basic_block debug_bb_n (int);
 extern void dump_bb_info (FILE *, basic_block, int, dump_flags_t, bool, bool);
 extern void brief_dump_cfg (FILE *, dump_flags_t);
-extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
+extern void update_bb_profile_for_threading (basic_block, int, profile_count, edge);
 extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
 extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
                                             gcov_type);
+extern void scale_bbs_frequencies_profile_count (basic_block *, int,
+                                            profile_count, profile_count);
 extern void initialize_original_copy_tables (void);
 extern void reset_original_copy_tables (void);
 extern void free_original_copy_tables (void);
index 69ca1282c37f264a9d4ff78cc3cf1b8d6e625c9c..a4004f87e939e33c5999dee9c7a0fcb05a0479b2 100644 (file)
--- a/gcc/cfgbuild.c
+++ b/gcc/cfgbuild.c
@@ -542,7 +542,7 @@ compute_outgoing_frequencies (basic_block b)
          probability = XINT (note, 0);
          e = BRANCH_EDGE (b);
          e->probability = probability;
-         e->count = apply_probability (b->count, probability);
+         e->count = b->count.apply_probability (probability);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
@@ -577,9 +577,9 @@ compute_outgoing_frequencies (basic_block b)
         guess_outgoing_edge_probabilities (b);
     }
 
-  if (b->count)
+  if (b->count > profile_count::zero ())
     FOR_EACH_EDGE (e, ei, b->succs)
-      e->count = apply_probability (b->count, e->probability);
+      e->count = b->count.apply_probability (e->probability);
 }
 
 /* Assume that some pass has inserted labels or control flow
@@ -624,7 +624,7 @@ find_many_sub_basic_blocks (sbitmap blocks)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
-           bb->count = 0;
+           bb->count = profile_count::zero ();
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
index 3e1406c114112bf938153428773209d388539bb5..1201148fa88d728019e0768424d878365b26d19b 100644 (file)
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -558,7 +558,7 @@ try_forward_edges (int mode, basic_block b)
       else
        {
          /* Save the values now, as the edge may get removed.  */
-         gcov_type edge_count = e->count;
+         profile_count edge_count = e->count;
          int edge_probability = e->probability;
          int edge_frequency;
          int n = 0;
@@ -603,8 +603,6 @@ try_forward_edges (int mode, basic_block b)
              else
                {
                  first->count -= edge_count;
-                 if (first->count < 0)
-                   first->count = 0;
                  first->frequency -= edge_frequency;
                  if (first->frequency < 0)
                    first->frequency = 0;
@@ -619,8 +617,6 @@ try_forward_edges (int mode, basic_block b)
                }
 
              t->count -= edge_count;
-             if (t->count < 0)
-               t->count = 0;
              first = t->dest;
            }
          while (first != target);
@@ -2146,14 +2142,10 @@ try_crossjump_to_edge (int mode, edge e1, edge e2,
       if (FORWARDER_BLOCK_P (s2->dest))
        {
          single_succ_edge (s2->dest)->count -= s2->count;
-         if (single_succ_edge (s2->dest)->count < 0)
-           single_succ_edge (s2->dest)->count = 0;
          s2->dest->count -= s2->count;
          s2->dest->frequency -= EDGE_FREQUENCY (s);
          if (s2->dest->frequency < 0)
            s2->dest->frequency = 0;
-         if (s2->dest->count < 0)
-           s2->dest->count = 0;
        }
 
       if (!redirect_edges_to->frequency && !src1->frequency)
index 698d43b60b45085a97d6db1adbdc1850f50db54d..3261fce8843c62f80efaf4b0529bdd77ea3e40be 100644 (file)
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -3783,7 +3783,6 @@ expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
   edge e;
   edge_iterator ei;
   int probability;
-  gcov_type count;
 
   last2 = last = expand_gimple_stmt (stmt);
 
@@ -3809,7 +3808,7 @@ expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
      the exit block.  */
 
   probability = 0;
-  count = 0;
+  profile_count count = profile_count::zero ();
 
   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
     {
@@ -3819,8 +3818,6 @@ expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
-             if (e->dest->count < 0)
-               e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
@@ -5931,10 +5928,6 @@ construct_exit_block (void)
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
       }
-  if (e->count < 0)
-    e->count = 0;
-  if (exit_block->count < 0)
-    exit_block->count = 0;
   if (exit_block->frequency < 0)
     exit_block->frequency = 0;
   update_bb_for_insn (exit_block);
index 1644c9ff31c415d15db89310d784ee28f3d3b5c5..1b3f2695b3905780c623bc60963dd394d7c9a91f 100644 (file)
--- a/gcc/cfghooks.c
+++ b/gcc/cfghooks.c
@@ -141,10 +141,9 @@ verify_flow_info (void)
          err = 1;
        }
 
-      if (bb->count < 0)
+      if (!bb->count.verify ())
        {
-         error ("verify_flow_info: Wrong count of block %i %i",
-                bb->index, (int)bb->count);
+         error ("verify_flow_info: Wrong count of block %i", bb->index);
          err = 1;
        }
       if (bb->frequency < 0)
@@ -167,10 +166,10 @@ verify_flow_info (void)
                     e->src->index, e->dest->index, e->probability);
              err = 1;
            }
-         if (e->count < 0)
+         if (!e->count.verify ())
            {
-             error ("verify_flow_info: Wrong count of edge %i->%i %i",
-                    e->src->index, e->dest->index, (int)e->count);
+             error ("verify_flow_info: Wrong count of edge %i->%i",
+                    e->src->index, e->dest->index);
              err = 1;
            }
 
@@ -309,8 +308,9 @@ dump_bb_for_graph (pretty_printer *pp, basic_block bb)
   if (!cfg_hooks->dump_bb_for_graph)
     internal_error ("%s does not support dump_bb_for_graph",
                    cfg_hooks->name);
-  if (bb->count)
-    pp_printf (pp, "COUNT:" "%" PRId64, bb->count);
+  /* TODO: Add pretty printer for counter.  */
+  if (bb->count.initialized_p ())
+    pp_printf (pp, "COUNT:" "%" PRId64, bb->count.to_gcov_type ());
   pp_printf (pp, " FREQ:%i |", bb->frequency);
   pp_write_text_to_stream (pp);
   if (!(dump_flags & TDF_SLIM))
@@ -624,7 +624,7 @@ basic_block
 split_edge (edge e)
 {
   basic_block ret;
-  gcov_type count = e->count;
+  profile_count count = e->count;
   int freq = EDGE_FREQUENCY (e);
   edge f;
   bool irr = (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
@@ -868,9 +868,9 @@ make_forwarder_block (basic_block bb, bool (*redirect_edge_p) (edge),
 
   fallthru = split_block_after_labels (bb);
   dummy = fallthru->src;
-  dummy->count = 0;
+  dummy->count = profile_count::zero ();
   dummy->frequency = 0;
-  fallthru->count = 0;
+  fallthru->count = profile_count::zero ();
   bb = fallthru->dest;
 
   /* Redirect back edges we want to keep.  */
@@ -1071,7 +1071,7 @@ duplicate_block (basic_block bb, edge e, basic_block after)
 {
   edge s, n;
   basic_block new_bb;
-  gcov_type new_count = e ? e->count : 0;
+  profile_count new_count = e ? e->count : profile_count::uninitialized ();
   edge_iterator ei;
 
   if (!cfg_hooks->duplicate_block)
@@ -1095,10 +1095,9 @@ duplicate_block (basic_block bb, edge e, basic_block after)
         is no need to actually check for duplicated edges.  */
       n = unchecked_make_edge (new_bb, s->dest, s->flags);
       n->probability = s->probability;
-      if (e && bb->count)
+      if (e && bb->count > profile_count::zero ())
        {
-         /* Take care for overflows!  */
-         n->count = s->count * (new_count * 10000 / bb->count) / 10000;
+         n->count = s->count.apply_scale (new_count, bb->count);
          s->count -= n->count;
        }
       else
@@ -1116,8 +1115,6 @@ duplicate_block (basic_block bb, edge e, basic_block after)
 
       redirect_edge_and_branch_force (e, new_bb);
 
-      if (bb->count < 0)
-       bb->count = 0;
       if (bb->frequency < 0)
        bb->frequency = 0;
     }
@@ -1448,7 +1445,6 @@ account_profile_record (struct profile_record *record, int after_pass)
   edge_iterator ei;
   edge e;
   int sum;
-  gcov_type lsum;
 
   FOR_ALL_BB_FN (bb, cfun)
    {
@@ -1460,11 +1456,10 @@ account_profile_record (struct profile_record *record, int after_pass)
            sum += e->probability;
          if (EDGE_COUNT (bb->succs) && abs (sum - REG_BR_PROB_BASE) > 100)
            record->num_mismatched_freq_out[after_pass]++;
-         lsum = 0;
+         profile_count lsum = profile_count::zero ();
          FOR_EACH_EDGE (e, ei, bb->succs)
            lsum += e->count;
-         if (EDGE_COUNT (bb->succs)
-             && (lsum - bb->count > 100 || lsum - bb->count < -100))
+         if (EDGE_COUNT (bb->succs) && (lsum.differs_from_p (bb->count)))
            record->num_mismatched_count_out[after_pass]++;
        }
       if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
@@ -1477,10 +1472,10 @@ account_profile_record (struct profile_record *record, int after_pass)
              || (MAX (sum, bb->frequency) > 10
                  && abs ((sum - bb->frequency) * 100 / (MAX (sum, bb->frequency) + 1)) > 10))
            record->num_mismatched_freq_in[after_pass]++;
-         lsum = 0;
+         profile_count lsum = profile_count::zero ();
          FOR_EACH_EDGE (e, ei, bb->preds)
            lsum += e->count;
-         if (lsum - bb->count > 100 || lsum - bb->count < -100)
+         if (lsum.differs_from_p (bb->count))
            record->num_mismatched_count_in[after_pass]++;
        }
       if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
index 654d188e8b5328af22da3f2337151430765643c2..a1e778b85865569f813b28be414bca24ff13cf1c 100644 (file)
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -543,7 +543,7 @@ find_subloop_latch_edge_by_profile (vec<edge> latches)
 {
   unsigned i;
   edge e, me = NULL;
-  gcov_type mcount = 0, tcount = 0;
+  profile_count mcount = profile_count::zero (), tcount = profile_count::zero ();
 
   FOR_EACH_VEC_ELT (latches, i, e)
     {
@@ -555,8 +555,8 @@ find_subloop_latch_edge_by_profile (vec<edge> latches)
       tcount += e->count;
     }
 
-  if (tcount < HEAVY_EDGE_MIN_SAMPLES
-      || (tcount - mcount) * HEAVY_EDGE_RATIO > tcount)
+  if (!tcount.initialized_p () || tcount < HEAVY_EDGE_MIN_SAMPLES
+      || (tcount - mcount).apply_scale (HEAVY_EDGE_RATIO, 1) > tcount)
     return NULL;
 
   if (dump_file)
@@ -1899,7 +1899,7 @@ get_estimated_loop_iterations (struct loop *loop, widest_int *nit)
      profile.  */
   if (!loop->any_estimate)
     {
-      if (loop->header->count)
+      if (loop->header->count.reliable_p ())
        {
           *nit = gcov_type_to_wide_int
                   (expected_loop_iterations_unbounded (loop) + 1);
index aa06c966002c2d91f16f2e6e94ef7d07c1a9bb0f..a9537637115c280cb686b3b986bf3b3a5d08e8b0 100644 (file)
--- a/gcc/cfgloopanal.c
+++ b/gcc/cfgloopanal.c
@@ -237,7 +237,7 @@ expected_loop_iterations_unbounded (const struct loop *loop,
 {
   edge e;
   edge_iterator ei;
-  gcov_type expected;
+  gcov_type expected = -1;
   
   if (read_profile_p)
     *read_profile_p = false;
@@ -245,12 +245,11 @@ expected_loop_iterations_unbounded (const struct loop *loop,
   /* If we have no profile at all, use AVG_LOOP_NITER.  */
   if (profile_status_for_fn (cfun) == PROFILE_ABSENT)
     expected = PARAM_VALUE (PARAM_AVG_LOOP_NITER);
-  else if (loop->latch && (loop->latch->count || loop->header->count))
+  else if (loop->latch && (loop->latch->count.reliable_p ()
+                          || loop->header->count.reliable_p ()))
     {
-      gcov_type count_in, count_latch;
-
-      count_in = 0;
-      count_latch = 0;
+      profile_count count_in = profile_count::zero (),
+                   count_latch = profile_count::zero ();
 
       FOR_EACH_EDGE (e, ei, loop->header->preds)
        if (e->src == loop->latch)
@@ -258,16 +257,19 @@ expected_loop_iterations_unbounded (const struct loop *loop,
        else
          count_in += e->count;
 
-      if (count_in == 0)
-       expected = count_latch * 2;
+      if (!count_latch.initialized_p ())
+       ;
+      else if (!(count_in > profile_count::zero ()))
+       expected = count_latch.to_gcov_type () * 2;
       else
        {
-         expected = (count_latch + count_in - 1) / count_in;
+         expected = (count_latch.to_gcov_type () + count_in.to_gcov_type ()
+                     - 1) / count_in.to_gcov_type ();
          if (read_profile_p)
            *read_profile_p = true;
        }
     }
-  else
+  if (expected == -1)
     {
       int freq_in, freq_latch;
 
@@ -472,9 +474,11 @@ single_likely_exit (struct loop *loop)
       /* The constant of 5 is set in a way so noreturn calls are
         ruled out by this test.  The static branch prediction algorithm
          will not assign such a low probability to conditionals for usual
-         reasons.  */
-      if (profile_status_for_fn (cfun) != PROFILE_ABSENT
-         && ex->probability < 5 && !ex->count)
+         reasons.
+        FIXME: Turn to likely_never_executed  */
+      if ((profile_status_for_fn (cfun) != PROFILE_ABSENT
+          && ex->probability < 5)
+         || ex->count == profile_count::zero ())
        continue;
       if (!found)
        found = ex;
index 3e34aadd6911f781e558088b188f04074cccaf95..d764ab9a8c7d83e9857b32508b199b2a0ec1958a 100644 (file)
--- a/gcc/cfgloopmanip.c
+++ b/gcc/cfgloopmanip.c
@@ -533,7 +533,7 @@ scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
        {
          edge other_e;
          int freq_delta;
-         gcov_type count_delta;
+         profile_count count_delta;
 
           FOR_EACH_EDGE (other_e, ei, e->src->succs)
            if (!(other_e->flags & (EDGE_ABNORMAL | EDGE_FAKE))
@@ -548,8 +548,8 @@ scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
 
          /* Adjust counts accordingly.  */
          count_delta = e->count;
-         e->count = apply_probability (e->src->count, e->probability);
-         other_e->count = apply_probability (e->src->count, other_e->probability);
+         e->count = e->src->count.apply_probability (e->probability);
+         other_e->count = e->src->count.apply_probability (other_e->probability);
          count_delta -= e->count;
 
          /* If latch exists, change its frequency and count, since we changed
@@ -562,8 +562,6 @@ scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
              if (loop->latch->frequency < 0)
                loop->latch->frequency = 0;
              loop->latch->count += count_delta;
-             if (loop->latch->count < 0)
-               loop->latch->count = 0;
            }
        }
 
@@ -571,19 +569,25 @@ scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
         difference of loop iterations.  We however can do better if
         we look at the actual profile, if it is available.  */
       scale = RDIV (iteration_bound * scale, iterations);
-      if (loop->header->count)
+
+      bool determined = false;
+      if (loop->header->count.initialized_p ())
        {
-         gcov_type count_in = 0;
+         profile_count count_in = profile_count::zero ();
 
          FOR_EACH_EDGE (e, ei, loop->header->preds)
            if (e->src != loop->latch)
              count_in += e->count;
 
-         if (count_in != 0)
-           scale = GCOV_COMPUTE_SCALE (count_in * iteration_bound,
-                                        loop->header->count);
+         if (count_in > profile_count::zero ())
+           {
+             scale = GCOV_COMPUTE_SCALE (count_in.to_gcov_type ()
+                                         * iteration_bound,
+                                          loop->header->count.to_gcov_type ());
+             determined = true;
+           }
        }
-      else if (loop->header->frequency)
+      if (!determined)
        {
          int freq_in = 0;
 
@@ -864,7 +868,7 @@ loopify (edge latch_edge, edge header_edge,
   struct loop *loop = alloc_loop ();
   struct loop *outer = loop_outer (succ_bb->loop_father);
   int freq;
-  gcov_type cnt;
+  profile_count cnt;
   edge e;
   edge_iterator ei;
 
@@ -907,7 +911,7 @@ loopify (edge latch_edge, edge header_edge,
       switch_bb->count = cnt;
       FOR_EACH_EDGE (e, ei, switch_bb->succs)
        {
-         e->count = apply_probability (switch_bb->count, e->probability);
+         e->count = switch_bb->count.apply_probability (e->probability);
        }
     }
   scale_loop_frequencies (loop, false_scale, REG_BR_PROB_BASE);
@@ -1107,11 +1111,11 @@ set_zero_probability (edge e)
   edge_iterator ei;
   edge ae, last = NULL;
   unsigned n = EDGE_COUNT (bb->succs);
-  gcov_type cnt = e->count, cnt1;
+  profile_count cnt = e->count, cnt1;
   unsigned prob = e->probability, prob1;
 
   gcc_assert (n > 1);
-  cnt1 = cnt / (n - 1);
+  cnt1 = cnt.apply_scale (1, (n - 1));
   prob1 = prob / (n - 1);
 
   FOR_EACH_EDGE (ae, ei, bb->succs)
@@ -1126,10 +1130,12 @@ set_zero_probability (edge e)
 
   /* Move the rest to one of the edges.  */
   last->probability += prob % (n - 1);
-  last->count += cnt % (n - 1);
+  /* TODO: Remove once we have fractional counts.  */
+  if (cnt.initialized_p ())
+    last->count += profile_count::from_gcov_type (cnt.to_gcov_type () % (n - 1));
 
   e->probability = 0;
-  e->count = 0;
+  e->count = profile_count::zero ();
 }
 
 /* Duplicates body of LOOP to given edge E NDUPL times.  Takes care of updating
@@ -1672,8 +1678,8 @@ lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
                  current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
   e1->probability = then_prob;
   e->probability = else_prob;
-  e1->count = apply_probability (e->count, e1->probability);
-  e->count = apply_probability (e->count, e->probability);
+  e1->count = e->count.apply_probability (e1->probability);
+  e->count = e->count.apply_probability (e->probability);
 
   set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
   set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);
index fffc4b070cd951a35815ccc4d3df65cd513c1a8f..4e303a61d787369628190bba70bb36c6eb3afd81 100644 (file)
@@ -1505,14 +1505,11 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
          int prob = XINT (note, 0);
 
          b->probability = prob;
-          /* Update this to use GCOV_COMPUTE_SCALE.  */
-         b->count = e->count * prob / REG_BR_PROB_BASE;
+         b->count = e->count.apply_probability (prob);
          e->probability -= e->probability;
          e->count -= b->count;
          if (e->probability < 0)
            e->probability = 0;
-         if (e->count < 0)
-           e->count = 0;
        }
     }
 
@@ -1620,7 +1617,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
   if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
     {
       rtx_insn *new_head;
-      gcov_type count = e->count;
+      profile_count count = e->count;
       int probability = e->probability;
       /* Create the new structures.  */
 
@@ -1660,13 +1657,13 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
       if (asm_goto_edge)
        {
          new_edge->probability /= 2;
-         new_edge->count /= 2;
-         jump_block->count /= 2;
+         new_edge->count = new_edge->count.apply_scale (1, 2);
+         jump_block->count = jump_block->count.apply_scale (1, 2);
          jump_block->frequency /= 2;
-         new_edge = make_edge (new_edge->src, target,
-                               e->flags & ~EDGE_FALLTHRU);
-         new_edge->probability = probability - probability / 2;
-         new_edge->count = count - count / 2;
+         edge new_edge2 = make_edge (new_edge->src, target,
+                                     e->flags & ~EDGE_FALLTHRU);
+         new_edge2->probability = probability - new_edge->probability;
+         new_edge2->count = count - new_edge->count;
        }
 
       new_bb = jump_block;
@@ -3159,9 +3156,8 @@ purge_dead_edges (basic_block bb)
          f = FALLTHRU_EDGE (bb);
          b->probability = XINT (note, 0);
          f->probability = REG_BR_PROB_BASE - b->probability;
-          /* Update these to use GCOV_COMPUTE_SCALE.  */
-         b->count = bb->count * b->probability / REG_BR_PROB_BASE;
-         f->count = bb->count * f->probability / REG_BR_PROB_BASE;
+         b->count = bb->count.apply_probability (b->probability);
+         f->count = bb->count.apply_probability (f->probability);
        }
 
       return purged;
@@ -5030,9 +5026,9 @@ rtl_account_profile_record (basic_block bb, int after_pass,
       {
        record->size[after_pass]
          += insn_rtx_cost (PATTERN (insn), false);
-       if (profile_status_for_fn (cfun) == PROFILE_READ)
+       if (bb->count.initialized_p ())
          record->time[after_pass]
-           += insn_rtx_cost (PATTERN (insn), true) * bb->count;
+           += insn_rtx_cost (PATTERN (insn), true) * bb->count.to_gcov_type ();
        else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
          record->time[after_pass]
            += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
index 39320063c84aaa54b0012cd41c71d8f67a4966aa..2cbacc774d39bdf9daf1cfbcf359e59e188c2573 100644 (file)
@@ -500,6 +500,8 @@ cgraph_node::create (tree decl)
 
   node->decl = decl;
 
+  node->count = profile_count::uninitialized ();
+
   if ((flag_openacc || flag_openmp)
       && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
     {
@@ -808,7 +810,7 @@ cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
 
 cgraph_edge *
 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
-                          gcall *call_stmt, gcov_type count, int freq,
+                          gcall *call_stmt, profile_count count, int freq,
                           bool indir_unknown_callee)
 {
   cgraph_edge *edge;
@@ -849,10 +851,9 @@ symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
   edge->lto_stmt_uid = 0;
 
   edge->count = count;
-  gcc_assert (count >= 0);
   edge->frequency = freq;
-  gcc_assert (freq >= 0);
-  gcc_assert (freq <= CGRAPH_FREQ_MAX);
+  gcc_checking_assert (freq >= 0);
+  gcc_checking_assert (freq <= CGRAPH_FREQ_MAX);
 
   edge->call_stmt = call_stmt;
   push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
@@ -894,7 +895,7 @@ symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
 
 cgraph_edge *
 cgraph_node::create_edge (cgraph_node *callee,
-                         gcall *call_stmt, gcov_type count, int freq)
+                         gcall *call_stmt, profile_count count, int freq)
 {
   cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
                                           freq, false);
@@ -931,7 +932,7 @@ cgraph_allocate_init_indirect_info (void)
 
 cgraph_edge *
 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
-                                  gcov_type count, int freq,
+                                  profile_count count, int freq,
                                   bool compute_indirect_info)
 {
   cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
@@ -1047,7 +1048,7 @@ cgraph_edge::remove (void)
    Return direct edge created.  */
 
 cgraph_edge *
-cgraph_edge::make_speculative (cgraph_node *n2, gcov_type direct_count,
+cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
                               int direct_frequency)
 {
   cgraph_node *n = caller;
@@ -1303,19 +1304,21 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
       else
        {
          if (dump_file)
-           fprintf (dump_file,
-                    "Expanding speculative call of %s -> %s count: "
-                    "%" PRId64"\n",
-                    e->caller->dump_name (),
-                    e->callee->dump_name (),
-                    (int64_t)e->count);
+           {
+             fprintf (dump_file,
+                      "Expanding speculative call of %s -> %s count: ",
+                      e->caller->dump_name (),
+                      e->callee->dump_name ());
+             e->count.dump (dump_file);
+             fprintf (dump_file, "\n");
+           }
          gcc_assert (e2->speculative);
          push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
          new_stmt = gimple_ic (e->call_stmt,
                                dyn_cast<cgraph_node *> (ref->referred),
-                               e->count || e2->count
-                               ?  RDIV (e->count * REG_BR_PROB_BASE,
-                                        e->count + e2->count)
+                               e->count > profile_count::zero ()
+                               || e2->count > profile_count::zero ()
+                               ? e->count.probability_in (e->count + e2->count)
                                : e->frequency || e2->frequency
                                ? RDIV (e->frequency * REG_BR_PROB_BASE,
                                        e->frequency + e2->frequency)
@@ -1591,7 +1594,7 @@ cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
     {
       cgraph_edge *e = node->get_edge (old_stmt);
       cgraph_edge *ne = NULL;
-      gcov_type count;
+      profile_count count;
       int frequency;
 
       if (e)
@@ -2033,8 +2036,12 @@ cgraph_edge::dump_edge_flags (FILE *f)
     fprintf (f, "(call_stmt_cannot_inline_p) ");
   if (indirect_inlining_edge)
     fprintf (f, "(indirect_inlining) ");
-  if (count)
-    fprintf (f, "(%" PRId64"x) ", (int64_t)count);
+  if (count.initialized_p ())
+    {
+      fprintf (f, "(");
+      count.dump (f);
+      fprintf (f, ")");
+    }
   if (frequency)
     fprintf (f, "(%.2f per call) ", frequency / (double)CGRAPH_FREQ_BASE);
   if (can_throw_external)
@@ -2085,9 +2092,11 @@ cgraph_node::dump (FILE *f)
       fprintf (f, "\n");
     }
   fprintf (f, "  Function flags:");
-  if (count)
-    fprintf (f, " executed %" PRId64"x",
-            (int64_t)count);
+  if (count.initialized_p ())
+    {
+      fprintf (f, " profile_count ");
+      count.dump (f);
+    }
   if (origin)
     fprintf (f, " nested in: %s", origin->asm_name ());
   if (gimple_has_body_p (decl))
@@ -2737,6 +2746,10 @@ cgraph_edge::maybe_hot_p (void)
     return false;
   if (caller->frequency == NODE_FREQUENCY_HOT)
     return true;
+  /* If profile is not known yet, be conservative.
+     FIXME: this predicate is used by early inliner and can do better there.  */
+  if (symtab->state < IPA_SSA)
+    return true;
   if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
       && frequency < CGRAPH_FREQ_BASE * 3 / 2)
     return false;
index 72f9702f851bde427c4e06b3d6f298142f962e87..82a84eabdd9403320dc7953a99e51c1a494d2c25 100644 (file)
@@ -21,6 +21,7 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GCC_CGRAPH_H
 #define GCC_CGRAPH_H
 
+#include "profile-count.h"
 #include "ipa-ref.h"
 #include "plugin-api.h"
 
@@ -910,7 +911,7 @@ public:
      All hooks will see this in node's global.inlined_to, when invoked.
      Can be NULL if the node is not inlined.  SUFFIX is string that is appended
      to the original name.  */
-  cgraph_node *create_clone (tree decl, gcov_type count, int freq,
+  cgraph_node *create_clone (tree decl, profile_count count, int freq,
                             bool update_original,
                             vec<cgraph_edge *> redirect_callers,
                             bool call_duplication_hook,
@@ -1078,14 +1079,14 @@ public:
 
   /* Create edge from a given function to CALLEE in the cgraph.  */
   cgraph_edge *create_edge (cgraph_node *callee,
-                           gcall *call_stmt, gcov_type count,
+                           gcall *call_stmt, profile_count count,
                            int freq);
 
   /* Create an indirect edge with a yet-undetermined callee where the call
      statement destination is a formal parameter of the caller with index
      PARAM_INDEX. */
   cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
-                                    gcov_type count, int freq,
+                                    profile_count count, int freq,
                                     bool compute_indirect_info = true);
 
   /* Like cgraph_create_edge walk the clone tree and update all clones sharing
@@ -1093,7 +1094,7 @@ public:
    update the edge same way as cgraph_set_call_stmt_including_clones does.  */
   void create_edge_including_clones (cgraph_node *callee,
                                     gimple *old_stmt, gcall *stmt,
-                                    gcov_type count,
+                                    profile_count count,
                                     int freq,
                                     cgraph_inline_failed_t reason);
 
@@ -1356,7 +1357,7 @@ public:
   cgraph_thunk_info thunk;
 
   /* Expected number of executions: calculated in profile.c.  */
-  gcov_type count;
+  profile_count count;
   /* How to scale counts at materialization time; used to merge
      LTO units with different number of profile runs.  */
   int count_materialization_scale;
@@ -1629,7 +1630,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
   /* Turn edge into speculative call calling N2. Update
      the profile so the direct call is taken COUNT times
      with FREQUENCY.  */
-  cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
+  cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count,
                                 int direct_frequency);
 
    /* Given speculative call edge, return all three components.  */
@@ -1648,7 +1649,8 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
   /* Create clone of edge in the node N represented
      by CALL_EXPR the callgraph.  */
   cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
-                      gcov_type count_scale, int freq_scale, bool update_original);
+                      gcov_type count_scale, int freq_scale,
+                      bool update_original);
 
   /* Verify edge count and frequency.  */
   bool verify_count_and_frequency ();
@@ -1673,7 +1675,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
   static void rebuild_references (void);
 
   /* Expected number of executions: calculated in profile.c.  */
-  gcov_type count;
+  profile_count count;
   cgraph_node *caller;
   cgraph_node *callee;
   cgraph_edge *prev_caller;
@@ -2250,7 +2252,7 @@ private:
      parameters of which only CALLEE can be NULL (when creating an indirect call
      edge).  */
   cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
-                           gcall *call_stmt, gcov_type count, int freq,
+                           gcall *call_stmt, profile_count count, int freq,
                            bool indir_unknown_callee);
 
   /* Put the edge onto the free list.  */
@@ -2321,7 +2323,7 @@ void cgraphunit_c_finalize (void);
 
 /*  Initialize datastructures so DECL is a function in lowered gimple form.
     IN_SSA is true if the gimple is in SSA.  */
-basic_block init_lowered_empty_function (tree, bool, gcov_type);
+basic_block init_lowered_empty_function (tree, bool, profile_count);
 
 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
 /* In cgraphclones.c  */
index ec9006637482b89289e21f8494b58739b45fefb2..e6026dc5c2fea9b7907fdf6a929dc6e0938a31e5 100644 (file)
@@ -89,7 +89,7 @@ cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
                    gcov_type count_scale, int freq_scale, bool update_original)
 {
   cgraph_edge *new_edge;
-  gcov_type gcov_count = apply_probability (count, count_scale);
+  profile_count gcov_count = count.apply_scale (count_scale, REG_BR_PROB_BASE);
   gcov_type freq;
 
   /* We do not want to ignore loop nest after frequency drops to 0.  */
@@ -142,8 +142,6 @@ cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
   if (update_original)
     {
       count -= new_edge->count;
-      if (count < 0)
-       count = 0;
     }
   symtab->call_edge_duplication_hooks (this, new_edge);
   return new_edge;
@@ -336,7 +334,7 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
   new_thunk->clone.args_to_skip = node->clone.args_to_skip;
   new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
 
-  cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
+  cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count,
                                                  CGRAPH_FREQ_BASE);
   symtab->call_edge_duplication_hooks (thunk->callees, e);
   symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
@@ -421,7 +419,7 @@ dump_callgraph_transformation (const cgraph_node *original,
    node is not inlined.  */
 
 cgraph_node *
-cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
+cgraph_node::create_clone (tree new_decl, profile_count prof_count, int freq,
                           bool update_original,
                           vec<cgraph_edge *> redirect_callers,
                           bool call_duplication_hook,
@@ -436,6 +434,7 @@ cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
   if (new_inlined_to)
     dump_callgraph_transformation (this, new_inlined_to, "inlining to");
 
+  new_node->count = prof_count;
   new_node->decl = new_decl;
   new_node->register_symbol ();
   new_node->origin = origin;
@@ -476,21 +475,17 @@ cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
   else
     new_node->clone.combined_args_to_skip = args_to_skip;
 
-  if (count)
+  if (count.initialized_p ())
     {
       if (new_node->count > count)
         count_scale = REG_BR_PROB_BASE;
       else
-       count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
+       count_scale = new_node->count.probability_in (count);
     }
   else
     count_scale = 0;
   if (update_original)
-    {
-      count -= gcov_count;
-      if (count < 0)
-       count = 0;
-    }
+    count -= prof_count;
 
   FOR_EACH_VEC_ELT (redirect_callers, i, e)
     {
@@ -785,7 +780,7 @@ cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
 void
 cgraph_node::create_edge_including_clones (cgraph_node *callee,
                                           gimple *old_stmt, gcall *stmt,
-                                          gcov_type count,
+                                          profile_count count,
                                           int freq,
                                           cgraph_inline_failed_t reason)
 {
index 7b4f47e6efb41e7c401e7347d86fffca6618c4e9..77209046d9a92155cec50920d144fb92c8edbfe7 100644 (file)
@@ -615,7 +615,7 @@ cgraph_node::analyze (void)
     {
       cgraph_node *t = cgraph_node::get (thunk.alias);
 
-      create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
+      create_edge (t, NULL, t->count, CGRAPH_FREQ_BASE);
       callees->can_throw_external = !TREE_NOTHROW (t->decl);
       /* Target code in expand_thunk may need the thunk's target
         to be analyzed, so recurse here.  */
@@ -1475,7 +1475,7 @@ mark_functions_to_output (void)
    return basic block in the function body.  */
 
 basic_block
-init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
+init_lowered_empty_function (tree decl, bool in_ssa, profile_count count)
 {
   basic_block bb;
   edge e;
@@ -1873,13 +1873,13 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
                     adjustment, because that's why we're emitting a
                     thunk.  */
                  then_bb = create_basic_block (NULL, bb);
-                 then_bb->count = count - count / 16;
+                 then_bb->count = count - count.apply_scale (1, 16);
                  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
                  return_bb = create_basic_block (NULL, then_bb);
                  return_bb->count = count;
                  return_bb->frequency = BB_FREQ_MAX;
                  else_bb = create_basic_block (NULL, else_bb);
-                 then_bb->count = count / 16;
+                 then_bb->count = count.apply_scale (1, 16);
                  then_bb->frequency = BB_FREQ_MAX / 16;
                  add_bb_to_loop (then_bb, bb->loop_father);
                  add_bb_to_loop (return_bb, bb->loop_father);
@@ -1892,19 +1892,19 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
                  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
                  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
                  e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
-                 e->count = count - count / 16;
+                 e->count = count - count.apply_scale (1, 16);
                  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
                  e->probability = REG_BR_PROB_BASE / 16;
-                 e->count = count / 16;
+                 e->count = count.apply_scale (1, 16);
                  e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  e->probability = REG_BR_PROB_BASE;
                  e->count = count;
                  e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
                  e->probability = REG_BR_PROB_BASE;
-                 e->count = count - count / 16;
+                 e->count = count - count.apply_scale (1, 16);
                  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
                  e->probability = REG_BR_PROB_BASE;
-                 e->count = count / 16;
+                 e->count = count.apply_scale (1, 16);
                  bsi = gsi_last_bb (then_bb);
                }
 
@@ -1940,7 +1940,7 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
 
       cfun->gimple_df->in_ssa_p = true;
       profile_status_for_fn (cfun)
-        = count ? PROFILE_READ : PROFILE_GUESSED;
+        = count.initialized_p () ? PROFILE_READ : PROFILE_GUESSED;
       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
       TREE_ASM_WRITTEN (thunk_fndecl) = false;
       delete_unreachable_blocks ();
index 94a84b3afd432538302bd916a0e270e2c4409c9c..1ad329cdab4ffc8a1d647cf148a2abf4480cd85a 100644 (file)
@@ -33846,7 +33846,8 @@ make_resolver_func (const tree default_decl,
 
   gimplify_function_tree (decl);
   push_cfun (DECL_STRUCT_FUNCTION (decl));
-  *empty_bb = init_lowered_empty_function (decl, false, 0);
+  *empty_bb = init_lowered_empty_function (decl, false,
+                                          profile_count::uninitialized ());
 
   cgraph_node::add_new_function (decl, true);
   symtab->call_cgraph_insertion_hooks (cgraph_node::get_create (decl));
index e61b78b0a915df748fcf3d73cd46db855cf7a1c6..356c923c4cd7294a2a2d7ec51b354d78105a6111 100644 (file)
@@ -1951,9 +1951,11 @@ dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
       fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
       if (bb->frequency)
         fprintf (file, " freq:%d", bb->frequency);
-      if (bb->count)
-        fprintf (file, " count:%" PRId64,
-                 bb->count);
+      if (bb->count.initialized_p ())
+       {
+          fprintf (file, ", count:");
+         bb->count.dump (file);
+       }
       fprintf (file, " seq:%d", (*bb_seqn)++);
       fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
       FOR_EACH_EDGE (e, ei, bb->preds)
index 9b2e5657a1a956203723d233e2fb6c573ae08c42..d19e2fdde44c0b1f65f2ec0108e74bc4bb5c0e1a 100644 (file)
@@ -1948,7 +1948,7 @@ scalarize_intrinsic_call (gfc_expr *e)
 {
   gfc_actual_arglist *a, *b;
   gfc_constructor_base ctor;
-  gfc_constructor *args[5];
+  gfc_constructor *args[5] = {};  /* Avoid uninitialized warnings.  */
   gfc_constructor *ci, *new_ctor;
   gfc_expr *expr, *old;
   int n, i, rank[5], array_arg;
index d545340951f35ec2dc0dc00ad1f39cad5f604cd9..23cf692e321ee98daf5470093c709c8d7380ce73 100644 (file)
@@ -264,8 +264,8 @@ input_bb (struct lto_input_block *ib, enum LTO_tags tag,
   index = streamer_read_uhwi (ib);
   bb = BASIC_BLOCK_FOR_FN (fn, index);
 
-  bb->count = apply_scale (streamer_read_gcov_count (ib),
-                           count_materialization_scale);
+  bb->count = profile_count::stream_in (ib).apply_scale
+                (count_materialization_scale, REG_BR_PROB_BASE);
   bb->frequency = streamer_read_hwi (ib);
   bb->flags = streamer_read_hwi (ib);
 
index b471d6f739addc3200680f0b6a5341c1ef6ac73c..cdd775388e183edd237d8e3c5a0ddd6a5473b99f 100644 (file)
@@ -209,7 +209,7 @@ output_bb (struct output_block *ob, basic_block bb, struct function *fn)
                                : LTO_bb0);
 
   streamer_write_uhwi (ob, bb->index);
-  streamer_write_gcov_count (ob, bb->count);
+  bb->count.stream_out (ob);
   streamer_write_hwi (ob, bb->frequency);
   streamer_write_hwi (ob, bb->flags);
 
index 1554d517f8c57028aa1af9317d64c3c33a1712bb..af336da5ad4489641dd507918688b757838f461f 100644 (file)
@@ -64,10 +64,10 @@ print_global_statistics (FILE* file)
   long n_loops = 0;
   long n_stmts = 0;
   long n_conditions = 0;
-  long n_p_bbs = 0;
-  long n_p_loops = 0;
-  long n_p_stmts = 0;
-  long n_p_conditions = 0;
+  profile_count n_p_bbs = profile_count::zero ();
+  profile_count n_p_loops = profile_count::zero ();
+  profile_count n_p_stmts = profile_count::zero ();
+  profile_count n_p_conditions = profile_count::zero ();
 
   basic_block bb;
 
@@ -76,7 +76,8 @@ print_global_statistics (FILE* file)
       gimple_stmt_iterator psi;
 
       n_bbs++;
-      n_p_bbs += bb->count;
+      if (bb->count.initialized_p ())
+        n_p_bbs += bb->count;
 
       /* Ignore artificial surrounding loop.  */
       if (bb == bb->loop_father->header
@@ -89,13 +90,15 @@ print_global_statistics (FILE* file)
       if (EDGE_COUNT (bb->succs) > 1)
        {
          n_conditions++;
-         n_p_conditions += bb->count;
+         if (bb->count.initialized_p ())
+           n_p_conditions += bb->count;
        }
 
       for (psi = gsi_start_bb (bb); !gsi_end_p (psi); gsi_next (&psi))
        {
          n_stmts++;
-         n_p_stmts += bb->count;
+         if (bb->count.initialized_p ())
+           n_p_stmts += bb->count;
        }
     }
 
@@ -105,10 +108,15 @@ print_global_statistics (FILE* file)
   fprintf (file, "CONDITIONS:%ld, ", n_conditions);
   fprintf (file, "STMTS:%ld)\n", n_stmts);
   fprintf (file, "\nGlobal profiling statistics (");
-  fprintf (file, "BBS:%ld, ", n_p_bbs);
-  fprintf (file, "LOOPS:%ld, ", n_p_loops);
-  fprintf (file, "CONDITIONS:%ld, ", n_p_conditions);
-  fprintf (file, "STMTS:%ld)\n", n_p_stmts);
+  fprintf (file, "BBS:");
+  n_p_bbs.dump (file);
+  fprintf (file, ", LOOPS:");
+  n_p_loops.dump (file);
+  fprintf (file, ", CONDITIONS:");
+  n_p_conditions.dump (file);
+  fprintf (file, ", STMTS:");
+  n_p_stmts.dump (file);
+  fprintf (file, ")\n");
 }
 
 /* Print statistics for SCOP to FILE.  */
@@ -120,10 +128,10 @@ print_graphite_scop_statistics (FILE* file, scop_p scop)
   long n_loops = 0;
   long n_stmts = 0;
   long n_conditions = 0;
-  long n_p_bbs = 0;
-  long n_p_loops = 0;
-  long n_p_stmts = 0;
-  long n_p_conditions = 0;
+  profile_count n_p_bbs = profile_count::zero ();
+  profile_count n_p_loops = profile_count::zero ();
+  profile_count n_p_stmts = profile_count::zero ();
+  profile_count n_p_conditions = profile_count::zero ();
 
   basic_block bb;
 
@@ -136,7 +144,8 @@ print_graphite_scop_statistics (FILE* file, scop_p scop)
        continue;
 
       n_bbs++;
-      n_p_bbs += bb->count;
+      if (bb->count.initialized_p ())
+        n_p_bbs += bb->count;
 
       if (EDGE_COUNT (bb->succs) > 1)
        {
@@ -173,10 +182,15 @@ print_graphite_scop_statistics (FILE* file, scop_p scop)
   fprintf (file, "CONDITIONS:%ld, ", n_conditions);
   fprintf (file, "STMTS:%ld)\n", n_stmts);
   fprintf (file, "\nSCoP profiling statistics (");
-  fprintf (file, "BBS:%ld, ", n_p_bbs);
-  fprintf (file, "LOOPS:%ld, ", n_p_loops);
-  fprintf (file, "CONDITIONS:%ld, ", n_p_conditions);
-  fprintf (file, "STMTS:%ld)\n", n_p_stmts);
+  fprintf (file, "BBS:");
+  n_p_bbs.dump (file);
+  fprintf (file, ", LOOPS:");
+  n_p_loops.dump (file);
+  fprintf (file, ", CONDITIONS:");
+  n_p_conditions.dump (file);
+  fprintf (file, ", STMTS:");
+  n_p_stmts.dump (file);
+  fprintf (file, ")\n");
 }
 
 /* Print statistics for SCOPS to FILE.  */
index 1a2d45fcc527216c517d865dec5628ed5a9aec6d..6eed014049288323e2bd08d1257f7bfa3935be94 100644 (file)
@@ -34,6 +34,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-iterator.h"
 #include "stor-layout.h"
 #include "output.h"
+#include "basic-block.h"
 #include "cfg.h"
 #include "function.h"
 #include "fold-const.h"
index 865de96bb616fe7f5d77b72ac758fc20f9225640..01bff2f972436a8dbe5fde66faa3aae106b88d39 100644 (file)
@@ -26,6 +26,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "is-a.h"
 #include "vec.h"
 #include "tree.h"
+#include "basic-block.h"
 #include "cfg.h"
 #include "function.h"
 #include "dumpfile.h"
index c5d8a6e1f445773b9f892da8cbc47e5450baf8c1..80298b2b2fe46abb41ba3526195f8bf934c31727 100644 (file)
@@ -29,9 +29,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "vec.h"
 #include "tree.h"
 #include "tree-pass.h"
-#include "cfg.h"
 #include "function.h"
 #include "basic-block.h"
+#include "cfg.h"
 #include "fold-const.h"
 #include "gimple.h"
 #include "gimple-iterator.h"
@@ -6047,9 +6047,10 @@ struct phi_definition
 
 template <typename T>
 static
-T sum_slice (const auto_vec <T> &v, unsigned start, unsigned end)
+T sum_slice (const auto_vec <T> &v, unsigned start, unsigned end,
+            T zero)
 {
-  T s = 0;
+  T s = zero;
 
   for (unsigned i = start; i < end; i++)
     s += v[i];
@@ -6137,7 +6138,7 @@ convert_switch_statements (void)
 
        auto_vec <edge> new_edges;
        auto_vec <phi_definition *> phi_todo_list;
-       auto_vec <gcov_type> edge_counts;
+       auto_vec <profile_count> edge_counts;
        auto_vec <int> edge_probabilities;
 
        /* Investigate all labels that and PHI nodes in these edges which
@@ -6230,7 +6231,7 @@ convert_switch_statements (void)
            basic_block label_bb
              = label_to_block_fn (func, CASE_LABEL (label));
            edge new_edge = make_edge (cur_bb, label_bb, EDGE_TRUE_VALUE);
-           int prob_sum = sum_slice <int> (edge_probabilities, i, labels) +
+           int prob_sum = sum_slice <int> (edge_probabilities, i, labels, 0) +
               edge_probabilities[0];
 
            if (prob_sum)
@@ -6255,7 +6256,8 @@ convert_switch_statements (void)
                next_edge->probability
                  = inverse_probability (new_edge->probability);
                next_edge->count = edge_counts[0]
-                 + sum_slice <gcov_type> (edge_counts, i, labels);
+                 + sum_slice <profile_count> (edge_counts, i, labels,
+                                              profile_count::zero ());
                next_bb->frequency = EDGE_FREQUENCY (next_edge);
                cur_bb = next_bb;
              }
index 4d9441c91d4efd51783de08824dbc3ebe2d3cb5d..2a17254c3b26be4ae53bae967c159b3c0433d123 100644 (file)
@@ -26,6 +26,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "vec.h"
 #include "tree.h"
 #include "dominance.h"
+#include "basic-block.h"
 #include "cfg.h"
 #include "cfganal.h"
 #include "function.h"
index c7fc3a0d0be70abc976ed4fcf9248c20229dfbb7..753673c8f66845a9ab12ecdacdd3aa361554dc1d 100644 (file)
@@ -715,7 +715,7 @@ chkp_produce_thunks (bool early)
          node->thunk.thunk_p = true;
          node->thunk.add_pointer_bounds_args = true;
          node->create_edge (node->instrumented_version, NULL,
-                            0, CGRAPH_FREQ_BASE);
+                            node->count, CGRAPH_FREQ_BASE);
          node->create_reference (node->instrumented_version,
                               IPA_REF_CHKP, NULL);
          /* Thunk shouldn't be a cdtor.  */
index f5e023e748812d6b61945dd416f081fb34edea90..3c9c3f29ee0eb8895e295a1371448bf3a184e028 100644 (file)
@@ -360,7 +360,7 @@ object_allocator<ipcp_agg_lattice> ipcp_agg_lattice_pool
 
 /* Maximal count found in program.  */
 
-static gcov_type max_count;
+static profile_count max_count;
 
 /* Original overall size of the program.  */
 
@@ -640,7 +640,7 @@ ipcp_versionable_function_p (struct cgraph_node *node)
 
 struct caller_statistics
 {
-  gcov_type count_sum;
+  profile_count count_sum;
   int n_calls, n_hot_calls, freq_sum;
 };
 
@@ -649,7 +649,7 @@ struct caller_statistics
 static inline void
 init_caller_stats (struct caller_statistics *stats)
 {
-  stats->count_sum = 0;
+  stats->count_sum = profile_count::zero ();
   stats->n_calls = 0;
   stats->n_hot_calls = 0;
   stats->freq_sum = 0;
@@ -667,7 +667,8 @@ gather_caller_stats (struct cgraph_node *node, void *data)
   for (cs = node->callers; cs; cs = cs->next_caller)
     if (!cs->caller->thunk.thunk_p)
       {
-       stats->count_sum += cs->count;
+        if (cs->count.initialized_p ())
+         stats->count_sum += cs->count;
        stats->freq_sum += cs->frequency;
        stats->n_calls++;
        if (cs->maybe_hot_p ())
@@ -718,9 +719,9 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
   /* When profile is available and function is hot, propagate into it even if
      calls seems cold; constant propagation can improve function's speed
      significantly.  */
-  if (max_count)
+  if (max_count > profile_count::zero ())
     {
-      if (stats.count_sum > node->count * 90 / 100)
+      if (stats.count_sum > node->count.apply_scale (90, 100))
        {
          if (dump_file)
            fprintf (dump_file, "Considering %s for cloning; "
@@ -2611,7 +2612,7 @@ incorporate_penalties (ipa_node_params *info, int64_t evaluation)
 
 static bool
 good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
-                           int freq_sum, gcov_type count_sum, int size_cost)
+                           int freq_sum, profile_count count_sum, int size_cost)
 {
   if (time_benefit == 0
       || !opt_for_fn (node->decl, flag_ipa_cp_clone)
@@ -2621,22 +2622,25 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
   gcc_assert (size_cost > 0);
 
   struct ipa_node_params *info = IPA_NODE_REF (node);
-  if (max_count)
+  if (max_count > profile_count::zero ())
     {
-      int factor = (count_sum * 1000) / max_count;
+      int factor = RDIV (count_sum.probability_in (max_count)
+                        * 1000, REG_BR_PROB_BASE);
       int64_t evaluation = (((int64_t) time_benefit * factor)
                                    / size_cost);
       evaluation = incorporate_penalties (info, evaluation);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
-       fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
-                "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
-                "%s%s) -> evaluation: " "%" PRId64
+       {
+         fprintf (dump_file, "     good_cloning_opportunity_p (time: %i, "
+                  "size: %i, count_sum: ", time_benefit, size_cost);
+         count_sum.dump (dump_file);
+         fprintf (dump_file, "%s%s) -> evaluation: " "%" PRId64
                 ", threshold: %i\n",
-                time_benefit, size_cost, (HOST_WIDE_INT) count_sum,
                 info->node_within_scc ? ", scc" : "",
                 info->node_calling_single_call ? ", single_call" : "",
                 evaluation, PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD));
+       }
 
       return evaluation >= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD);
     }
@@ -3520,11 +3524,11 @@ template <typename valtype>
 static bool
 get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
                                int *freq_sum,
-                               gcov_type *count_sum, int *caller_count)
+                               profile_count *count_sum, int *caller_count)
 {
   ipcp_value_source<valtype> *src;
   int freq = 0, count = 0;
-  gcov_type cnt = 0;
+  profile_count cnt = profile_count::zero ();
   bool hot = false;
 
   for (src = val->sources; src; src = src->next)
@@ -3536,7 +3540,8 @@ get_info_about_necessary_edges (ipcp_value<valtype> *val, cgraph_node *dest,
            {
              count++;
              freq += cs->frequency;
-             cnt += cs->count;
+             if (cs->count.initialized_p ())
+               cnt += cs->count;
              hot |= cs->maybe_hot_p ();
            }
          cs = get_next_cgraph_edge_clone (cs);
@@ -3611,19 +3616,27 @@ dump_profile_updates (struct cgraph_node *orig_node,
 {
   struct cgraph_edge *cs;
 
-  fprintf (dump_file, "    setting count of the specialized node to "
-          HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) new_node->count);
+  fprintf (dump_file, "    setting count of the specialized node to ");
+  new_node->count.dump (dump_file);
+  fprintf (dump_file, "\n");
   for (cs = new_node->callees; cs; cs = cs->next_callee)
-    fprintf (dump_file, "      edge to %s has count "
-            HOST_WIDE_INT_PRINT_DEC "\n",
-            cs->callee->name (), (HOST_WIDE_INT) cs->count);
+    {
+      fprintf (dump_file, "      edge to %s has count ",
+              cs->callee->name ());
+      cs->count.dump (dump_file);
+      fprintf (dump_file, "\n");
+    }
 
-  fprintf (dump_file, "    setting count of the original node to "
-          HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) orig_node->count);
+  fprintf (dump_file, "    setting count of the original node to ");
+  orig_node->count.dump (dump_file);
+  fprintf (dump_file, "\n");
   for (cs = orig_node->callees; cs; cs = cs->next_callee)
-    fprintf (dump_file, "      edge to %s is left with "
-            HOST_WIDE_INT_PRINT_DEC "\n",
-            cs->callee->name (), (HOST_WIDE_INT) cs->count);
+    {
+      fprintf (dump_file, "      edge to %s is left with ",
+              cs->callee->name ());
+      cs->count.dump (dump_file);
+      fprintf (dump_file, "\n");
+    }
 }
 
 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
@@ -3635,10 +3648,10 @@ update_profiling_info (struct cgraph_node *orig_node,
 {
   struct cgraph_edge *cs;
   struct caller_statistics stats;
-  gcov_type new_sum, orig_sum;
-  gcov_type remainder, orig_node_count = orig_node->count;
+  profile_count new_sum, orig_sum;
+  profile_count remainder, orig_node_count = orig_node->count;
 
-  if (orig_node_count == 0)
+  if (!(orig_node_count > profile_count::zero ()))
     return;
 
   init_caller_stats (&stats);
@@ -3653,18 +3666,22 @@ update_profiling_info (struct cgraph_node *orig_node,
   if (orig_node_count < orig_sum + new_sum)
     {
       if (dump_file)
-       fprintf (dump_file, "    Problem: node %s has too low count "
-                HOST_WIDE_INT_PRINT_DEC " while the sum of incoming "
-                "counts is " HOST_WIDE_INT_PRINT_DEC "\n",
-                orig_node->dump_name (),
-                (HOST_WIDE_INT) orig_node_count,
-                (HOST_WIDE_INT) (orig_sum + new_sum));
-
-      orig_node_count = (orig_sum + new_sum) * 12 / 10;
+       {
+         fprintf (dump_file, "    Problem: node %s has too low count ",
+                  orig_node->dump_name ());
+         orig_node_count.dump (dump_file);
+         fprintf (dump_file, "while the sum of incoming count is ");
+         (orig_sum + new_sum).dump (dump_file);
+         fprintf (dump_file, "\n");
+       }
+
+      orig_node_count = (orig_sum + new_sum).apply_scale (12, 10);
       if (dump_file)
-       fprintf (dump_file, "      proceeding by pretending it was "
-                HOST_WIDE_INT_PRINT_DEC "\n",
-                (HOST_WIDE_INT) orig_node_count);
+       {
+         fprintf (dump_file, "      proceeding by pretending it was ");
+         orig_node_count.dump (dump_file);
+         fprintf (dump_file, "\n");
+       }
     }
 
   new_node->count = new_sum;
@@ -3672,17 +3689,14 @@ update_profiling_info (struct cgraph_node *orig_node,
   orig_node->count = remainder;
 
   for (cs = new_node->callees; cs; cs = cs->next_callee)
+    /* FIXME: why do we care about non-zero frequency here?  */
     if (cs->frequency)
-      cs->count = apply_probability (cs->count,
-                                    GCOV_COMPUTE_SCALE (new_sum,
-                                                        orig_node_count));
+      cs->count = cs->count.apply_scale (new_sum, orig_node_count);
     else
-      cs->count = 0;
+      cs->count = profile_count::zero ();
 
   for (cs = orig_node->callees; cs; cs = cs->next_callee)
-    cs->count = apply_probability (cs->count,
-                                  GCOV_COMPUTE_SCALE (remainder,
-                                                      orig_node_count));
+    cs->count = cs->count.apply_scale (remainder, orig_node_count);
 
   if (dump_file)
     dump_profile_updates (orig_node, new_node);
@@ -3695,15 +3709,18 @@ update_profiling_info (struct cgraph_node *orig_node,
 static void
 update_specialized_profile (struct cgraph_node *new_node,
                            struct cgraph_node *orig_node,
-                           gcov_type redirected_sum)
+                           profile_count redirected_sum)
 {
   struct cgraph_edge *cs;
-  gcov_type new_node_count, orig_node_count = orig_node->count;
+  profile_count new_node_count, orig_node_count = orig_node->count;
 
   if (dump_file)
-    fprintf (dump_file, "    the sum of counts of redirected  edges is "
-            HOST_WIDE_INT_PRINT_DEC "\n", (HOST_WIDE_INT) redirected_sum);
-  if (orig_node_count == 0)
+    {
+      fprintf (dump_file, "    the sum of counts of redirected  edges is ");
+      redirected_sum.dump (dump_file);
+      fprintf (dump_file, "\n");
+    }
+  if (!(orig_node_count > profile_count::zero ()))
     return;
 
   gcc_assert (orig_node_count >= redirected_sum);
@@ -3714,21 +3731,15 @@ update_specialized_profile (struct cgraph_node *new_node,
 
   for (cs = new_node->callees; cs; cs = cs->next_callee)
     if (cs->frequency)
-      cs->count += apply_probability (cs->count,
-                                     GCOV_COMPUTE_SCALE (redirected_sum,
-                                                         new_node_count));
+      cs->count += cs->count.apply_scale (redirected_sum, new_node_count);
     else
-      cs->count = 0;
+      cs->count = profile_count::zero ();
 
   for (cs = orig_node->callees; cs; cs = cs->next_callee)
     {
-      gcov_type dec = apply_probability (cs->count,
-                                        GCOV_COMPUTE_SCALE (redirected_sum,
-                                                            orig_node_count));
-      if (dec < cs->count)
-       cs->count -= dec;
-      else
-       cs->count = 0;
+      profile_count dec = cs->count.apply_scale (redirected_sum,
+                                                orig_node_count);
+      cs->count -= dec;
     }
 
   if (dump_file)
@@ -4423,7 +4434,7 @@ static void
 perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
 {
   ipcp_value_source<valtype> *src;
-  gcov_type redirected_sum = 0;
+  profile_count redirected_sum = profile_count::zero ();
 
   for (src = val->sources; src; src = src->next)
     {
@@ -4441,13 +4452,14 @@ perhaps_add_new_callers (cgraph_node *node, ipcp_value<valtype> *val)
 
              cs->redirect_callee_duplicating_thunks (val->spec_node);
              val->spec_node->expand_all_artificial_thunks ();
-             redirected_sum += cs->count;
+             if (cs->count.initialized_p ())
+               redirected_sum = redirected_sum + cs->count;
            }
          cs = get_next_cgraph_edge_clone (cs);
        }
     }
 
-  if (redirected_sum)
+  if (redirected_sum > profile_count::zero ())
     update_specialized_profile (val->spec_node, node, redirected_sum);
 }
 
@@ -4550,7 +4562,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
 {
   struct ipa_agg_replacement_value *aggvals;
   int freq_sum, caller_count;
-  gcov_type count_sum;
+  profile_count count_sum;
   vec<cgraph_edge *> callers;
 
   if (val->spec_node)
@@ -5103,7 +5115,7 @@ make_pass_ipa_cp (gcc::context *ctxt)
 void
 ipa_cp_c_finalize (void)
 {
-  max_count = 0;
+  max_count = profile_count::zero ();
   overall_size = 0;
   max_new_size = 0;
 }
index c7460caf7a9907bc8ef4740d2c9c1b8af022f1d1..cee3c3d89d7a95ce1850713ca15e198441b05ef6 100644 (file)
@@ -2939,7 +2939,7 @@ struct odr_type_warn_count
 {
   tree type;
   int count;
-  gcov_type dyn_count;
+  profile_count dyn_count;
 };
 
 /* Record about how many calls would benefit from given method to be final.  */
@@ -2948,14 +2948,14 @@ struct decl_warn_count
 {
   tree decl;
   int count;
-  gcov_type dyn_count;
+  profile_count dyn_count;
 };
 
 /* Information about type and decl warnings.  */
 
 struct final_warning_record
 {
-  gcov_type dyn_count;
+  profile_count dyn_count;
   auto_vec<odr_type_warn_count> type_warnings;
   hash_map<tree, decl_warn_count> decl_warnings;
 };
@@ -3093,15 +3093,22 @@ possible_polymorphic_call_targets (tree otr_type,
       if ((*slot)->type_warning && final_warning_records)
        {
          final_warning_records->type_warnings[(*slot)->type_warning - 1].count++;
-         final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
-           += final_warning_records->dyn_count;
+         if (!final_warning_records->type_warnings
+               [(*slot)->type_warning - 1].dyn_count.initialized_p ())
+           final_warning_records->type_warnings
+               [(*slot)->type_warning - 1].dyn_count = profile_count::zero ();
+         if (final_warning_records->dyn_count > 0)
+           final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
+             = final_warning_records->type_warnings[(*slot)->type_warning - 1].dyn_count
+               + final_warning_records->dyn_count;
        }
       if (!speculative && (*slot)->decl_warning && final_warning_records)
        {
          struct decl_warn_count *c =
             final_warning_records->decl_warnings.get ((*slot)->decl_warning);
          c->count++;
-         c->dyn_count += final_warning_records->dyn_count;
+         if (final_warning_records->dyn_count > 0)
+           c->dyn_count += final_warning_records->dyn_count;
        }
       return (*slot)->targets;
     }
@@ -3227,6 +3234,10 @@ possible_polymorphic_call_targets (tree otr_type,
                        final_warning_records->type_warnings.safe_grow_cleared
                          (odr_types.length ());
                      final_warning_records->type_warnings[outer_type->id].count++;
+                     if (!final_warning_records->type_warnings
+                               [outer_type->id].dyn_count.initialized_p ())
+                       final_warning_records->type_warnings
+                          [outer_type->id].dyn_count = profile_count::zero ();
                      final_warning_records->type_warnings[outer_type->id].dyn_count
                        += final_warning_records->dyn_count;
                      final_warning_records->type_warnings[outer_type->id].type
@@ -3587,7 +3598,9 @@ ipa_devirt (void)
   if (warn_suggest_final_methods || warn_suggest_final_types)
     {
       final_warning_records = new (final_warning_record);
-      final_warning_records->type_warnings.safe_grow_cleared (odr_types.length ());
+      final_warning_records->dyn_count = profile_count::zero ();
+      final_warning_records->type_warnings.safe_grow_cleared
+                                                (odr_types.length ());
       free_polymorphic_call_targets_hash ();
     }
 
@@ -3768,7 +3781,8 @@ ipa_devirt (void)
                nconverted++;
                update = true;
                e->make_speculative
-                 (likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
+                 (likely_target, e->count.apply_scale (8, 10),
+                  e->frequency * 8 / 10);
              }
          }
       if (update)
@@ -3785,10 +3799,10 @@ ipa_devirt (void)
              {
                tree type = final_warning_records->type_warnings[i].type;
                int count = final_warning_records->type_warnings[i].count;
-               long long dyn_count
+               profile_count dyn_count
                  = final_warning_records->type_warnings[i].dyn_count;
 
-               if (!dyn_count)
+               if (!(dyn_count > 0))
                  warning_n (DECL_SOURCE_LOCATION (TYPE_NAME (type)),
                             OPT_Wsuggest_final_types, count,
                             "Declaring type %qD final "
@@ -3808,7 +3822,7 @@ ipa_devirt (void)
                             "executed %lli times",
                             type,
                             count,
-                            dyn_count);
+                            (long long) dyn_count.to_gcov_type ());
              }
        }
 
@@ -3823,9 +3837,10 @@ ipa_devirt (void)
            {
              tree decl = decl_warnings_vec[i]->decl;
              int count = decl_warnings_vec[i]->count;
-             long long dyn_count = decl_warnings_vec[i]->dyn_count;
+             profile_count dyn_count
+                 = decl_warnings_vec[i]->dyn_count;
 
-             if (!dyn_count)
+             if (!(dyn_count > 0))
                if (DECL_CXX_DESTRUCTOR_P (decl))
                  warning_n (DECL_SOURCE_LOCATION (decl),
                              OPT_Wsuggest_final_methods, count,
@@ -3851,7 +3866,8 @@ ipa_devirt (void)
                              "Declaring virtual destructor of %qD final "
                              "would enable devirtualization of %i calls "
                              "executed %lli times",
-                             DECL_CONTEXT (decl), count, dyn_count);
+                             DECL_CONTEXT (decl), count,
+                             (long long)dyn_count.to_gcov_type ());
                else
                  warning_n (DECL_SOURCE_LOCATION (decl),
                              OPT_Wsuggest_final_methods, count,
@@ -3861,7 +3877,8 @@ ipa_devirt (void)
                              "Declaring method %qD final "
                              "would enable devirtualization of %i calls "
                              "executed %lli times",
-                             decl, count, dyn_count);
+                             decl, count,
+                             (long long)dyn_count.to_gcov_type ());
            }
        }
 
index 0e74fc3192735cd27056c4b6f57f15844f07a9ea..878f9a891f361a7feb3417d16a861c426e4f5af7 100644 (file)
@@ -243,7 +243,7 @@ redirect_to_unreachable (struct cgraph_edge *e)
   struct ipa_call_summary *es = ipa_call_summaries->get (e);
   e->inline_failed = CIF_UNREACHABLE;
   e->frequency = 0;
-  e->count = 0;
+  e->count = profile_count::zero ();
   es->call_stmt_size = 0;
   es->call_stmt_time = 0;
   if (callee)
index 0b0c1fdfdbc1ad64737fdc137024b791c43e372e..4d152ceab1e03241526b6068bec78d9e85b9e186 100644 (file)
@@ -1328,7 +1328,8 @@ sem_function::merge (sem_item *alias_item)
       alias->icf_merged = true;
       local_original->icf_merged = true;
 
-      ipa_merge_profiles (local_original, alias, true);
+      /* FIXME update local_original counts.  */
+      ipa_merge_profiles (original, alias, true);
       alias->create_wrapper (local_original);
 
       if (dump_file)
index 9f7b2a1370f902f6cb5c05d0765866217b1ebdca..1d79ec41097cbb5d64db35cae3e1ab0aeeddde7a 100644 (file)
@@ -175,10 +175,11 @@ do_estimate_edge_time (struct cgraph_edge *edge)
      edges and for those we disable size limits.  Don't do that when
      probability that caller will call the callee is low however, since it
      may hurt optimization of the caller's hot path.  */
-  if (edge->count && edge->maybe_hot_p ()
-      && (edge->count * 2
+  if (edge->count.initialized_p () && edge->maybe_hot_p ()
+      && (edge->count.apply_scale (2, 1)
           > (edge->caller->global.inlined_to
-            ? edge->caller->global.inlined_to->count : edge->caller->count)))
+            ? edge->caller->global.inlined_to->count
+            : edge->caller->count)))
     hints |= INLINE_HINT_known_hot;
 
   known_vals.release ();
index 0ebe1477f6cbd28a6bd7c59a30769af47817113d..8900556ef10d891c9f26a4c9aac894a7c483de99 100644 (file)
@@ -123,8 +123,8 @@ typedef fibonacci_node <sreal, cgraph_edge> edge_heap_node_t;
 
 /* Statistics we collect about inlining algorithm.  */
 static int overall_size;
-static gcov_type max_count;
-static gcov_type spec_rem;
+static profile_count max_count;
+static profile_count spec_rem;
 
 /* Pre-computed constants 1/CGRAPH_FREQ_BASE and 1/100. */
 static sreal cgraph_freq_base_rec, percent_rec;
@@ -637,8 +637,10 @@ compute_uninlined_call_time (struct cgraph_edge *edge,
                         ? edge->caller->global.inlined_to
                         : edge->caller);
 
-  if (edge->count && caller->count)
-    uninlined_call_time *= (sreal)edge->count / caller->count;
+  if (edge->count > profile_count::zero ()
+      && caller->count > profile_count::zero ())
+    uninlined_call_time *= (sreal)edge->count.to_gcov_type ()
+                          / caller->count.to_gcov_type ();
   if (edge->frequency)
     uninlined_call_time *= cgraph_freq_base_rec * edge->frequency;
   else
@@ -660,8 +662,9 @@ compute_inlined_call_time (struct cgraph_edge *edge,
                         : edge->caller);
   sreal caller_time = ipa_fn_summaries->get (caller)->time;
 
-  if (edge->count && caller->count)
-    time *= (sreal)edge->count / caller->count;
+  if (edge->count > profile_count::zero ()
+      && caller->count > profile_count::zero ())
+    time *= (sreal)edge->count.to_gcov_type () / caller->count.to_gcov_type ();
   if (edge->frequency)
     time *= cgraph_freq_base_rec * edge->frequency;
   else
@@ -718,7 +721,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
      promote non-inline functions to inline and we increase
      MAX_INLINE_INSNS_SINGLE 16-fold for inline functions.  */
   else if ((!DECL_DECLARED_INLINE_P (callee->decl)
-          && (!e->count || !e->maybe_hot_p ()))
+          && (!e->count.initialized_p () || !e->maybe_hot_p ()))
           && ipa_fn_summaries->get (callee)->min_size
                - ipa_call_summaries->get (e)->call_stmt_size
              > MAX (MAX_INLINE_INSNS_SINGLE, MAX_INLINE_INSNS_AUTO))
@@ -726,7 +729,8 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
       e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
       want_inline = false;
     }
-  else if ((DECL_DECLARED_INLINE_P (callee->decl) || e->count)
+  else if ((DECL_DECLARED_INLINE_P (callee->decl)
+           || e->count > profile_count::zero ())
           && ipa_fn_summaries->get (callee)->min_size
                - ipa_call_summaries->get (e)->call_stmt_size
              > 16 * MAX_INLINE_INSNS_SINGLE)
@@ -836,7 +840,7 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
       reason = "recursive call is cold";
       want_inline = false;
     }
-  else if (max_count && !outer_node->count)
+  else if (outer_node->count == profile_count::zero ())
     {
       reason = "not executed in profile";
       want_inline = false;
@@ -874,14 +878,15 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
       int i;
       for (i = 1; i < depth; i++)
        max_prob = max_prob * max_prob / CGRAPH_FREQ_BASE;
-      if (max_count
-         && (edge->count * CGRAPH_FREQ_BASE / outer_node->count
+      if (max_count > profile_count::zero ()
+         && edge->count > profile_count::zero ()
+         && (edge->count.to_gcov_type () * CGRAPH_FREQ_BASE
+             / outer_node->count.to_gcov_type ()
              >= max_prob))
        {
          reason = "profile of recursive call is too large";
          want_inline = false;
        }
-      if (!max_count
+      if (max_count == profile_count::zero ()
          && (edge->frequency * CGRAPH_FREQ_BASE / caller_freq
              >= max_prob))
        {
@@ -907,14 +912,15 @@ want_inline_self_recursive_call_p (struct cgraph_edge *edge,
      methods.  */
   else
     {
-      if (max_count
-         && (edge->count * 100 / outer_node->count
+      if (max_count > profile_count::zero ()
+         && (edge->count.to_gcov_type () * 100
+             / outer_node->count.to_gcov_type ()
              <= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
        {
          reason = "profile of recursive call is too small";
          want_inline = false;
        }
-      else if (!max_count
+      else if (max_count == profile_count::zero ()
               && (edge->frequency * 100 / caller_freq
                   <= PARAM_VALUE (PARAM_MIN_INLINE_RECURSIVE_PROBABILITY)))
        {
@@ -1058,7 +1064,8 @@ edge_badness (struct cgraph_edge *edge, bool dump)
      Again use negative value to make calls with profile appear hotter
      then calls without.
   */
-  else if (opt_for_fn (caller->decl, flag_guess_branch_prob) || caller->count)
+  else if (opt_for_fn (caller->decl, flag_guess_branch_prob)
+          || caller->count > profile_count::zero ())
     {
       sreal numerator, denominator;
       int overall_growth;
@@ -1068,8 +1075,8 @@ edge_badness (struct cgraph_edge *edge, bool dump)
                   - inlined_time);
       if (numerator == 0)
        numerator = ((sreal) 1 >> 8);
-      if (caller->count)
-       numerator *= caller->count;
+      if (caller->count > profile_count::zero ())
+       numerator *= caller->count.to_gcov_type ();
       else if (opt_for_fn (caller->decl, flag_branch_probabilities))
        numerator = numerator >> 11;
       denominator = growth;
@@ -1155,7 +1162,8 @@ edge_badness (struct cgraph_edge *edge, bool dump)
                   " %i (compensated)\n",
                   badness.to_double (),
                  (double)edge->frequency / CGRAPH_FREQ_BASE,
-                  edge->count, caller->count,
+                  edge->count.initialized_p ()
+                  ? edge->count.to_gcov_type () : -1,
+                  caller->count.initialized_p ()
+                  ? caller->count.to_gcov_type () : -1,
                   compute_uninlined_call_time (edge,
                                                unspec_edge_time).to_double (),
                   compute_inlined_call_time (edge, edge_time).to_double (),
@@ -1417,8 +1425,10 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
       {
        /* When profile feedback is available, prioritize by expected number
           of calls.  */
-        heap->insert (!max_count ? -e->frequency
-                     : -(e->count / ((max_count + (1<<24) - 1) / (1<<24))),
+        heap->insert (!(max_count > 0) || !e->count.initialized_p ()
+                     ? -e->frequency
+                     : -(e->count.to_gcov_type ()
+                       / ((max_count.to_gcov_type () + (1<<24) - 1)
+                          / (1<<24))),
                      e);
       }
   for (e = where->callees; e; e = e->next_callee)
@@ -1506,10 +1516,11 @@ recursive_inlining (struct cgraph_edge *edge,
        {
          fprintf (dump_file,
                   "   Inlining call of depth %i", depth);
-         if (node->count)
+         if (node->count > profile_count::zero ())
            {
              fprintf (dump_file, " called approx. %.2f times per call",
-                      (double)curr->count / node->count);
+                      (double)curr->count.to_gcov_type ()
+                      / node->count.to_gcov_type ());
            }
          fprintf (dump_file, "\n");
        }
@@ -1731,7 +1742,7 @@ inline_small_functions (void)
   /* Compute overall unit size and other global parameters used by badness
      metrics.  */
 
-  max_count = 0;
+  max_count = profile_count::uninitialized ();
   ipa_reduced_postorder (order, true, true, NULL);
   free (order);
 
@@ -1771,7 +1782,7 @@ inline_small_functions (void)
          }
 
        for (edge = node->callers; edge; edge = edge->next_caller)
-         if (max_count < edge->count)
+         if (!(max_count >= edge->count))
            max_count = edge->count;
       }
   ipa_free_postorder_info ();
@@ -1835,7 +1846,7 @@ inline_small_functions (void)
     }
 
   gcc_assert (in_lto_p
-             || !max_count
+             || !(max_count > 0)
              || (profile_info && flag_branch_probabilities));
 
   while (!edge_heap.empty ())
@@ -1880,9 +1891,7 @@ inline_small_functions (void)
         Increases of badness are handled lazilly; when we see key with out
         of date value on it, we re-insert it now.  */
       current_badness = edge_badness (edge, false);
-      /* Disable checking for profile because roundoff errors may cause slight
-         deviations in the order.  */
-      gcc_assert (max_count || cached_badness == current_badness);
+      gcc_assert (cached_badness == current_badness);
       gcc_assert (current_badness >= badness);
 #else
       current_badness = edge_badness (edge, false);
@@ -1927,9 +1936,12 @@ inline_small_functions (void)
                   : -1,
                   badness.to_double (),
                   edge->frequency / (double)CGRAPH_FREQ_BASE);
-         if (edge->count)
-           fprintf (dump_file," Called %" PRId64"x\n",
-                    edge->count);
+         if (edge->count.initialized_p ())
+           {
+             fprintf (dump_file, " Called ");
+             edge->count.dump (dump_file);
+             fprintf (dump_file, " times\n");
+            }
          if (dump_flags & TDF_DETAILS)
            edge_badness (edge, true);
        }
@@ -2027,7 +2039,7 @@ inline_small_functions (void)
       update_caller_keys (&edge_heap, where, updated_nodes, NULL);
       /* Offline copy count has possibly changed, recompute if profile is
         available.  */
-      if (max_count)
+      if (max_count > profile_count::zero ())
         {
          struct cgraph_node *n = cgraph_node::get (edge->callee->decl);
          if (n != edge->callee && n->analyzed)
@@ -2233,7 +2245,8 @@ dump_overall_stats (void)
       {
        sreal time = ipa_fn_summaries->get (node)->time;
        sum += time;
-       sum_weighted += time * node->count;
+       if (node->count.initialized_p ())
+         sum_weighted += time * node->count.to_gcov_type ();
       }
   fprintf (dump_file, "Overall time estimate: "
           "%f weighted by profile: "
@@ -2263,56 +2276,59 @@ dump_inline_stats (void)
       {
        if (e->inline_failed)
          {
-           reason[(int) e->inline_failed][0] += e->count;
+           if (e->count.initialized_p ())
+             reason[(int) e->inline_failed][0] += e->count.to_gcov_type ();
            reason[(int) e->inline_failed][1] += e->frequency;
            reason[(int) e->inline_failed][2] ++;
-           if (DECL_VIRTUAL_P (e->callee->decl))
+           if (DECL_VIRTUAL_P (e->callee->decl)
+               && e->count.initialized_p ())
              {
                if (e->indirect_inlining_edge)
-                 noninlined_virt_indir_cnt += e->count;
+                 noninlined_virt_indir_cnt += e->count.to_gcov_type ();
                else
-                 noninlined_virt_cnt += e->count;
+                 noninlined_virt_cnt += e->count.to_gcov_type ();
              }
-           else
+           else if (e->count.initialized_p ())
              {
                if (e->indirect_inlining_edge)
-                 noninlined_indir_cnt += e->count;
+                 noninlined_indir_cnt += e->count.to_gcov_type ();
                else
-                 noninlined_cnt += e->count;
+                 noninlined_cnt += e->count.to_gcov_type ();
              }
          }
-       else
+       else if (e->count.initialized_p ())
          {
            if (e->speculative)
              {
                if (DECL_VIRTUAL_P (e->callee->decl))
-                 inlined_speculative_ply += e->count;
+                 inlined_speculative_ply += e->count.to_gcov_type ();
                else
-                 inlined_speculative += e->count;
+                 inlined_speculative += e->count.to_gcov_type ();
              }
            else if (DECL_VIRTUAL_P (e->callee->decl))
              {
                if (e->indirect_inlining_edge)
-                 inlined_virt_indir_cnt += e->count;
+                 inlined_virt_indir_cnt += e->count.to_gcov_type ();
                else
-                 inlined_virt_cnt += e->count;
+                 inlined_virt_cnt += e->count.to_gcov_type ();
              }
            else
              {
                if (e->indirect_inlining_edge)
-                 inlined_indir_cnt += e->count;
+                 inlined_indir_cnt += e->count.to_gcov_type ();
                else
-                 inlined_cnt += e->count;
+                 inlined_cnt += e->count.to_gcov_type ();
              }
          }
       }
     for (e = node->indirect_calls; e; e = e->next_callee)
-      if (e->indirect_info->polymorphic)
-       indirect_poly_cnt += e->count;
-      else
-       indirect_cnt += e->count;
+      if (e->indirect_info->polymorphic
+         && e->count.initialized_p ())
+       indirect_poly_cnt += e->count.to_gcov_type ();
+      else if (e->count.initialized_p ())
+       indirect_cnt += e->count.to_gcov_type ();
   }
-  if (max_count)
+  if (max_count.initialized_p ())
     {
       fprintf (dump_file,
               "Inlined %" PRId64 " + speculative "
@@ -2331,9 +2347,9 @@ dump_inline_stats (void)
               inlined_indir_cnt, inlined_virt_cnt, inlined_virt_indir_cnt,
               noninlined_cnt, noninlined_indir_cnt, noninlined_virt_cnt,
               noninlined_virt_indir_cnt, indirect_cnt, indirect_poly_cnt);
-      fprintf (dump_file,
-              "Removed speculations %" PRId64 "\n",
-              spec_rem);
+      fprintf (dump_file, "Removed speculations ");
+      spec_rem.dump (dump_file);
+      fprintf (dump_file, "\n");
     }
   dump_overall_stats ();
   fprintf (dump_file, "\nWhy inlining failed?\n");
index 54bb5bf30018b71de4309c553fa4e4dbc7e57438..05cad3f85b177429d90edb6870c0eb552885c4a4 100644 (file)
@@ -222,7 +222,9 @@ ipa_profile_generate_summary (void)
            time += estimate_num_insns (stmt, &eni_time_weights);
            size += estimate_num_insns (stmt, &eni_size_weights);
          }
-       account_time_size (&hashtable, histogram, bb->count, time, size);
+       if (bb->count.initialized_p ())
+         account_time_size (&hashtable, histogram, bb->count.to_gcov_type (),
+                            time, size);
       }
   histogram.qsort (cmp_counts);
 }
@@ -428,10 +430,11 @@ ipa_propagate_frequency (struct cgraph_node *node)
     }
 
   /* With profile we can decide on hot/normal based on count.  */
-  if (node->count)
+  if (node->count.initialized_p ())
     {
       bool hot = false;
-      if (node->count >= get_hot_bb_threshold ())
+      if (!(node->count == profile_count::zero ())
+         && node->count >= get_hot_bb_threshold ())
        hot = true;
       if (!hot)
        hot |= contains_hot_call_p (node);
@@ -576,7 +579,7 @@ ipa_profile (void)
 
       for (e = n->indirect_calls; e; e = e->next_callee)
        {
-         if (n->count)
+         if (n->count.initialized_p ())
            nindirect++;
          if (e->indirect_info->common_target_id)
            {
@@ -662,8 +665,8 @@ ipa_profile (void)
                      nconverted++;
                      e->make_speculative
                        (n2,
-                        apply_scale (e->count,
-                                     e->indirect_info->common_target_probability),
+                        e->count.apply_probability
+                                    (e->indirect_info->common_target_probability),
                         apply_scale (e->frequency,
                                      e->indirect_info->common_target_probability));
                      update = true;
index 81fbb520938eaf2357d01c1a7b41bffd95f9b7ee..c73ffd7b5866015d1e233c7aef1311376c53b526 100644 (file)
@@ -2982,7 +2982,7 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
        }
       /* make_speculative will update ie's cost to direct call cost. */
       ie = ie->make_speculative
-            (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
+            (callee, ie->count.apply_scale (8, 10), ie->frequency * 8 / 10);
     }
 
   return ie;
index 39db923c429122a373cec2941f821c5ea9233e67..731512482349448fbc77fc48681e4593b01b8124 100644 (file)
@@ -402,7 +402,9 @@ ipa_merge_profiles (struct cgraph_node *dst,
   if (src->profile_id && !dst->profile_id)
     dst->profile_id = src->profile_id;
 
-  if (!dst->count)
+  /* FIXME when we merge in unknown profile, we ought to set counts as
+     unsafe.  */
+  if (!dst->count.initialized_p ())
     return;
   if (symtab->dump_file)
     {
@@ -543,7 +545,7 @@ ipa_merge_profiles (struct cgraph_node *dst,
       for (e = dst->indirect_calls, e2 = src->indirect_calls; e;
           e2 = (e2 ? e2->next_callee : NULL), e = e->next_callee)
        {
-         gcov_type count = gimple_bb (e->call_stmt)->count;
+         profile_count count = gimple_bb (e->call_stmt)->count;
          int freq = compute_call_stmt_bb_frequency
                        (dst->decl,
                         gimple_bb (e->call_stmt));
@@ -561,7 +563,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
              gcc_assert (e == indirect);
              if (e2 && e2->speculative)
                e2->speculative_call_info (direct2, indirect2, ref);
-             if (indirect->count || direct->count)
+             if (indirect->count > profile_count::zero ()
+                 || direct->count > profile_count::zero ())
                {
                  /* We should mismatch earlier if there is no matching
                     indirect edge.  */
@@ -594,8 +597,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
                           indirect->count += indirect2->count;
                        }
                    }
-                 int  prob = RDIV (direct->count * REG_BR_PROB_BASE ,
-                                   direct->count + indirect->count);
+                 int  prob = direct->count.probability_in (direct->count
+                                                           + indirect->count);
                  direct->frequency = RDIV (freq * prob, REG_BR_PROB_BASE);
                  indirect->frequency = RDIV (freq * (REG_BR_PROB_BASE - prob),
                                              REG_BR_PROB_BASE);
@@ -613,7 +616,7 @@ ipa_merge_profiles (struct cgraph_node *dst,
              e2->speculative_call_info (direct, indirect, ref);
              e->count = count;
              e->frequency = freq;
-             int prob = RDIV (direct->count * REG_BR_PROB_BASE, e->count);
+             int prob = direct->count.probability_in (e->count);
              e->make_speculative (direct->callee, direct->count,
                                   RDIV (freq * prob, REG_BR_PROB_BASE));
            }
index c018e2ce634b99e7e138b355bb8e36946722950b..05daabb22fba2e484b5c6a8882f910689a7edb08 100644 (file)
@@ -506,7 +506,7 @@ doloop_modify (struct loop *loop, struct niter_desc *desc,
       redirect_edge_and_branch_force (single_succ_edge (preheader), new_preheader);
       set_immediate_dominator (CDI_DOMINATORS, new_preheader, preheader);
 
-      set_zero->count = 0;
+      set_zero->count = profile_count::uninitialized ();
       set_zero->frequency = 0;
 
       te = single_succ_edge (preheader);
index 5feaa8c20cb06447911e53d963e15042ec89d120..e39766c2a79f2528ec7a465d07d622ea60310349 100644 (file)
@@ -202,10 +202,10 @@ report_unroll (struct loop *loop, location_t locus)
   dump_printf_loc (report_flags, locus,
                    "loop unrolled %d times",
                    loop->lpt_decision.times);
-  if (profile_info)
+  if (profile_info && loop->header->count.initialized_p ())
     dump_printf (report_flags,
                  " (header execution count %d)",
-                 (int)loop->header->count);
+                 (int)loop->header->count.to_gcov_type ());
 
   dump_printf (report_flags, "\n");
 }
@@ -860,7 +860,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
   unsigned i, j, p;
   basic_block preheader, *body, swtch, ezc_swtch = NULL;
   int may_exit_copy, iter_freq, new_freq;
-  gcov_type iter_count, new_count;
+  profile_count iter_count, new_count;
   unsigned n_peel;
   edge e;
   bool extra_zero_check, last_may_exit;
@@ -970,7 +970,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
      innermost switch block.  Switch blocks and peeled loop copies are built
      from innermost outward.  */
   iter_freq = new_freq = swtch->frequency / (max_unroll + 1);
-  iter_count = new_count = swtch->count / (max_unroll + 1);
+  iter_count = new_count = swtch->count.apply_scale (1, max_unroll + 1);
   swtch->frequency = new_freq;
   swtch->count = new_count;
   single_succ_edge (swtch)->count = new_count;
@@ -1027,7 +1027,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
       /* Recompute frequency/count adjustments since initial peel copy may
         have exited and reduced those values that were computed above.  */
       iter_freq = swtch->frequency / (max_unroll + 1);
-      iter_count = swtch->count / (max_unroll + 1);
+      iter_count = swtch->count.apply_scale (1, max_unroll + 1);
       /* Add in frequency/count of edge from switch block.  */
       preheader->frequency += iter_freq;
       preheader->count += iter_count;
index 77605f70109241633dde3e9bf4e9779c6ea73ce6..e2680277bb5f2ccfcb8b695578ad8b3381c9e615 100644 (file)
@@ -256,7 +256,7 @@ lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
       streamer_write_hwi_stream (ob->main_stream, ref);
     }
 
-  streamer_write_gcov_count_stream (ob->main_stream, edge->count);
+  edge->count.stream_out (ob->main_stream);
 
   bp = bitpack_create (ob->main_stream);
   uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
@@ -458,7 +458,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
 
 
   lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
-  streamer_write_gcov_count_stream (ob->main_stream, node->count);
+  node->count.stream_out (ob->main_stream);
   streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
 
   streamer_write_hwi_stream (ob->main_stream,
@@ -1246,7 +1246,7 @@ input_node (struct lto_file_decl_data *file_data,
   if (clone_ref != LCC_NOT_FOUND)
     {
       node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
-       0, CGRAPH_FREQ_BASE, false,
+       profile_count::uninitialized (), CGRAPH_FREQ_BASE, false,
        vNULL, false, NULL, NULL);
     }
   else
@@ -1263,7 +1263,7 @@ input_node (struct lto_file_decl_data *file_data,
   if (order >= symtab->order)
     symtab->order = order + 1;
 
-  node->count = streamer_read_gcov_count (ib);
+  node->count = profile_count::stream_in (ib);
   node->count_materialization_scale = streamer_read_hwi (ib);
 
   count = streamer_read_hwi (ib);
@@ -1461,7 +1461,7 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
   struct cgraph_node *caller, *callee;
   struct cgraph_edge *edge;
   unsigned int stmt_id;
-  gcov_type count;
+  profile_count count;
   int freq;
   cgraph_inline_failed_t inline_failed;
   struct bitpack_d bp;
@@ -1480,7 +1480,7 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
   else
     callee = NULL;
 
-  count = streamer_read_gcov_count (ib);
+  count = profile_count::stream_in (ib);
 
   bp = streamer_read_bitpack (ib);
   inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
@@ -1821,8 +1821,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
        if (scale == REG_BR_PROB_BASE)
          continue;
        for (edge = node->callees; edge; edge = edge->next_callee)
-         edge->count = apply_scale (edge->count, scale);
-       node->count = apply_scale (node->count, scale);
+         edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
+       node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
       }
 }
 
index afed0372a080db9aa82e10d01ae0754c638a0b27..ccc8cf602df0b1abbce52aa3b4a92cd0508fc0f5 100644 (file)
@@ -755,13 +755,13 @@ input_cfg (struct lto_input_block *ib, struct data_in *data_in,
          unsigned int edge_flags;
          basic_block dest;
          int probability;
-         gcov_type count;
+         profile_count count;
          edge e;
 
          dest_index = streamer_read_uhwi (ib);
          probability = (int) streamer_read_hwi (ib);
-         count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
-                               count_materialization_scale);
+         count = profile_count::stream_in (ib).apply_scale
+                        (count_materialization_scale, REG_BR_PROB_BASE);
          edge_flags = streamer_read_uhwi (ib);
 
          dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
index b96b9df63b75dcdbaa493e47fb1e32f798980c6a..9378d5cf7b42f51a80116f8dff2c4eef61c3e7e3 100644 (file)
@@ -1861,7 +1861,7 @@ output_cfg (struct output_block *ob, struct function *fn)
        {
          streamer_write_uhwi (ob, e->dest->index);
          streamer_write_hwi (ob, e->probability);
-         streamer_write_gcov_count (ob, e->count);
+         e->count.stream_out (ob);
          streamer_write_uhwi (ob, e->flags);
        }
     }
index dbda491621462386ca14b2421cac709f8ba8cc91..bb4b4f9ef76e5560598dbf5bb4e3927a7a9ae51b 100644 (file)
--- a/gcc/mcf.c
+++ b/gcc/mcf.c
@@ -508,7 +508,7 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
   /* Compute constants b, k_pos, k_neg used in the cost function calculation.
      b = sqrt(avg_vertex_weight(cfg)); k_pos = b; k_neg = 50b.  */
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
-    total_vertex_weight += bb->count;
+    total_vertex_weight += bb_gcov_count (bb);
 
   sqrt_avg_vertex_weight = mcf_sqrt (total_vertex_weight /
                                     n_basic_blocks_for_fn (cfun));
@@ -526,8 +526,8 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
   {
     /* v'->v'': index1->(index1+1).  */
     i = 2 * bb->index;
-    fcost = (gcov_type) COST (k_pos, bb->count);
-    add_fixup_edge (fixup_graph, i, i + 1, VERTEX_SPLIT_EDGE, bb->count,
+    fcost = (gcov_type) COST (k_pos, bb_gcov_count (bb));
+    add_fixup_edge (fixup_graph, i, i + 1, VERTEX_SPLIT_EDGE, bb_gcov_count (bb),
                     fcost, CAP_INFINITY);
     fixup_graph->num_vertices++;
 
@@ -538,9 +538,9 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
       if (EDGE_INFO (e) && EDGE_INFO (e)->ignore)
         continue;
       j = 2 * e->dest->index;
-      fcost = (gcov_type) COST (k_pos, e->count);
-      add_fixup_edge (fixup_graph, i + 1, j, REDIRECT_EDGE, e->count, fcost,
-                      CAP_INFINITY);
+      fcost = (gcov_type) COST (k_pos, edge_gcov_count (e));
+      add_fixup_edge (fixup_graph, i + 1, j, REDIRECT_EDGE, edge_gcov_count (e),
+                     fcost, CAP_INFINITY);
     }
   }
 
@@ -1132,12 +1132,12 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
       /* Fixup BB.  */
       if (dump_file)
         fprintf (dump_file,
-                 "BB%d: %" PRId64 "", bb->index, bb->count);
+                 "BB%d: %" PRId64 "", bb->index, bb_gcov_count (bb));
 
       pfedge = find_fixup_edge (fixup_graph, i, i + 1);
       if (pfedge->flow)
         {
-          bb->count += pfedge->flow;
+          bb_gcov_count (bb) += pfedge->flow;
          if (dump_file)
            {
              fprintf (dump_file, " + %" PRId64 "(",
@@ -1152,7 +1152,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
       /* Deduct flow from normalized reverse edge.  */
       if (pfedge->norm_vertex_index && pfedge_n->flow)
         {
-          bb->count -= pfedge_n->flow;
+          bb_gcov_count (bb) -= pfedge_n->flow;
          if (dump_file)
            {
              fprintf (dump_file, " - %" PRId64 "(",
@@ -1163,7 +1163,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
            }
         }
       if (dump_file)
-        fprintf (dump_file, " = %" PRId64 "\n", bb->count);
+        fprintf (dump_file, " = %" PRId64 "\n", bb_gcov_count (bb));
 
       /* Fixup edge.  */
       FOR_EACH_EDGE (e, ei, bb->succs)
@@ -1175,7 +1175,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
           j = 2 * e->dest->index;
           if (dump_file)
            fprintf (dump_file, "%d->%d: %" PRId64 "",
-                    bb->index, e->dest->index, e->count);
+                    bb->index, e->dest->index, edge_gcov_count (e));
 
           pfedge = find_fixup_edge (fixup_graph, i + 1, j);
 
@@ -1184,7 +1184,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
              /* Non-self edge.  */
              if (pfedge->flow)
                {
-                 e->count += pfedge->flow;
+                 edge_gcov_count (e) += pfedge->flow;
                  if (dump_file)
                    {
                      fprintf (dump_file, " + %" PRId64 "(",
@@ -1199,7 +1199,7 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
              /* Deduct flow from normalized reverse edge.  */
              if (pfedge->norm_vertex_index && pfedge_n->flow)
                {
-                 e->count -= pfedge_n->flow;
+                 edge_gcov_count (e) -= pfedge_n->flow;
                  if (dump_file)
                    {
                      fprintf (dump_file, " - %" PRId64 "(",
@@ -1217,8 +1217,8 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
              pfedge = find_fixup_edge (fixup_graph, j, i + 1);
              pfedge_n =
                find_fixup_edge (fixup_graph, i + 1, pfedge->norm_vertex_index);
-             e->count += pfedge_n->flow;
-             bb->count += pfedge_n->flow;
+             edge_gcov_count (e) += pfedge_n->flow;
+             bb_gcov_count (bb) += pfedge_n->flow;
              if (dump_file)
                {
                  fprintf (dump_file, "(self edge)");
@@ -1230,26 +1230,29 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
                }
            }
 
-          if (bb->count)
-           e->probability = REG_BR_PROB_BASE * e->count / bb->count;
+          if (bb_gcov_count (bb))
+           e->probability = RDIV (REG_BR_PROB_BASE * edge_gcov_count (e),
+                                  bb_gcov_count (bb));
           if (dump_file)
            fprintf (dump_file, " = %" PRId64 "\t(%.1f%%)\n",
-                    e->count, e->probability * 100.0 / REG_BR_PROB_BASE);
+                    edge_gcov_count (e),
+                    e->probability * 100.0 / REG_BR_PROB_BASE);
         }
     }
 
-  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
+  bb_gcov_count (ENTRY_BLOCK_PTR_FOR_FN (cfun)) =
                     sum_edge_counts (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
-  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
+  bb_gcov_count (EXIT_BLOCK_PTR_FOR_FN (cfun)) =
                     sum_edge_counts (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
 
   /* Compute edge probabilities.  */
   FOR_ALL_BB_FN (bb, cfun)
     {
-      if (bb->count)
+      if (bb_gcov_count (bb))
         {
           FOR_EACH_EDGE (e, ei, bb->succs)
-            e->probability = REG_BR_PROB_BASE * e->count / bb->count;
+            e->probability = RDIV (REG_BR_PROB_BASE * edge_gcov_count (e),
+                                  bb_gcov_count (bb));
         }
       else
         {
@@ -1282,15 +1285,15 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
               current_function_name ());
       FOR_EACH_BB_FN (bb, cfun)
         {
-          if ((bb->count != sum_edge_counts (bb->preds))
-               || (bb->count != sum_edge_counts (bb->succs)))
+          if ((bb_gcov_count (bb) != sum_edge_counts (bb->preds))
+               || (bb_gcov_count (bb) != sum_edge_counts (bb->succs)))
             {
               fprintf (dump_file,
                        "BB%d(%" PRId64 ")  **INVALID**: ",
-                       bb->index, bb->count);
+                       bb->index, bb_gcov_count (bb));
               fprintf (stderr,
                        "******** BB%d(%" PRId64
-                       ")  **INVALID**: \n", bb->index, bb->count);
+                       ")  **INVALID**: \n", bb->index, bb_gcov_count (bb));
               fprintf (dump_file, "in_edges=%" PRId64 " ",
                        sum_edge_counts (bb->preds));
               fprintf (dump_file, "out_edges=%" PRId64 "\n",
@@ -1378,7 +1381,7 @@ sum_edge_counts (vec<edge, va_gc> *to_edges)
     {
       if (EDGE_INFO (e) && EDGE_INFO (e)->ignore)
         continue;
-      sum += e->count;
+      sum += edge_gcov_count (e);
     }
   return sum;
 }
index 720391f7cfed376b781cae9d7139ba1cadbf0983..005c6e596ad526bc0dc7cdbac1ae224417403d1a 100644 (file)
@@ -1422,13 +1422,16 @@ sms_schedule (void)
       get_ebb_head_tail (bb, bb, &head, &tail);
       latch_edge = loop_latch_edge (loop);
       gcc_assert (single_exit (loop));
-      if (single_exit (loop)->count)
-       trip_count = latch_edge->count / single_exit (loop)->count;
+      if (single_exit (loop)->count > profile_count::zero ())
+       trip_count = latch_edge->count.to_gcov_type ()
+                    / single_exit (loop)->count.to_gcov_type ();
 
       /* Perform SMS only on loops that their average count is above threshold.  */
 
-      if ( latch_edge->count
-          && (latch_edge->count < single_exit (loop)->count * SMS_LOOP_AVERAGE_COUNT_THRESHOLD))
+      if ( latch_edge->count > profile_count::zero ()
+          && (latch_edge->count
+             < single_exit (loop)->count.apply_scale
+                                (SMS_LOOP_AVERAGE_COUNT_THRESHOLD, 1)))
        {
          if (dump_file)
            {
@@ -1438,7 +1441,7 @@ sms_schedule (void)
                {
                  fprintf (dump_file, "SMS loop-count ");
                  fprintf (dump_file, "%" PRId64,
-                          (int64_t) bb->count);
+                          (int64_t) bb->count.to_gcov_type ());
                  fprintf (dump_file, "\n");
                   fprintf (dump_file, "SMS trip-count ");
                   fprintf (dump_file, "%" PRId64,
@@ -1549,8 +1552,9 @@ sms_schedule (void)
 
       latch_edge = loop_latch_edge (loop);
       gcc_assert (single_exit (loop));
-      if (single_exit (loop)->count)
-       trip_count = latch_edge->count / single_exit (loop)->count;
+      if (single_exit (loop)->count > profile_count::zero ())
+       trip_count = latch_edge->count.to_gcov_type ()
+                    / single_exit (loop)->count.to_gcov_type ();
 
       if (dump_file)
        {
@@ -1560,7 +1564,7 @@ sms_schedule (void)
            {
              fprintf (dump_file, "SMS loop-count ");
              fprintf (dump_file, "%" PRId64,
-                      (int64_t) bb->count);
+                      (int64_t) bb->count.to_gcov_type ());
              fprintf (dump_file, "\n");
              fprintf (dump_file, "SMS profile-sum-max ");
              fprintf (dump_file, "%" PRId64,
index cbc0df2606cf3b0a956233d7ac6dedaab6ecc39b..e3632a26f66ffd3366c5f392c290a6372c99e2ef 100644 (file)
@@ -1045,14 +1045,16 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
   struct unoccr *occr, *avail_occrs = NULL;
   struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
   int npred_ok = 0;
-  gcov_type ok_count = 0; /* Redundant load execution count.  */
-  gcov_type critical_count = 0; /* Execution count of critical edges.  */
+  /* Redundant load execution count.  */
+  profile_count ok_count = profile_count::zero ();
+  /* Execution count of critical edges.  */
+  profile_count critical_count = profile_count::zero ();
   edge_iterator ei;
   bool critical_edge_split = false;
 
   /* The execution count of the loads to be added to make the
      load fully redundant.  */
-  gcov_type not_ok_count = 0;
+  profile_count not_ok_count = profile_count::zero ();
   basic_block pred_bb;
 
   pat = PATTERN (insn);
@@ -1106,13 +1108,14 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
            avail_insn = NULL;
        }
 
-      if (EDGE_CRITICAL_P (pred))
+      if (EDGE_CRITICAL_P (pred) && pred->count.initialized_p ())
        critical_count += pred->count;
 
       if (avail_insn != NULL_RTX)
        {
          npred_ok++;
-         ok_count += pred->count;
+         if (pred->count.initialized_p ())
+           ok_count = ok_count + pred->count;
          if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
                                                    copy_rtx (avail_reg)))))
            {
@@ -1136,7 +1139,8 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
          /* Adding a load on a critical edge will cause a split.  */
          if (EDGE_CRITICAL_P (pred))
            critical_edge_split = true;
-         not_ok_count += pred->count;
+         if (pred->count.initialized_p ())
+           not_ok_count = not_ok_count + pred->count;
          unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
                                                    sizeof (struct unoccr));
          unoccr->insn = NULL;
@@ -1160,9 +1164,11 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
     goto cleanup;
 
   /* Check if it's worth applying the partial redundancy elimination.  */
-  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
+  if (ok_count.to_gcov_type ()
+      < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count.to_gcov_type ())
     goto cleanup;
-  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
+  if (ok_count.to_gcov_type ()
+      < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count.to_gcov_type ())
     goto cleanup;
 
   /* Generate moves to the loaded register from where
index ac35fa41129ff46bb77182a0fefcebe1950a40c0..8eb28634b2feaf83197328b07f93b5869c9768c0 100644 (file)
@@ -172,14 +172,14 @@ set_hot_bb_threshold (gcov_type min)
 /* Return TRUE if frequency FREQ is considered to be hot.  */
 
 bool
-maybe_hot_count_p (struct function *fun, gcov_type count)
+maybe_hot_count_p (struct function *, profile_count count)
 {
-  if (fun && profile_status_for_fn (fun) != PROFILE_READ)
+  if (!count.initialized_p ())
     return true;
   /* Code executed at most once is not hot.  */
-  if (profile_info->runs >= count)
+  if (count <= MAX (profile_info ? profile_info->runs : 1, 1))
     return false;
-  return (count >= get_hot_bb_threshold ());
+  return (count.to_gcov_type () >= get_hot_bb_threshold ());
 }
 
 /* Return true in case BB can be CPU intensive and should be optimized
@@ -210,42 +210,17 @@ maybe_hot_edge_p (edge e)
    
 static bool
 probably_never_executed (struct function *fun,
-                         gcov_type count, int frequency)
+                         profile_count count, int)
 {
   gcc_checking_assert (fun);
-  if (profile_status_for_fn (fun) == PROFILE_READ)
+  if (count.initialized_p () && profile_status_for_fn (fun) == PROFILE_READ)
     {
-      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
-      if (count * unlikely_count_fraction >= profile_info->runs)
-       return false;
-      if (!frequency)
+      if (count == profile_count::zero ())
        return true;
-      if (!ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency)
+
+      int unlikely_count_fraction = PARAM_VALUE (UNLIKELY_BB_COUNT_FRACTION);
+      if (count.apply_scale (unlikely_count_fraction, 1) >= profile_info->runs)
        return false;
-      if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
-       {
-          gcov_type computed_count;
-          /* Check for possibility of overflow, in which case entry bb count
-             is large enough to do the division first without losing much
-             precision.  */
-         if (ENTRY_BLOCK_PTR_FOR_FN (fun)->count < REG_BR_PROB_BASE *
-             REG_BR_PROB_BASE)
-            {
-              gcov_type scaled_count
-                 = frequency * ENTRY_BLOCK_PTR_FOR_FN (fun)->count *
-            unlikely_count_fraction;
-             computed_count = RDIV (scaled_count,
-                                    ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
-            }
-          else
-            {
-             computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (fun)->count,
-                                    ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency);
-              computed_count *= frequency * unlikely_count_fraction;
-            }
-          if (computed_count >= profile_info->runs)
-            return false;
-       }
       return true;
     }
   if ((!profile_info || !(opt_for_fn (fun->decl, flag_branch_probabilities)))
@@ -772,13 +747,16 @@ dump_prediction (FILE *file, enum br_predictor predictor, int probability,
           edge_info_str, reason_messages[reason],
           probability * 100.0 / REG_BR_PROB_BASE);
 
-  if (bb->count)
+  if (bb->count.initialized_p ())
     {
-      fprintf (file, "  exec %" PRId64, bb->count);
+      fprintf (file, "  exec ");
+      bb->count.dump (file);
       if (e)
        {
-         fprintf (file, " hit %" PRId64, e->count);
-         fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
+         fprintf (file, " hit ");
+         e->count.dump (file);
+         fprintf (file, " (%.1f%%)", e->count.to_gcov_type() * 100.0
+                  / bb->count.to_gcov_type ());
        }
     }
 
@@ -1113,7 +1091,7 @@ combine_predictions_for_bb (basic_block bb, bool dry_run)
          if (pred->ep_probability <= PROB_VERY_UNLIKELY)
            unlikely_edges.add (pred->ep_edge);
 
-      if (!bb->count && !dry_run)
+      if (!bb->count.initialized_p () && !dry_run)
        set_even_probabilities (bb, &unlikely_edges);
       clear_bb_predictions (bb);
       if (dump_file)
@@ -1239,7 +1217,7 @@ combine_predictions_for_bb (basic_block bb, bool dry_run)
     }
   clear_bb_predictions (bb);
 
-  if (!bb->count && !dry_run)
+  if (!bb->count.initialized_p () && !dry_run)
     {
       first->probability = combined_probability;
       second->probability = REG_BR_PROB_BASE - combined_probability;
@@ -3011,7 +2989,10 @@ propagate_freq (basic_block head, bitmap tovisit)
       BLOCK_INFO (bb)->npredecessors = count;
       /* When function never returns, we will never process exit block.  */
       if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
-       bb->count = bb->frequency = 0;
+       {
+         bb->count = profile_count::zero ();
+         bb->frequency = 0;
+       }
     }
 
   BLOCK_INFO (head)->frequency = 1;
@@ -3152,7 +3133,7 @@ estimate_loops (void)
    whether it is expected to be hot given the CALL_COUNT.  */
 
 static void
-drop_profile (struct cgraph_node *node, gcov_type call_count)
+drop_profile (struct cgraph_node *node, profile_count call_count)
 {
   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
   /* In the case where this was called by another function with a
@@ -3220,18 +3201,21 @@ handle_missing_profiles (void)
   FOR_EACH_DEFINED_FUNCTION (node)
     {
       struct cgraph_edge *e;
-      gcov_type call_count = 0;
+      profile_count call_count = profile_count::zero ();
       gcov_type max_tp_first_run = 0;
       struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
 
-      if (node->count)
+      if (!(node->count == profile_count::zero ()))
         continue;
       for (e = node->callers; e; e = e->next_caller)
       {
-        call_count += e->count;
+       if (e->count.initialized_p ())
+         {
+            call_count = call_count + e->count;
 
-       if (e->caller->tp_first_run > max_tp_first_run)
-         max_tp_first_run = e->caller->tp_first_run;
+           if (e->caller->tp_first_run > max_tp_first_run)
+             max_tp_first_run = e->caller->tp_first_run;
+         }
       }
 
       /* If time profile is missing, let assign the maximum that comes from
@@ -3239,9 +3223,9 @@ handle_missing_profiles (void)
       if (!node->tp_first_run && max_tp_first_run)
        node->tp_first_run = max_tp_first_run + 1;
 
-      if (call_count
+      if (call_count > 0
           && fn && fn->cfg
-          && (call_count * unlikely_count_fraction >= profile_info->runs))
+          && (call_count.apply_scale (unlikely_count_fraction, 1)
+              >= profile_info->runs))
         {
           drop_profile (node, call_count);
           worklist.safe_push (node);
@@ -3265,7 +3249,7 @@ handle_missing_profiles (void)
           if (DECL_COMDAT (callee->decl) && fn && fn->cfg
               && profile_status_for_fn (fn) == PROFILE_READ)
             {
-              drop_profile (node, 0);
+              drop_profile (node, profile_count::zero ());
               worklist.safe_push (callee);
             }
         }
@@ -3275,26 +3259,31 @@ handle_missing_profiles (void)
 /* Convert counts measured by profile driven feedback to frequencies.
    Return nonzero iff there was any nonzero execution count.  */
 
-int
+bool
 counts_to_freqs (void)
 {
-  gcov_type count_max, true_count_max = 0;
+  gcov_type count_max;
+  profile_count true_count_max = profile_count::zero ();
   basic_block bb;
 
   /* Don't overwrite the estimated frequencies when the profile for
      the function is missing.  We may drop this function PROFILE_GUESSED
      later in drop_profile ().  */
-  if (!flag_auto_profile && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
+  if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ()
+      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
     return 0;
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
-    true_count_max = MAX (bb->count, true_count_max);
+    if (bb->count > true_count_max)
+      true_count_max = bb->count;
+
+  count_max = MAX (true_count_max.to_gcov_type (), 1);
 
-  count_max = MAX (true_count_max, 1);
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
-    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
+    if (bb->count.initialized_p ())
+      bb->frequency = RDIV (bb->count.to_gcov_type () * BB_FREQ_MAX, count_max);
 
-  return true_count_max;
+  return !(true_count_max == profile_count::zero ());
 }
 
 /* Return true if function is likely to be expensive, so there is no point to
@@ -3657,14 +3646,15 @@ rebuild_frequencies (void)
      which may also lead to frequencies incorrectly reduced to 0. There
      is less precision in the probabilities, so we only do this for small
      max counts.  */
-  gcov_type count_max = 0;
+  profile_count count_max = profile_count::zero ();
   basic_block bb;
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
-    count_max = MAX (bb->count, count_max);
+    if (bb->count > count_max)
+      count_max = bb->count;
 
   if (profile_status_for_fn (cfun) == PROFILE_GUESSED
       || (!flag_auto_profile && profile_status_for_fn (cfun) == PROFILE_READ
-         && count_max < REG_BR_PROB_BASE/10))
+         && count_max < REG_BR_PROB_BASE / 10))
     {
       loop_optimizer_init (0);
       add_noreturn_fake_exit_edges ();
@@ -3725,23 +3715,23 @@ report_predictor_hitrates (void)
 void
 force_edge_cold (edge e, bool impossible)
 {
-  gcov_type count_sum = 0;
+  profile_count count_sum = profile_count::zero ();
   int prob_sum = 0;
   edge_iterator ei;
   edge e2;
-  gcov_type old_count = e->count;
+  profile_count old_count = e->count;
   int old_probability = e->probability;
-  gcov_type gcov_scale = REG_BR_PROB_BASE;
   int prob_scale = REG_BR_PROB_BASE;
 
   /* If edge is already improbably or cold, just return.  */
   if (e->probability <= (impossible ? PROB_VERY_UNLIKELY : 0)
-      && (!impossible || !e->count))
+      && (!impossible || e->count == profile_count::zero ()))
     return;
   FOR_EACH_EDGE (e2, ei, e->src->succs)
     if (e2 != e)
       {
-       count_sum += e2->count;
+       if (e2->count.initialized_p ())
+         count_sum += e2->count;
        prob_sum += e2->probability;
       }
 
@@ -3751,14 +3741,13 @@ force_edge_cold (edge e, bool impossible)
     {
       e->probability
         = MIN (e->probability, impossible ? 0 : PROB_VERY_UNLIKELY);
+      if (impossible)
+       e->count = profile_count::zero ();
       if (old_probability)
-       e->count = RDIV (e->count * e->probability, old_probability);
+       e->count = e->count.apply_scale (e->probability, old_probability);
       else
-        e->count = MIN (e->count, impossible ? 0 : 1);
+        e->count = e->count.apply_scale (1, REG_BR_PROB_BASE);
 
-      if (count_sum)
-       gcov_scale = RDIV ((count_sum + old_count - e->count) * REG_BR_PROB_BASE,
-                          count_sum);
       prob_scale = RDIV ((REG_BR_PROB_BASE - e->probability) * REG_BR_PROB_BASE,
                         prob_sum);
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -3766,10 +3755,12 @@ force_edge_cold (edge e, bool impossible)
                 "probability to other edges.\n",
                 e->src->index, e->dest->index,
                 impossible ? "impossible" : "cold");
+      profile_count count_sum2 = count_sum + old_count - e->count;
       FOR_EACH_EDGE (e2, ei, e->src->succs)
        if (e2 != e)
          {
-           e2->count = RDIV (e2->count * gcov_scale, REG_BR_PROB_BASE);
+           if (count_sum > 0)
+             e2->count = e2->count.apply_scale (count_sum2, count_sum);
            e2->probability = RDIV (e2->probability * prob_scale,
                                    REG_BR_PROB_BASE);
          }
@@ -3785,16 +3776,19 @@ force_edge_cold (edge e, bool impossible)
         This in general is difficult task to do, but handle special case when
         BB has only one predecestor.  This is common case when we are updating
         after loop transforms.  */
-      if (!prob_sum && !count_sum && single_pred_p (e->src)
-         && e->src->frequency > (impossible ? 0 : 1))
+      if (!prob_sum && count_sum == profile_count::zero ()
+         && single_pred_p (e->src) && e->src->frequency > (impossible ? 0 : 1))
        {
          int old_frequency = e->src->frequency;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
                     impossible ? "impossible" : "cold");
          e->src->frequency = MIN (e->src->frequency, impossible ? 0 : 1);
-         e->src->count = e->count = RDIV (e->src->count * e->src->frequency,
-                                          old_frequency);
+         if (impossible)
+           e->src->count = e->count = profile_count::zero ();
+         else
+           e->src->count = e->count = e->count.apply_scale (e->src->frequency,
+                                                            old_frequency);
          force_edge_cold (single_pred_edge (e->src), impossible);
        }
       else if (dump_file && (dump_flags & TDF_DETAILS)
index 6fc7c2c89f595eaf278e7ccd2b571c7c8424b600..d8ef0ab6c42727e2c181b39cd2310fef0f7b1d53 100644 (file)
@@ -20,6 +20,8 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GCC_PREDICT_H
 #define GCC_PREDICT_H
 
+#include "profile-count.h"
+
 /* Random guesstimation given names.
    PROB_VERY_UNLIKELY should be small enough so basic block predicted
    by it gets below HOT_BB_FREQUENCY_FRACTION.  */
@@ -47,7 +49,7 @@ enum prediction
 
 extern gcov_type get_hot_bb_threshold (void);
 extern void set_hot_bb_threshold (gcov_type);
-extern bool maybe_hot_count_p (struct function *, gcov_type);
+extern bool maybe_hot_count_p (struct function *, profile_count);
 extern bool maybe_hot_bb_p (struct function *, const_basic_block);
 extern bool maybe_hot_edge_p (edge);
 extern bool probably_never_executed_bb_p (struct function *, const_basic_block);
@@ -83,7 +85,7 @@ extern void invert_br_probabilities (rtx);
 extern void guess_outgoing_edge_probabilities (basic_block);
 extern void tree_estimate_probability (bool);
 extern void handle_missing_profiles (void);
-extern int counts_to_freqs (void);
+extern bool counts_to_freqs (void);
 extern bool expensive_function_p (int);
 extern void estimate_bb_frequencies (bool);
 extern void compute_function_frequency (void);
index 03d80bdac58fb73386db3c6d0895c448ab5d7e2f..ac8963ec6bb4ee6618c2b0e1765e22efef6fc0ed 100644 (file)
@@ -24,7 +24,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "rtl.h"
 #include "alias.h"
 #include "tree.h"
-#include "cfg.h"
 #include "flags.h"
 #include "predict.h"
 #include "function.h"
index 20bdafdb580cd43cb548febe440ce2866052b0bd..dc8d9800de578097b4d87f8546af5a9fecf2ae8b 100644 (file)
@@ -35,6 +35,7 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GENERATOR_FILE
 #include "alias.h"
 #include "tree.h"
+#include "basic-block.h"
 #include "cfg.h"
 #include "print-tree.h"
 #include "flags.h"
diff --git a/gcc/profile-count.c b/gcc/profile-count.c
new file mode 100644 (file)
index 0000000..5ad7178
--- /dev/null
@@ -0,0 +1,80 @@
+/* Profile counter container type.
+   Copyright (C) 2017 Free Software Foundation, Inc.
+   Contributed by Jan Hubicka
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "profile-count.h"
+#include "options.h"
+#include "tree.h"
+#include "basic-block.h"
+#include "cfg.h"
+#include "function.h"
+#include "gimple.h"
+#include "data-streamer.h"
+#include "cgraph.h"
+
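+/* Print THIS to file F, or "uninitialized" when no value is known.  */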
+void
+profile_count::dump (FILE *f) const
+{
+  if (!initialized_p ())
+    fprintf (f, "uninitialized");
+  else
+    fprintf (f, "%" PRId64, m_val);
+}
+
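+/* Dump THIS to stderr.  */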
+void
+profile_count::debug () const
+{
+  dump (stderr);
+}
+
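+/* Return true if this count and OTHER differ significantly: both are
+   initialized, they differ by at least 100, and their ratio is more than
+   about 1% away from 1.  */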
+bool
+profile_count::differs_from_p (profile_count other) const
+{
+  if (!initialized_p () || !other.initialized_p ())
+    return false;
+  if (m_val - other.m_val < 100 && other.m_val - m_val < 100)
+    return false;
+  if (!other.m_val)
+    return true;
+  int64_t ratio = m_val * 100 / other.m_val;
+  return ratio < 99 || ratio > 101;
+}
+
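+/* Read a profile_count from input block IB and return it.  */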
+profile_count
+profile_count::stream_in (struct lto_input_block *ib)
+{
+  profile_count ret;
+  ret.m_val = streamer_read_gcov_count (ib);
+  return ret;
+}
+
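+/* Write THIS to output block OB.  */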
+void
+profile_count::stream_out (struct output_block *ob)
+{
+  streamer_write_gcov_count (ob, m_val);
+}
+
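+/* Write THIS to output stream OB.  */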
+void
+profile_count::stream_out (struct lto_output_stream *ob)
+{
+  streamer_write_gcov_count_stream (ob, m_val);
+}
diff --git a/gcc/profile-count.h b/gcc/profile-count.h
new file mode 100644 (file)
index 0000000..2ac7f32
--- /dev/null
@@ -0,0 +1,290 @@
+/* Profile counter container type.
+   Copyright (C) 2017 Free Software Foundation, Inc.
+   Contributed by Jan Hubicka
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#ifndef GCC_PROFILE_COUNT_H
+#define GCC_PROFILE_COUNT_H
+
+
+/* The base value for branch probability notes and edge probabilities.  */
+#define REG_BR_PROB_BASE  10000
+
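+/* Rounding divide: divide and round the result to the nearest integer.
+   For example, RDIV (7, 2) is 4 rather than 3.  */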
+#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))
+
+/* Main data type to hold profile counters in GCC.  In most cases profile
+   counts originate from profile feedback.  They are 64bit integers
+   representing the number of executions during the train run.
+   As the profile is maintained during compilation, many adjustments are
+   made.  Not all transformations can be made precisely, most importantly
+   when code is being duplicated.  It also may happen that part of the CFG
+   has known profile counts while other parts do not - for example when LTO
+   optimizing a partly profiled program or when the profile was lost due to
+   COMDAT merging.
+
+   For this reason profile_count tracks more information than just an
+   unsigned integer and it is also prepared for profile mismatches.
+   The API of this data type represents operations that are natural
+   on profile counts - sum, difference, and scaling by scales and
+   probabilities.  All operations are safe: they never produce negative
+   counts and they yield an uninitialized result whenever any of the
+   parameters is uninitialized.
+
+   All comparisons are three-state and conservative, as is the handling of
+   probabilities.  Thus a < b is not equivalent to !(a >= b).
+
+   The following pre-defined counts are available:
+
+   profile_count::zero ()  for code that is known to execute zero times at
+      runtime (this can be detected statically, i.e. for paths leading to
+      abort ());
+   profile_count::one ()  for code that is known to execute once (such as
+      the main () function);
+   profile_count::uninitialized ()  for unknown execution count.
+
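+   A minimal usage sketch (illustrative only; it relies on the operations
+   declared below):
+
+     profile_count a = profile_count::from_gcov_type (100);
+     profile_count u = profile_count::uninitialized ();
+
+     u < a;                     // false; the comparison cannot be decided
+     u >= a;                    // also false, so u < a is not !(u >= a)
+     a.apply_scale (1, 2);      // a count of 50
+     (a + u).initialized_p ();  // false; uninitialized propagates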
+ */
+
+
+class GTY(()) profile_count
+{
+  /* Use int64_t to hold basic block counters.  Should be at least
+     64bit.  Although a counter cannot be negative, we use a signed
+     type to hold extra states such as the uninitialized value.  */
+
+  int64_t m_val;
+
+  /* Assume numbers smaller than this are safe to multiply without
+     overflowing.  This is set to make the testsuite pass; in the future we
+     may implement precise multiplication in higher ranges.  */
+  static const int64_t max_safe_multiplier = 131072;
+public:
+
+  /* Used for counters which are expected to be never executed.  */
+  static profile_count zero ()
+    {
+      return from_gcov_type (0);
+    }
+  static profile_count one ()
+    {
+      return from_gcov_type (1);
+    }
+  /* Value of a counter which has not been initialized, either because
+     initialization did not happen yet or because the profile is unknown.  */
+  static profile_count uninitialized ()
+    {
+      profile_count c;
+      c.m_val = -1;
+      return c;
+    }
+
+  /* The profiling runtime uses gcov_type, which is usually a 64bit integer.
+     Conversions back and forth are used to read the coverage and get it
+     into the internal representation.  */
+  static profile_count from_gcov_type (gcov_type v)
+    {
+      profile_count ret;
+      gcc_checking_assert (v >= 0);
+      ret.m_val = v;
+      return ret;
+    }
+
+  /* Conversion to gcov_type is lossy.  */
+  gcov_type to_gcov_type () const
+    {
+      gcc_checking_assert (initialized_p ());
+      return m_val;
+    }
+
+  /* Return true if value has been initialized.  */
+  bool initialized_p () const
+    {
+      return m_val != -1;
+    }
+  /* Return true if value can be trusted.  */
+  bool reliable_p () const
+    {
+      return initialized_p ();
+    }
+
+  /* Basic operations.  */
+  bool operator== (const profile_count &other) const
+    {
+      return m_val == other.m_val;
+    }
+  profile_count operator+ (const profile_count &other) const
+    {
+      if (other == profile_count::zero ())
+       return *this;
+      if (*this == profile_count::zero ())
+       return other;
+      if (!initialized_p () || !other.initialized_p ())
+       return profile_count::uninitialized ();
+
+      profile_count ret;
+      ret.m_val = m_val + other.m_val;
+      return ret;
+    }
+  profile_count &operator+= (const profile_count &other)
+    {
+      if (other == profile_count::zero ())
+       return *this;
+      if (*this == profile_count::zero ())
+       {
+         *this = other;
+         return *this;
+       }
+      if (!initialized_p () || !other.initialized_p ())
+       return *this = profile_count::uninitialized ();
+      else
+       m_val += other.m_val;
+      return *this;
+    }
+  profile_count operator- (const profile_count &other) const
+    {
+      if (*this == profile_count::zero () || other == profile_count::zero ())
+       return *this;
+      if (!initialized_p () || !other.initialized_p ())
+       return profile_count::uninitialized ();
+      profile_count ret;
+      ret.m_val = MAX (m_val - other.m_val, 0);
+      return ret;
+    }
+  profile_count &operator-= (const profile_count &other)
+    {
+      if (*this == profile_count::zero () || other == profile_count::zero ())
+       return *this;
+      if (!initialized_p () || !other.initialized_p ())
+       return *this = profile_count::uninitialized ();
+      else
+       m_val = MAX (m_val - other.m_val, 0);
+      return *this;
+    }
+
+  /* Return false if profile_count is bogus.  */
+  bool verify () const
+    {
+      return m_val >= -1;
+    }
+
+  /* Comparisons are three-state and conservative.  False is returned if
+     the inequality cannot be decided.  */
+  bool operator< (const profile_count &other) const
+    {
+      return initialized_p () && other.initialized_p () && m_val < other.m_val;
+    }
+  bool operator> (const profile_count &other) const
+    {
+      return initialized_p () && other.initialized_p () && m_val > other.m_val;
+    }
+  bool operator< (const gcov_type other) const
+    {
+      return initialized_p () && m_val < other;
+    }
+  bool operator> (const gcov_type other) const
+    {
+      return initialized_p () && m_val > other;
+    }
+
+  bool operator<= (const profile_count &other) const
+    {
+      return initialized_p () && other.initialized_p () && m_val <= other.m_val;
+    }
+  bool operator>= (const profile_count &other) const
+    {
+      return initialized_p () && m_val >= other.m_val;
+    }
+  bool operator<= (const gcov_type other) const
+    {
+      return initialized_p () && m_val <= other;
+    }
+  bool operator>= (const gcov_type other) const
+    {
+      return initialized_p () && m_val >= other;
+    }
+
+  /* PROB is a probability in the scale 0...REG_BR_PROB_BASE.  Scale the
+     counter accordingly.  */
+  profile_count apply_probability (int prob) const
+    {
+      gcc_checking_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
+      if (!initialized_p ())
+       return profile_count::uninitialized ();
+      profile_count ret;
+      ret.m_val = RDIV (m_val * prob, REG_BR_PROB_BASE);
+      return ret;
+    }
+  /* Return *THIS * NUM / DEN.  */
+  profile_count apply_scale (int64_t num, int64_t den) const
+    {
+      if (!initialized_p ())
+       return profile_count::uninitialized ();
+      profile_count ret;
+      /* FIXME: shrink wrapping violates this sanity check.  */
+      gcc_checking_assert ((num >= 0
+                           && (num <= REG_BR_PROB_BASE
+                               || den <= REG_BR_PROB_BASE)
+                           && den > 0) || 1);
+      ret.m_val = RDIV (m_val * num, den);
+      return ret;
+    }
+  profile_count apply_scale (profile_count num, profile_count den) const
+    {
+      if (*this == profile_count::zero ())
+       return profile_count::zero ();
+      if (!initialized_p () || !num.initialized_p () || !den.initialized_p ())
+       return profile_count::uninitialized ();
+      profile_count ret;
+      gcc_checking_assert (den > 0);
+      /* Take care of overflows!  */
+      if (num.m_val < max_safe_multiplier || m_val < max_safe_multiplier)
+        ret.m_val = RDIV (m_val * num.m_val, den.m_val);
+      else
+        ret.m_val = RDIV (m_val * RDIV (num.m_val * max_safe_multiplier,
+                                       den.m_val), max_safe_multiplier);
+      return ret;
+    }
+
+  /* Return probability of event with counter THIS within event with counter
+     OVERALL.  */
+  int probability_in (profile_count overall)
+    {
+      if (*this == profile_count::zero ())
+       return 0;
+      if (!initialized_p () || !overall.initialized_p ())
+       return REG_BR_PROB_BASE / 2;
+      if (overall < *this)
+       return REG_BR_PROB_BASE;
+      if (!overall.m_val)
+       return REG_BR_PROB_BASE / 2;
+      return RDIV (m_val * REG_BR_PROB_BASE, overall.m_val);
+    }
+
+  /* Output THIS to F.  */
+  void dump (FILE *f) const;
+
+  /* Print THIS to stderr.  */
+  void debug () const;
+
+  /* Return true if THIS is known to differ significantly from OTHER.  */
+  bool differs_from_p (profile_count other) const;
+
+  /* LTO streaming support.  */
+  static profile_count stream_in (struct lto_input_block *);
+  void stream_out (struct output_block *);
+  void stream_out (struct lto_output_stream *);
+};
+#endif
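
To make the semantics above concrete, here is a small standalone C++ sketch (illustrative only, not part of the patch; it uses plain C++ in place of GCC internals such as gcov_type and gcc_checking_assert, and the names BASE, rdiv and count are hypothetical).  It mirrors the arithmetic the new class performs: round-to-nearest scaling via RDIV, subtraction saturating at zero, and three-state comparisons in which an uninitialized value compares false both ways, so !(a >= b) cannot be inferred from a < b.

    // Illustrative stand-in mirroring profile_count arithmetic; hypothetical
    // names, not GCC code.
    #include <algorithm>
    #include <stdint.h>
    #include <cstdio>

    static const int64_t BASE = 10000;   // stands in for REG_BR_PROB_BASE
    static int64_t rdiv (int64_t x, int64_t y) { return (x + y / 2) / y; }

    struct count
    {
      int64_t val;                       // -1 means "uninitialized"
      bool initialized () const { return val != -1; }
      // apply_scale / apply_probability boil down to RDIV (val * num, den).
      count scale (int64_t num, int64_t den) const
      { return initialized () ? count{rdiv (val * num, den)} : count{-1}; }
      // Subtraction never goes negative; uninitialized inputs propagate.
      count operator- (count o) const
      {
        if (!initialized () || !o.initialized ()) return count{-1};
        return count{std::max<int64_t> (val - o.val, 0)};
      }
      // Three-state: false whenever either side is uninitialized.
      bool operator< (count o) const
      { return initialized () && o.initialized () && val < o.val; }
    };

    int main ()
    {
      count bb{1000}, unknown{-1};
      count half = bb.scale (BASE / 2, BASE);               // 50% -> 500
      std::printf ("half = %lld\n", (long long) half.val);
      std::printf ("%d %d\n", (int) (bb < unknown), (int) (unknown < bb));  // 0 0
      return 0;
    }

The last line is the point of the three-state design: with one side uninitialized, both bb < unknown and unknown < bb are simply false, so callers in the patch test initialized_p () explicitly before relying on an ordering.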
index 3346af4e961ac77b34805c1dcc273b4ae4d2c6a7..aca5c67465a16150dc4ba9429b3bddff4c3c4fd2 100644 (file)
@@ -67,6 +67,10 @@ along with GCC; see the file COPYING3.  If not see
 
 #include "profile.h"
 
+/* Map from BBs/edges to gcov counters.  */
+vec<gcov_type> bb_gcov_counts;
+hash_map<edge,gcov_type> edge_gcov_counts;
+
 struct bb_profile_info {
   unsigned int count_valid : 1;
 
@@ -303,7 +307,7 @@ is_edge_inconsistent (vec<edge, va_gc> *edges)
     {
       if (!EDGE_INFO (e)->ignore)
         {
-          if (e->count < 0
+          if (edge_gcov_count (e) < 0
              && (!(e->flags & EDGE_FAKE)
                  || !block_ends_with_call_p (e->src)))
            {
@@ -311,7 +315,7 @@ is_edge_inconsistent (vec<edge, va_gc> *edges)
                {
                  fprintf (dump_file,
                           "Edge %i->%i is inconsistent, count%" PRId64,
-                          e->src->index, e->dest->index, e->count);
+                          e->src->index, e->dest->index, edge_gcov_count (e));
                  dump_bb (dump_file, e->src, 0, TDF_DETAILS);
                  dump_bb (dump_file, e->dest, 0, TDF_DETAILS);
                }
@@ -333,8 +337,8 @@ correct_negative_edge_counts (void)
     {
       FOR_EACH_EDGE (e, ei, bb->succs)
         {
-           if (e->count < 0)
-             e->count = 0;
+           if (edge_gcov_count (e) < 0)
+             edge_gcov_count (e) = 0;
         }
     }
 }
@@ -354,32 +358,32 @@ is_inconsistent (void)
       inconsistent |= is_edge_inconsistent (bb->succs);
       if (!dump_file && inconsistent)
        return true;
-      if (bb->count < 0)
+      if (bb_gcov_count (bb) < 0)
         {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count is negative "
                       "%" PRId64,
                       bb->index,
-                      bb->count);
+                      bb_gcov_count (bb));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
-      if (bb->count != sum_edge_counts (bb->preds))
+      if (bb_gcov_count (bb) != sum_edge_counts (bb->preds))
         {
          if (dump_file)
            {
              fprintf (dump_file, "BB %i count does not match sum of incoming edges "
                       "%" PRId64" should be %" PRId64,
                       bb->index,
-                      bb->count,
+                      bb_gcov_count (bb),
                       sum_edge_counts (bb->preds));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
          inconsistent = true;
        }
-      if (bb->count != sum_edge_counts (bb->succs) &&
+      if (bb_gcov_count (bb) != sum_edge_counts (bb->succs) &&
          ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
             && block_ends_with_call_p (bb)))
        {
@@ -388,7 +392,7 @@ is_inconsistent (void)
              fprintf (dump_file, "BB %i count does not match sum of outgoing edges "
                       "%" PRId64" should be %" PRId64,
                       bb->index,
-                      bb->count,
+                      bb_gcov_count (bb),
                       sum_edge_counts (bb->succs));
              dump_bb (dump_file, bb, 0, TDF_DETAILS);
            }
@@ -408,8 +412,8 @@ set_bb_counts (void)
   basic_block bb;
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
-      bb->count = sum_edge_counts (bb->succs);
-      gcc_assert (bb->count >= 0);
+      bb_gcov_count (bb) = sum_edge_counts (bb->succs);
+      gcc_assert (bb_gcov_count (bb) >= 0);
     }
 }
 
@@ -436,8 +440,8 @@ read_profile_edge_counts (gcov_type *exec_counts)
            num_edges++;
            if (exec_counts)
              {
-               e->count = exec_counts[exec_counts_pos++];
-               if (e->count > profile_info->sum_max)
+               edge_gcov_count (e) = exec_counts[exec_counts_pos++];
+               if (edge_gcov_count (e) > profile_info->sum_max)
                  {
                    if (flag_profile_correction)
                      {
@@ -454,7 +458,7 @@ read_profile_edge_counts (gcov_type *exec_counts)
                  }
              }
            else
-             e->count = 0;
+             edge_gcov_count (e) = 0;
 
            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
@@ -464,7 +468,7 @@ read_profile_edge_counts (gcov_type *exec_counts)
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, "%" PRId64,
-                        (int64_t) e->count);
+                        (int64_t) edge_gcov_count (e));
              }
          }
     }
@@ -491,7 +495,7 @@ compute_frequency_overlap (void)
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
-      count_total += bb->count;
+      count_total += bb_gcov_count (bb);
       freq_total += bb->frequency;
     }
 
@@ -499,7 +503,7 @@ compute_frequency_overlap (void)
     return 0;
 
   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
-    overlap += MIN (bb->count * OVERLAP_BASE / count_total,
+    overlap += MIN (bb_gcov_count (bb) * OVERLAP_BASE / count_total,
                    bb->frequency * OVERLAP_BASE / freq_total);
 
   return overlap;
@@ -527,6 +531,8 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
   if (!profile_info)
     return;
 
+  bb_gcov_counts.safe_grow_cleared (last_basic_block_for_fn (cfun));
+
   if (profile_info->sum_all < profile_info->sum_max)
     {
       error ("corrupted profile info: sum_all is smaller than sum_max");
@@ -592,8 +598,8 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                  gcov_type total = 0;
 
                  FOR_EACH_EDGE (e, ei, bb->succs)
-                   total += e->count;
-                 bb->count = total;
+                   total += edge_gcov_count (e);
+                 bb_gcov_count (bb) = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
@@ -604,8 +610,8 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                  gcov_type total = 0;
 
                  FOR_EACH_EDGE (e, ei, bb->preds)
-                   total += e->count;
-                 bb->count = total;
+                   total += edge_gcov_count (e);
+                 bb_gcov_count (bb) = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
@@ -621,7 +627,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
-                   total += e->count;
+                   total += edge_gcov_count (e);
 
                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
@@ -629,11 +635,11 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                      break;
 
                  /* Calculate count for remaining edge by conservation.  */
-                 total = bb->count - total;
+                 total = bb_gcov_count (bb) - total;
 
                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
-                 e->count = total;
+                 edge_gcov_count (e) = total;
                  bi->succ_count--;
 
                  BB_INFO (e->dest)->pred_count--;
@@ -648,7 +654,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
-                   total += e->count;
+                   total += edge_gcov_count (e);
 
                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
@@ -656,11 +662,11 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
                      break;
 
                  /* Calculate count for remaining edge by conservation.  */
-                 total = bb->count - total + e->count;
+                 total = bb_gcov_count (bb) - total + edge_gcov_count (e);
 
                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
-                 e->count = total;
+                 edge_gcov_count (e) = total;
                  bi->pred_count--;
 
                  BB_INFO (e->src)->succ_count--;
@@ -727,11 +733,11 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
       edge e;
       edge_iterator ei;
 
-      if (bb->count < 0)
+      if (bb_gcov_count (bb) < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
-                bb->index, (int)bb->count);
-         bb->count = 0;
+                bb->index, (int)bb_gcov_count (bb));
+         bb_gcov_count (bb) = 0;
        }
       FOR_EACH_EDGE (e, ei, bb->succs)
        {
@@ -740,26 +746,29 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
-         if ((e->count < 0
+         if ((edge_gcov_count (e) < 0
               && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
-             || (e->count > bb->count
+             || (edge_gcov_count (e) > bb_gcov_count (bb)
                  && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
            {
              if (block_ends_with_call_p (bb))
-               e->count = e->count < 0 ? 0 : bb->count;
+               edge_gcov_count (e) = edge_gcov_count (e) < 0
+                                     ? 0 : bb_gcov_count (bb);
            }
-         if (e->count < 0 || e->count > bb->count)
+         if (edge_gcov_count (e) < 0
+             || edge_gcov_count (e) > bb_gcov_count (bb))
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
-                    (int)e->count);
-             e->count = bb->count / 2;
+                    (int)edge_gcov_count (e));
+             edge_gcov_count (e) = bb_gcov_count (bb) / 2;
            }
        }
-      if (bb->count)
+      if (bb_gcov_count (bb))
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
-           e->probability = GCOV_COMPUTE_SCALE (e->count, bb->count);
+           e->probability = GCOV_COMPUTE_SCALE (edge_gcov_count (e),
+                                                bb_gcov_count (bb));
          if (bb->index >= NUM_FIXED_BLOCKS
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
@@ -816,6 +825,19 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
            num_branches++;
        }
     }
+
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
+    {
+      edge e;
+      edge_iterator ei;
+
+      bb->count = profile_count::from_gcov_type (bb_gcov_count (bb));
+      FOR_EACH_EDGE (e, ei, bb->succs)
+        e->count = profile_count::from_gcov_type (edge_gcov_count (e));
+    }
+  bb_gcov_counts.release ();
+  edge_gcov_counts.empty ();
+
   counts_to_freqs ();
 
   if (dump_file)
@@ -1125,7 +1147,7 @@ branch_prob (void)
   for (i = 0 ; i < num_edges ; i++)
     {
       edge e = INDEX_EDGE (el, i);
-      e->count = 0;
+      edge_gcov_count (e) = 0;
 
       /* Mark edges we've replaced by fake edges above as ignored.  */
       if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
@@ -1323,7 +1345,7 @@ branch_prob (void)
       /* At this moment we have precise loop iteration count estimates.
         Record them to loop structure before the profile gets out of date. */
       FOR_EACH_LOOP (loop, 0)
-       if (loop->header->count)
+       if (loop->header->count > 0)
          {
            gcov_type nit = expected_loop_iterations_unbounded (loop);
            widest_int bound = gcov_type_to_wide_int (nit);
index 2c5bf3d1a17c331ff8d987b46297d33ce597e707..5ff806e5535d77977c197152690feafd5069477b 100644 (file)
@@ -37,6 +37,27 @@ struct edge_profile_info
 
 #define EDGE_INFO(e)  ((struct edge_profile_info *) (e)->aux)
 
+/* Helpers mapping edges/basic blocks to GCOV counts.  */
+
+extern vec<gcov_type> bb_gcov_counts;
+extern hash_map<edge,gcov_type> edge_gcov_counts;
+
+inline gcov_type &
+edge_gcov_count (edge e)
+{
+  bool existed;
+  gcov_type &c = edge_gcov_counts.get_or_insert (e, &existed);
+  if (!existed)
+    c = 0;
+  return c;
+}
+
+inline gcov_type &
+bb_gcov_count (basic_block bb)
+{
+  return bb_gcov_counts[bb->index];
+}
+
 typedef struct gcov_working_set_info gcov_working_set_t;
 extern gcov_working_set_t *find_working_set (unsigned pct_times_10);
 extern void add_working_set (gcov_working_set_t *);
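
As a rough illustration of how these side tables are meant to be used (a sketch with standard containers standing in for GCC's vec and hash_map; the names below are hypothetical): bb_gcov_counts is grown and zero-cleared up front and indexed by bb->index, while edge_gcov_count () inserts a zero entry on first use, so both reads and '+=' accumulation work without explicit initialization.

    // Hypothetical stand-in for the gcov-count side tables declared above.
    #include <stdint.h>
    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    typedef int64_t gcov_type;
    struct edge_s { int src, dest; };            // placeholder for 'edge'

    static std::vector<gcov_type> bb_counts;     // indexed by bb->index
    static std::unordered_map<const edge_s *, gcov_type> edge_counts;

    static gcov_type &edge_count (const edge_s *e)
    { return edge_counts[e]; }                   // default-inserts 0, like get_or_insert

    int main ()
    {
      bb_counts.assign (4, 0);                   // ~ safe_grow_cleared over all blocks
      edge_s e = { 0, 1 };
      edge_count (&e) += 100;                    // accumulate an edge counter
      bb_counts[1] = edge_count (&e);            // ~ bb_gcov_count (bb) = ...
      std::printf ("%lld %lld\n", (long long) bb_counts[1],
                   (long long) edge_count (&e));
      return 0;
    }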
index 1ac4ea3b0540f020f1f51a753c3a38a2f10be7ac..eaa1522bb0db28ce90905f9434e28175b09b74cf 100644 (file)
@@ -921,7 +921,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
 
        bb->frequency = RDIV (num * bb->frequency, den);
        dup->frequency -= bb->frequency;
-       bb->count = RDIV (num * bb->count, den);
+       bb->count = bb->count.apply_scale (num, den);
        dup->count -= bb->count;
       }
 
@@ -993,7 +993,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
          continue;
        }
 
-      new_bb->count += RDIV (e->src->count * e->probability, REG_BR_PROB_BASE);
+      new_bb->count += e->src->count.apply_probability (e->probability);
       new_bb->frequency += EDGE_FREQUENCY (e);
 
       redirect_edge_and_branch_force (e, new_bb);
index 3c6a47e824ebae05c4af28c7b7bb6b973702822e..6246d22fb37d650bea341d9b4e8ffdfe3e22b939 100644 (file)
@@ -1,3 +1,7 @@
+2017-05-23  Jan Hubicka  <hubicka@ucw.cz>
+
+       * gcc.dg/tree-ssa/attr-hotcold-2.c: Update template.
+
 2017-06-04  Dominique d'Humieres  <dominiq@lps.ens.fr>
 
        * lib/fortran-modules.exp (igrep): New procedure, case insensitive
@@ -77,6 +81,7 @@
        * gfortran.dg/warn_unused_function.f90: Likewise.
        * gfortran.dg/warn_unused_function_2.f90: Likewise.
        * gfortran.dg/ieee/ieee_8.f90: Likewise.
+
 2017-06-04  Marek Polacek  <polacek@redhat.com>
 
        PR c/80919
index 13d2916c47b9f0b358fe455088b6e17e3a6ad60f..f7a5098709b8a1c5f26c5583098aec0dc03f8f5e 100644 (file)
@@ -20,9 +20,9 @@ void f(int x, int y)
 
 /* { dg-final { scan-tree-dump-times "hot label heuristics" 1 "profile_estimate" } } */
 /* { dg-final { scan-tree-dump-times "cold label heuristics" 1 "profile_estimate" } } */
-/* { dg-final { scan-tree-dump-times "block 4, loop depth 0, count 0, freq \[1-4\]\[^0-9\]" 3 "profile_estimate" } } */
+/* { dg-final { scan-tree-dump-times "block 4, loop depth 0, freq \[1-4\]\[^0-9\]" 3 "profile_estimate" } } */
 
 /* Note: we're attempting to match some number > 6000, i.e. > 60%.
    The exact number ought to be tweakable without having to juggle
    the testcase around too much.  */
-/* { dg-final { scan-tree-dump-times "block 5, loop depth 0, count 0, freq \[6-9\]\[0-9\]\[0-9\]\[0-9\]" 3 "profile_estimate" } } */
+/* { dg-final { scan-tree-dump-times "block 5, loop depth 0, freq \[6-9\]\[0-9\]\[0-9\]\[0-9\]" 3 "profile_estimate" } } */
index 51d223c870a2d32e4afdfba5dbcce539c16f7c03..bb44673142057107e047a07a2e2cf4f46159771f 100644 (file)
@@ -132,7 +132,8 @@ count_insns (basic_block bb)
 static bool
 better_p (const_edge e1, const_edge e2)
 {
-  if (e1->count != e2->count)
+  if (e1->count.initialized_p () && e2->count.initialized_p ()
+      && !(e1->count == e2->count))
     return e1->count > e2->count;
   if (e1->src->frequency * e1->probability !=
       e2->src->frequency * e2->probability)
index f9809d54006c0b4e9fabc046e709f7ba7687e04c..f66abfc88354d8feaf695960c7d34e9c5c590acb 100644 (file)
@@ -2937,8 +2937,8 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
       ei->probability = PROB_ALWAYS;
       et->probability = PROB_LIKELY;
       ef->probability = PROB_UNLIKELY;
-      et->count = apply_probability (test_bb->count, et->probability);
-      ef->count = apply_probability (test_bb->count, ef->probability);
+      et->count = test_bb->count.apply_probability (et->probability);
+      ef->count = test_bb->count.apply_probability (ef->probability);
 
       code_bb->count = et->count;
       code_bb->frequency = EDGE_FREQUENCY (et);
@@ -2974,15 +2974,15 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
       redirect_edge_pred (fallthru_edge, test_bb);
       fallthru_edge->flags = EDGE_FALSE_VALUE;
       fallthru_edge->probability = PROB_VERY_LIKELY;
-      fallthru_edge->count
-       = apply_probability (test_bb->count, fallthru_edge->probability);
+      fallthru_edge->count = test_bb->count.apply_probability
+                               (fallthru_edge->probability);
 
       // Abort/over edge.
       redirect_edge_pred (abort_edge, test_bb);
       abort_edge->flags = EDGE_TRUE_VALUE;
       abort_edge->probability = PROB_VERY_UNLIKELY;
-      abort_edge->count
-       = apply_probability (test_bb->count, abort_edge->probability);
+      abort_edge->count = test_bb->count.apply_probability
+                               (abort_edge->probability);
 
       transaction_bb = test_bb;
     }
@@ -3022,13 +3022,13 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
       inst_edge->flags = EDGE_FALSE_VALUE;
       inst_edge->probability = REG_BR_PROB_BASE / 2;
       inst_edge->count
-       = apply_probability (test_bb->count, inst_edge->probability);
+       = test_bb->count.apply_probability (inst_edge->probability);
 
       redirect_edge_pred (uninst_edge, test_bb);
       uninst_edge->flags = EDGE_TRUE_VALUE;
       uninst_edge->probability = REG_BR_PROB_BASE / 2;
       uninst_edge->count
-       = apply_probability (test_bb->count, uninst_edge->probability);
+       = test_bb->count.apply_probability (uninst_edge->probability);
     }
 
   // If we have no previous special cases, and we have PHIs at the beginning
@@ -5076,7 +5076,7 @@ ipa_tm_insert_irr_call (struct cgraph_node *node, struct tm_region *region,
 
   node->create_edge (cgraph_node::get_create
                       (builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
-                    g, 0,
+                    g, gimple_bb (g)->count,
                     compute_call_stmt_bb_frequency (node->decl,
                                                     gimple_bb (g)));
 }
@@ -5127,7 +5127,7 @@ ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
 
   gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
-  node->create_edge (cgraph_node::get_create (gettm_fn), g, 0,
+  node->create_edge (cgraph_node::get_create (gettm_fn), g, gimple_bb (g)->count,
                     compute_call_stmt_bb_frequency (node->decl,
                                                     gimple_bb (g)));
 
index 7bd242c533699a2e16bddf3cadefb85ee7a61d96..69d3207696d6f9c848ea349f3819853ee8228536 100644 (file)
@@ -906,7 +906,7 @@ shrink_wrap_one_built_in_call_with_conds (gcall *bi_call, vec <gimple *> conds,
 
      Here we take the second approach because it's slightly simpler
      and because it's easy to see that it doesn't lose profile counts.  */
-  bi_call_bb->count = 0;
+  bi_call_bb->count = profile_count::zero ();
   bi_call_bb->frequency = 0;
   while (!edges.is_empty ())
     {
@@ -917,8 +917,8 @@ shrink_wrap_one_built_in_call_with_conds (gcall *bi_call, vec <gimple *> conds,
       gcc_assert (src_bb == nocall_edge->src);
 
       call_edge->probability = REG_BR_PROB_BASE * ERR_PROB;
-      call_edge->count = apply_probability (src_bb->count,
-                                           call_edge->probability);
+      call_edge->count
+        = src_bb->count.apply_probability (call_edge->probability);
       nocall_edge->probability = inverse_probability (call_edge->probability);
       nocall_edge->count = src_bb->count - call_edge->count;
 
index 6cb03ede7645334cda32c115c00b8cf15d9d2235..286cc79647adeb0b083d699fd800cee87fe3bb01 100644 (file)
@@ -6200,7 +6200,8 @@ gimple_duplicate_sese_region (edge entry, edge exit,
   vec<basic_block> doms;
   edge redirected;
   int total_freq = 0, entry_freq = 0;
-  gcov_type total_count = 0, entry_count = 0;
+  profile_count total_count = profile_count::uninitialized ();
+  profile_count entry_count = profile_count::uninitialized ();
 
   if (!can_copy_bbs_p (region, n_region))
     return false;
@@ -6257,7 +6258,7 @@ gimple_duplicate_sese_region (edge entry, edge exit,
       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
     }
 
-  if (entry->dest->count)
+  if (entry->dest->count.initialized_p ())
     {
       total_count = entry->dest->count;
       entry_count = entry->count;
@@ -6266,7 +6267,7 @@ gimple_duplicate_sese_region (edge entry, edge exit,
       if (entry_count > total_count)
        entry_count = total_count;
     }
-  else
+  if (!(total_count > 0) || !(entry_count > 0))
     {
       total_freq = entry->dest->frequency;
       entry_freq = EDGE_FREQUENCY (entry);
@@ -6280,13 +6281,13 @@ gimple_duplicate_sese_region (edge entry, edge exit,
 
   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
            split_edge_bb_loc (entry), update_dominance);
-  if (total_count)
+  if (total_count > 0 && entry_count > 0)
     {
-      scale_bbs_frequencies_gcov_type (region, n_region,
-                                      total_count - entry_count,
-                                      total_count);
-      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
-                                      total_count);
+      scale_bbs_frequencies_profile_count (region, n_region,
+                                          total_count - entry_count,
+                                          total_count);
+      scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
+                                          total_count);
     }
   else
     {
@@ -6383,7 +6384,8 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
   basic_block switch_bb, entry_bb, nentry_bb;
   vec<basic_block> doms;
   int total_freq = 0, exit_freq = 0;
-  gcov_type total_count = 0, exit_count = 0;
+  profile_count total_count = profile_count::uninitialized (),
+               exit_count = profile_count::uninitialized ();
   edge exits[2], nexits[2], e;
   gimple_stmt_iterator gsi;
   gimple *cond_stmt;
@@ -6426,7 +6428,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
      inside.  */
   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
 
-  if (exit->src->count)
+  if (exit->src->count > 0)
     {
       total_count = exit->src->count;
       exit_count = exit->count;
@@ -6449,13 +6451,13 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
 
   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
            split_edge_bb_loc (exit), true);
-  if (total_count)
+  if (total_count.initialized_p ())
     {
-      scale_bbs_frequencies_gcov_type (region, n_region,
-                                      total_count - exit_count,
-                                      total_count);
-      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
-                                      total_count);
+      scale_bbs_frequencies_profile_count (region, n_region,
+                                          total_count - exit_count,
+                                          total_count);
+      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
+                                          total_count);
     }
   else
     {
@@ -8522,10 +8524,10 @@ gimple_account_profile_record (basic_block bb, int after_pass,
     {
       record->size[after_pass]
        += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
-      if (profile_status_for_fn (cfun) == PROFILE_READ)
+      if (bb->count.initialized_p ())
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
-                                &eni_time_weights) * bb->count;
+                                &eni_time_weights) * bb->count.to_gcov_type ();
       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
        record->time[after_pass]
          += estimate_num_insns (gsi_stmt (i),
@@ -9053,24 +9055,27 @@ execute_fixup_cfg (void)
   basic_block bb;
   gimple_stmt_iterator gsi;
   int todo = 0;
-  gcov_type count_scale;
   edge e;
   edge_iterator ei;
   cgraph_node *node = cgraph_node::get (current_function_decl);
+  profile_count num = node->count;
+  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
+  bool scale = num.initialized_p () && den.initialized_p () && !(num == den);
 
-  count_scale
-    = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
-
-  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
-  EXIT_BLOCK_PTR_FOR_FN (cfun)->count
-    = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
+  if (scale)
+    {
+      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
+      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
+        = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
 
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
-    e->count = apply_scale (e->count, count_scale);
+      FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+       e->count = e->count.apply_scale (num, den);
+    }
 
   FOR_EACH_BB_FN (bb, cfun)
     {
-      bb->count = apply_scale (bb->count, count_scale);
+      if (scale)
+        bb->count = bb->count.apply_scale (num, den);
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
@@ -9139,8 +9144,9 @@ execute_fixup_cfg (void)
          gsi_next (&gsi);
        }
 
-      FOR_EACH_EDGE (e, ei, bb->succs)
-        e->count = apply_scale (e->count, count_scale);
+      if (scale)
+       FOR_EACH_EDGE (e, ei, bb->succs)
+         e->count = e->count.apply_scale (num, den);
 
       /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call end it with
@@ -9172,7 +9178,7 @@ execute_fixup_cfg (void)
            }
        }
     }
-  if (count_scale != REG_BR_PROB_BASE)
+  if (scale)
     compute_function_frequency ();
 
   if (current_loops
index f3ec404ef0938db46419fdd635444a9c8f1b7760..7f20cdc7f8e0211a3092a0593b9c851c43fee669 100644 (file)
@@ -1756,13 +1756,14 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
 
 static basic_block
 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
-         gcov_type count_scale)
+         profile_count num, profile_count den)
 {
   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
   basic_block copy_basic_block;
   tree decl;
   gcov_type freq;
   basic_block prev;
+  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
 
   /* Search for previous copied basic block.  */
   prev = bb->prev_bb;
@@ -1772,7 +1773,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
   /* create_basic_block() will append every new block to
      basic_block_info automatically.  */
   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
-  copy_basic_block->count = apply_scale (bb->count, count_scale);
+  if (scale)
+    copy_basic_block->count = bb->count.apply_scale (num, den);
 
   /* We are going to rebuild frequencies from scratch.  These values
      have just small importance to drive canonicalize_loop_headers.  */
@@ -2200,8 +2202,8 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
    debug stmts are left after a statement that must end the basic block.  */
 
 static bool
-copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
-                  basic_block abnormal_goto_dest)
+copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
+                  basic_block ret_bb, basic_block abnormal_goto_dest)
 {
   basic_block new_bb = (basic_block) bb->aux;
   edge_iterator ei;
@@ -2209,6 +2211,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
   gimple_stmt_iterator si;
   int flags;
   bool need_debug_cleanup = false;
+  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
 
   /* Use the indices from the original blocks to create edges for the
      new ones.  */
@@ -2225,7 +2228,8 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
            && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
-       new_edge->count = apply_scale (old_edge->count, count_scale);
+       if (scale)
+         new_edge->count = old_edge->count.apply_scale (num, den);
        new_edge->probability = old_edge->probability;
       }
 
@@ -2422,23 +2426,15 @@ remap_decl_1 (tree decl, void *data)
    the cfun to the function of new_fndecl (and current_function_decl too).  */
 
 static void
-initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
+initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
 {
   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
-  gcov_type count_scale;
 
   if (!DECL_ARGUMENTS (new_fndecl))
     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
   if (!DECL_RESULT (new_fndecl))
     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
 
-  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
-    count_scale
-        = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
-  else
-    count_scale = REG_BR_PROB_BASE;
-
   /* Register specific tree functions.  */
   gimple_register_cfg_hooks ();
 
@@ -2471,14 +2467,22 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   init_empty_tree_cfg ();
 
   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
-  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
-    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
-     REG_BR_PROB_BASE);
+
+  /* FIXME: When all counts are known to be zero, scaling is also
+     meaningful.  */
+  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.initialized_p ()
+      && count.initialized_p ()
+      && ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count > 0)
+    {
+      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
+       ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
+                                   ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
+      EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
+       EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
+                                   ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
+    }
   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
     = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
-  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
-    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
-     REG_BR_PROB_BASE);
   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
     EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
   if (src_cfun->eh)
@@ -2644,7 +2648,7 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
    when this can happen for COMDATs.  */
 
 void
-freqs_to_counts (struct cgraph_node *node, gcov_type count)
+freqs_to_counts (struct cgraph_node *node, profile_count count)
 {
   basic_block bb;
   edge_iterator ei;
@@ -2653,10 +2657,9 @@ freqs_to_counts (struct cgraph_node *node, gcov_type count)
 
   FOR_ALL_BB_FN(bb, fn)
     {
-      bb->count = apply_scale (count,
-                               GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
+      bb->count = count.apply_scale (bb->frequency, BB_FREQ_MAX);
       FOR_EACH_EDGE (e, ei, bb->succs)
-        e->count = apply_probability (e->src->count, e->probability);
+        e->count = e->src->count.apply_probability (e->probability);
     }
 }
 
@@ -2664,7 +2667,7 @@ freqs_to_counts (struct cgraph_node *node, gcov_type count)
    another function.  Walks FN via CFG, returns new fndecl.  */
 
 static tree
-copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
+copy_cfg_body (copy_body_data * id, profile_count count, int frequency_scale,
               basic_block entry_block_map, basic_block exit_block_map,
               basic_block new_entry)
 {
@@ -2675,10 +2678,12 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   basic_block bb;
   tree new_fndecl = NULL;
   bool need_debug_cleanup = false;
-  gcov_type count_scale;
   int last;
   int incoming_frequency = 0;
-  gcov_type incoming_count = 0;
+  profile_count incoming_count = profile_count::zero ();
+  profile_count num = count;
+  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
+  bool scale = num.initialized_p () && den.initialized_p () && den > 0;
 
   /* This can happen for COMDAT routines that end up with 0 counts
      despite being called (see the comments for handle_missing_profiles()
@@ -2686,25 +2691,19 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
      before inlining, using the guessed edge frequencies, so that we don't
      end up with a 0-count inline body which can confuse downstream
      optimizations such as function splitting.  */
-  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
+  if (!(ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count > 0) && count > 0)
     {
       /* Apply the larger of the call bb count and the total incoming
          call edge count to the callee.  */
-      gcov_type in_count = 0;
+      profile_count in_count = profile_count::zero ();
       struct cgraph_edge *in_edge;
       for (in_edge = id->src_node->callers; in_edge;
            in_edge = in_edge->next_caller)
-        in_count += in_edge->count;
+       if (in_edge->count.initialized_p ())
+          in_count += in_edge->count;
       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
     }
 
-  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
-    count_scale
-        = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
-  else
-    count_scale = REG_BR_PROB_BASE;
-
   /* Register specific tree functions.  */
   gimple_register_cfg_hooks ();
 
@@ -2724,7 +2723,10 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
            incoming_frequency += EDGE_FREQUENCY (e);
            incoming_count += e->count;
          }
-      incoming_count = apply_scale (incoming_count, count_scale);
+      if (scale)
+        incoming_count = incoming_count.apply_scale (num, den);
+      else
+       incoming_count = profile_count::uninitialized ();
       incoming_frequency
        = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
@@ -2751,7 +2753,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   FOR_EACH_BB_FN (bb, cfun_to_copy)
     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
       {
-       basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
+       basic_block new_bb = copy_bb (id, bb, frequency_scale, num, den);
        bb->aux = new_bb;
        new_bb->aux = bb;
        new_bb->loop_father = entry_block_map->loop_father;
@@ -2774,7 +2776,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   FOR_ALL_BB_FN (bb, cfun_to_copy)
     if (!id->blocks_to_copy
        || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
-      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
+      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
                                               abnormal_goto_dest);
 
   if (new_entry)
@@ -2979,7 +2981,7 @@ copy_tree_body (copy_body_data *id)
    another function.  */
 
 static tree
-copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
+copy_body (copy_body_data *id, profile_count count, int frequency_scale,
           basic_block entry_block_map, basic_block exit_block_map,
           basic_block new_entry)
 {
@@ -4497,7 +4499,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
       cgraph_edge *edge;
       tree virtual_offset = NULL;
       int freq = cg_edge->frequency;
-      gcov_type count = cg_edge->count;
+      profile_count count = cg_edge->count;
       tree op;
       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
 
index 9d9c5ac962454382f8f7b696af3562717a454d94..904befa980852af80df81aea36a73496e97a4910 100644 (file)
@@ -361,7 +361,7 @@ update_profile_after_ifcombine (basic_block inner_cond_bb,
   outer_to_inner->count = outer_cond_bb->count;
   inner_cond_bb->count = outer_cond_bb->count;
   inner_taken->count += outer2->count;
-  outer2->count = 0;
+  outer2->count = profile_count::zero ();
 
   inner_taken->probability = outer2->probability
                             + RDIV (outer_to_inner->probability
index 31a4b3224faac1556fbe24d25ffaa5c29d1fd2da..91787af257cd308dd6a2f4960f09a0d777ebb32d 100644 (file)
@@ -641,12 +641,12 @@ unloop_loops (bitmap loop_closed_ssa_invalidated,
       stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
       latch_edge = make_edge (latch, create_basic_block (NULL, NULL, latch), flags);
       latch_edge->probability = 0;
-      latch_edge->count = 0;
+      latch_edge->count = profile_count::zero ();
       latch_edge->flags |= flags;
       latch_edge->goto_locus = locus;
 
       add_bb_to_loop (latch_edge->dest, current_loops->tree_root);
-      latch_edge->dest->count = 0;
+      latch_edge->dest->count = profile_count::zero ();
       latch_edge->dest->frequency = 0;
       set_immediate_dominator (CDI_DOMINATORS, latch_edge->dest, latch_edge->src);
 
@@ -916,10 +916,10 @@ try_unroll_loop_completely (struct loop *loop,
           dump_printf_loc (MSG_OPTIMIZED_LOCATIONS | TDF_DETAILS, locus,
                            "loop with %d iterations completely unrolled",
                           (int) (n_unroll + 1));
-          if (profile_info)
+          if (loop->header->count.initialized_p ())
             dump_printf (MSG_OPTIMIZED_LOCATIONS | TDF_DETAILS,
                          " (header execution count %d)",
-                         (int)loop->header->count);
+                         (int)loop->header->count.to_gcov_type ());
           dump_printf (MSG_OPTIMIZED_LOCATIONS | TDF_DETAILS, "\n");
         }
     }
@@ -1088,7 +1088,7 @@ try_peel_loop (struct loop *loop,
          loop->nb_iterations_likely_upper_bound = 0;
        }
     }
-  gcov_type entry_count = 0;
+  profile_count entry_count = profile_count::zero ();
   int entry_freq = 0;
 
   edge e;
@@ -1096,13 +1096,14 @@ try_peel_loop (struct loop *loop,
   FOR_EACH_EDGE (e, ei, loop->header->preds)
     if (e->src != loop->latch)
       {
-       entry_count += e->src->count;
+       if (e->src->count.initialized_p ())
+         entry_count += e->src->count;
        entry_freq += e->src->frequency;
        gcc_assert (!flow_bb_inside_loop_p (loop, e->src));
       }
   int scale = 1;
-  if (loop->header->count)
-    scale = RDIV (entry_count * REG_BR_PROB_BASE, loop->header->count);
+  if (loop->header->count > 0)
+    scale = entry_count.probability_in (loop->header->count);
   else if (loop->header->frequency)
     scale = RDIV (entry_freq * REG_BR_PROB_BASE, loop->header->frequency);
   scale_loop_profile (loop, scale, 0);
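 
 A worked example with illustrative numbers may help here: if the non-latch predecessors contribute entry_count = 50 and loop->header->count = 200, probability_in computes RDIV (50 * REG_BR_PROB_BASE, 200) = 2500, i.e. 25% of REG_BR_PROB_BASE - the same value the previous RDIV (entry_count * REG_BR_PROB_BASE, loop->header->count) expression produced from raw gcov counters - while an uninitialized or zero header count now falls through to the frequency-based estimate.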
index d3a57f124fb294b1bb90e8e0dcf5dfe895e99038..18ed4f6268602ced3bb5e3728ba0c71416f83e05 100644 (file)
@@ -1213,7 +1213,7 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   use_operand_p op;
   bool ok;
   unsigned i, prob, prob_entry, scale_unrolled, scale_rest;
-  gcov_type freq_e, freq_h;
+  profile_count freq_e, freq_h;
   gcov_type new_est_niter = niter_for_unrolled_loop (loop, factor);
   unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
   auto_vec<edge> to_remove;
@@ -1281,8 +1281,6 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   new_nonexit->probability = REG_BR_PROB_BASE - exit->probability;
   new_nonexit->flags = EDGE_TRUE_VALUE;
   new_nonexit->count -= exit->count;
-  if (new_nonexit->count < 0)
-    new_nonexit->count = 0;
   scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                             REG_BR_PROB_BASE);
 
@@ -1356,19 +1354,21 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   freq_h = loop->header->count;
   freq_e = (loop_preheader_edge (loop))->count;
   /* Use frequency only if counts are zero.  */
-  if (freq_h == 0 && freq_e == 0)
+  if (!(freq_h > 0) && !(freq_e > 0))
     {
-      freq_h = loop->header->frequency;
-      freq_e = EDGE_FREQUENCY (loop_preheader_edge (loop));
+      freq_h = profile_count::from_gcov_type (loop->header->frequency);
+      freq_e = profile_count::from_gcov_type
+                (EDGE_FREQUENCY (loop_preheader_edge (loop)));
     }
-  if (freq_h != 0)
+  if (freq_h > 0)
     {
       gcov_type scale;
       /* Avoid dropping loop body profile counter to 0 because of zero count
         in loop's preheader.  */
-      freq_e = MAX (freq_e, 1);
+      if (freq_e == profile_count::zero ())
+        freq_e = profile_count::from_gcov_type (1);
       /* This should not overflow.  */
-      scale = GCOV_COMPUTE_SCALE (freq_e * (new_est_niter + 1), freq_h);
+      scale = freq_e.probability_in (freq_h);
       scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
     }
 
@@ -1384,8 +1384,6 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   prob = new_nonexit->probability;
   new_nonexit->probability = REG_BR_PROB_BASE - new_exit->probability;
   new_nonexit->count = exit_bb->count - new_exit->count;
-  if (new_nonexit->count < 0)
-    new_nonexit->count = 0;
   if (prob > 0)
     scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
                               prob);
index e67cd93094636a252c035b54e4dedb8386d0203a..b1f14078b4fa071a8f4648798b3017650d6c9cd3 100644 (file)
@@ -3815,8 +3815,7 @@ estimate_numbers_of_iterations_loop (struct loop *loop)
      recomputing iteration bounds later in the compilation process will just
      introduce random roundoff errors.  */
   if (!loop->any_estimate
-      && loop->header->count != 0
-      && profile_status_for_fn (cfun) >= PROFILE_READ)
+      && loop->header->count > 0)
     {
       gcov_type nit = expected_loop_iterations_unbounded (loop);
       bound = gcov_type_to_wide_int (nit);
index fd97213774950077a6d436568f679f04c04962b7..e77f2bfd1b521fc27b92b017d47af28195f3ca2c 100644 (file)
@@ -355,7 +355,7 @@ connect_loops (struct loop *loop1, struct loop *loop2)
 
   new_e->count = skip_bb->count;
   new_e->probability = PROB_LIKELY;
-  new_e->count = apply_probability (skip_e->count, PROB_LIKELY);
+  new_e->count = skip_e->count.apply_probability (PROB_LIKELY);
   skip_e->count -= new_e->count;
   skip_e->probability = inverse_probability (PROB_LIKELY);
 
index 1845148666db39e87663171e43b6c88929431f60..12bf640d3f840d8dff464218415667e42fde23f8 100644 (file)
@@ -842,9 +842,10 @@ hoist_guard (struct loop *loop, edge guard)
   /* Determine the probability that we skip the loop.  Assume that loop has
      same average number of iterations regardless outcome of guard.  */
   new_edge->probability = guard->probability;
-  int skip_count = guard->src->count
-                  ? RDIV (guard->count * pre_header->count, guard->src->count)
-                  : apply_probability (guard->count, new_edge->probability);
+  profile_count skip_count = guard->src->count > 0
+                  ? guard->count.apply_scale (pre_header->count,
+                                              guard->src->count)
+                  : guard->count.apply_probability (new_edge->probability);
 
   if (skip_count > e->count)
     {
@@ -868,7 +869,7 @@ hoist_guard (struct loop *loop, edge guard)
   /* ... now update profile to represent that original guard will be optimized
      away ...  */
   guard->probability = 0;
-  guard->count = 0;
+  guard->count = profile_count::zero ();
   not_guard->probability = REG_BR_PROB_BASE;
   /* This count is wrong (frequency of not_guard does not change),
      but will be scaled later.  */
index a2b0955df7aaa96362cb4f13ac14ba9acf50d9cd..6831f4474515a427839004a53f4685403800e551 100644 (file)
@@ -5827,7 +5827,7 @@ branch_fixup (void)
 
       edge etrue = make_edge (cond_bb, merge_bb, EDGE_TRUE_VALUE);
       etrue->probability = REG_BR_PROB_BASE / 2;
-      etrue->count = cond_bb->count / 2;
+      etrue->count = cond_bb->count.apply_scale (1, 2);
       edge efalse = find_edge (cond_bb, then_bb);
       efalse->flags = EDGE_FALSE_VALUE;
       efalse->probability -= etrue->probability;
index 8765fdd058cae527bd929e2c31d1987105a5e49e..c86148bc83269cfa165ec7fc356f499dbd8f1b00 100644 (file)
@@ -1562,7 +1562,7 @@ replace_block_by (basic_block bb1, basic_block bb2)
   bb2->count += bb1->count;
 
   /* Merge the outgoing edge counts from bb1 onto bb2.  */
-  gcov_type out_sum = 0;
+  profile_count out_sum = profile_count::zero ();
   FOR_EACH_EDGE (e1, ei, bb1->succs)
     {
       e2 = find_edge (bb2, e1->dest);
@@ -1576,7 +1576,7 @@ replace_block_by (basic_block bb1, basic_block bb2)
      making the bb count inconsistent with the edge weights.  */
   FOR_EACH_EDGE (e2, ei, bb2->succs)
     {
-      e2->probability = GCOV_COMPUTE_SCALE (e2->count, out_sum);
+      e2->probability = e2->count.probability_in (out_sum);
     }
 
   /* Move over any user labels from bb1 after the bb2 labels.  */
index 319826861dcdfdf6cbe157161d4883ccf64e3cc6..a77c279b41b1cf3c4f286cc319e908442704060f 100644 (file)
@@ -341,7 +341,7 @@ create_block_for_threading (basic_block bb,
 
   /* Zero out the profile, since the block is unreachable for now.  */
   rd->dup_blocks[count]->frequency = 0;
-  rd->dup_blocks[count]->count = 0;
+  rd->dup_blocks[count]->count = profile_count::uninitialized ();
   if (duplicate_blocks)
     bitmap_set_bit (*duplicate_blocks, rd->dup_blocks[count]->index);
 }
@@ -694,16 +694,16 @@ any_remaining_duplicated_blocks (vec<jump_thread_edge *> *path,
 static bool
 compute_path_counts (struct redirection_data *rd,
                     ssa_local_info_t *local_info,
-                    gcov_type *path_in_count_ptr,
-                    gcov_type *path_out_count_ptr,
+                    profile_count *path_in_count_ptr,
+                    profile_count *path_out_count_ptr,
                     int *path_in_freq_ptr)
 {
   edge e = rd->incoming_edges->e;
   vec<jump_thread_edge *> *path = THREAD_PATH (e);
   edge elast = path->last ()->e;
-  gcov_type nonpath_count = 0;
+  profile_count nonpath_count = profile_count::zero ();
   bool has_joiner = false;
-  gcov_type path_in_count = 0;
+  profile_count path_in_count = profile_count::zero ();
   int path_in_freq = 0;
 
   /* Start by accumulating incoming edge counts to the path's first bb
@@ -761,11 +761,11 @@ compute_path_counts (struct redirection_data *rd,
 
   /* Now compute the fraction of the total count coming into the first
      path bb that is from the current threading path.  */
-  gcov_type total_count = e->dest->count;
+  profile_count total_count = e->dest->count;
   /* Handle incoming profile insanities.  */
   if (total_count < path_in_count)
     path_in_count = total_count;
-  int onpath_scale = GCOV_COMPUTE_SCALE (path_in_count, total_count);
+  int onpath_scale = path_in_count.probability_in (total_count);
 
   /* Walk the entire path to do some more computation in order to estimate
      how much of the path_in_count will flow out of the duplicated threading
@@ -786,16 +786,16 @@ compute_path_counts (struct redirection_data *rd,
      nonpath_count with any additional counts coming into the path.  Other
      blocks along the path may have additional predecessors from outside
      the path.  */
-  gcov_type path_out_count = path_in_count;
-  gcov_type min_path_count = path_in_count;
+  profile_count path_out_count = path_in_count;
+  profile_count min_path_count = path_in_count;
   for (unsigned int i = 1; i < path->length (); i++)
     {
       edge epath = (*path)[i]->e;
-      gcov_type cur_count = epath->count;
+      profile_count cur_count = epath->count;
       if ((*path)[i]->type == EDGE_COPY_SRC_JOINER_BLOCK)
        {
          has_joiner = true;
-         cur_count = apply_probability (cur_count, onpath_scale);
+         cur_count = cur_count.apply_probability (onpath_scale);
        }
       /* In the joiner case we need to update nonpath_count for any edges
         coming into the path that will contribute to the count flowing
@@ -857,15 +857,15 @@ compute_path_counts (struct redirection_data *rd,
    will get a count/frequency of PATH_IN_COUNT and PATH_IN_FREQ,
    and the duplicate edge EDUP will have a count of PATH_OUT_COUNT.  */
 static void
-update_profile (edge epath, edge edup, gcov_type path_in_count,
-               gcov_type path_out_count, int path_in_freq)
+update_profile (edge epath, edge edup, profile_count path_in_count,
+               profile_count path_out_count, int path_in_freq)
 {
 
   /* First update the duplicated block's count / frequency.  */
   if (edup)
     {
       basic_block dup_block = edup->src;
-      gcc_assert (dup_block->count == 0);
+      gcc_assert (!dup_block->count.initialized_p ());
       gcc_assert (dup_block->frequency == 0);
       dup_block->count = path_in_count;
       dup_block->frequency = path_in_freq;
@@ -876,8 +876,6 @@ update_profile (edge epath, edge edup, gcov_type path_in_count,
      into the duplicated block.  Handle underflow due to precision/
      rounding issues.  */
   epath->src->count -= path_in_count;
-  if (epath->src->count < 0)
-    epath->src->count = 0;
   epath->src->frequency -= path_in_freq;
   if (epath->src->frequency < 0)
     epath->src->frequency = 0;
@@ -890,7 +888,7 @@ update_profile (edge epath, edge edup, gcov_type path_in_count,
   if (edup)
     edup->count = path_out_count;
   epath->count -= path_out_count;
-  gcc_assert (epath->count >= 0);
+  /* FIXME: can epath->count be legally uninitialized here?  */
 }
 
 
@@ -906,13 +904,12 @@ recompute_probabilities (basic_block bb)
   edge_iterator ei;
   FOR_EACH_EDGE (esucc, ei, bb->succs)
     {
-      if (!bb->count)
+      if (!(bb->count > 0))
        continue;
 
       /* Prevent overflow computation due to insane profiles.  */
       if (esucc->count < bb->count)
-       esucc->probability = GCOV_COMPUTE_SCALE (esucc->count,
-                                                bb->count);
+       esucc->probability = esucc->count.probability_in (bb->count);
       else
        /* Can happen with missing/guessed probabilities, since we
           may determine that more is flowing along duplicated
@@ -935,8 +932,8 @@ recompute_probabilities (basic_block bb)
 
 static void
 update_joiner_offpath_counts (edge epath, basic_block dup_bb,
-                             gcov_type path_in_count,
-                             gcov_type path_out_count)
+                             profile_count path_in_count,
+                             profile_count path_out_count)
 {
   /* Compute the count that currently flows off path from the joiner.
      In other words, the total count of joiner's out edges other than
@@ -945,7 +942,7 @@ update_joiner_offpath_counts (edge epath, basic_block dup_bb,
      are sometimes slight insanities where the total out edge count is
      larger than the bb count (possibly due to rounding/truncation
      errors).  */
-  gcov_type total_orig_off_path_count = 0;
+  profile_count total_orig_off_path_count = profile_count::zero ();
   edge enonpath;
   edge_iterator ei;
   FOR_EACH_EDGE (enonpath, ei, epath->src->succs)
@@ -960,7 +957,7 @@ update_joiner_offpath_counts (edge epath, basic_block dup_bb,
      path's cumulative in count and the portion of that count we
      estimated above as flowing from the joiner along the duplicated
      path.  */
-  gcov_type total_dup_off_path_count = path_in_count - path_out_count;
+  profile_count total_dup_off_path_count = path_in_count - path_out_count;
 
   /* Now do the actual updates of the off-path edges.  */
   FOR_EACH_EDGE (enonpath, ei, epath->src->succs)
@@ -981,17 +978,13 @@ update_joiner_offpath_counts (edge epath, basic_block dup_bb,
         among the duplicated off-path edges based on their original
         ratio to the full off-path count (total_orig_off_path_count).
         */
-      int scale = GCOV_COMPUTE_SCALE (enonpath->count,
-                                     total_orig_off_path_count);
+      int scale = enonpath->count.probability_in (total_orig_off_path_count);
       /* Give the duplicated offpath edge a portion of the duplicated
         total.  */
-      enonpathdup->count = apply_scale (scale,
-                                       total_dup_off_path_count);
+      enonpathdup->count = total_dup_off_path_count.apply_probability (scale);
       /* Now update the original offpath edge count, handling underflow
         due to rounding errors.  */
       enonpath->count -= enonpathdup->count;
-      if (enonpath->count < 0)
-       enonpath->count = 0;
     }
 }
 
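The recurring idiom of this patch is visible in update_joiner_offpath_counts: where GCOV_COMPUTE_SCALE used to turn a pair of gcov_type counts into a REG_BR_PROB_BASE fraction and apply_scale applied it, the new API chains count.probability_in (total) with apply_probability.  A small worked sketch with hypothetical numbers, taking REG_BR_PROB_BASE as 10000:

  profile_count offpath = profile_count::from_gcov_type (300);
  profile_count total   = profile_count::from_gcov_type (1200);
  profile_count dup     = profile_count::from_gcov_type (400);

  int scale = offpath.probability_in (total);            /* 300/1200 -> 2500 */
  profile_count share = dup.apply_probability (scale);   /* 400 * 25% -> 100 */
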
@@ -1010,7 +1003,7 @@ estimated_freqs_path (struct redirection_data *rd)
   bool non_zero_freq = false;
   FOR_EACH_EDGE (ein, ei, e->dest->preds)
     {
-      if (ein->count)
+      if (ein->count > 0)
        return false;
       non_zero_freq |= ein->src->frequency != 0;
     }
@@ -1018,13 +1011,13 @@ estimated_freqs_path (struct redirection_data *rd)
   for (unsigned int i = 1; i < path->length (); i++)
     {
       edge epath = (*path)[i]->e;
-      if (epath->src->count)
+      if (epath->src->count > 0)
        return false;
       non_zero_freq |= epath->src->frequency != 0;
       edge esucc;
       FOR_EACH_EDGE (esucc, ei, epath->src->succs)
        {
-         if (esucc->count)
+         if (esucc->count > 0)
            return false;
          non_zero_freq |= esucc->src->frequency != 0;
        }
@@ -1055,8 +1048,9 @@ freqs_to_counts_path (struct redirection_data *rd)
       /* Scale up the frequency by REG_BR_PROB_BASE, to avoid rounding
         errors applying the probability when the frequencies are very
         small.  */
-      ein->count = apply_probability (ein->src->frequency * REG_BR_PROB_BASE,
-                                     ein->probability);
+      ein->count = profile_count::from_gcov_type
+               (apply_probability (ein->src->frequency * REG_BR_PROB_BASE,
+                                     ein->probability));
     }
 
   for (unsigned int i = 1; i < path->length (); i++)
@@ -1066,10 +1060,12 @@ freqs_to_counts_path (struct redirection_data *rd)
       /* Scale up the frequency by REG_BR_PROB_BASE, to avoid rounding
         errors applying the edge probability when the frequencies are very
         small.  */
-      epath->src->count = epath->src->frequency * REG_BR_PROB_BASE;
+      epath->src->count = 
+       profile_count::from_gcov_type
+         (epath->src->frequency * REG_BR_PROB_BASE);
       FOR_EACH_EDGE (esucc, ei, epath->src->succs)
-       esucc->count = apply_probability (esucc->src->count,
-                                         esucc->probability);
+       esucc->count = 
+          esucc->src->count.apply_probability (esucc->probability);
     }
 }
 
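freqs_to_counts_path keeps its old strategy, synthesizing counts from block frequencies scaled by REG_BR_PROB_BASE so that later probability math does not round small values to zero; only the storage type changes.  Numerically, with hypothetical figures and REG_BR_PROB_BASE taken as 10000:

  /* A block with frequency 3 gets count 3 * 10000 = 30000; a successor
     edge carrying probability 2500 (25%) then gets count 7500.  */
  profile_count c = profile_count::from_gcov_type (3 * REG_BR_PROB_BASE);
  profile_count e = c.apply_probability (2500);
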
@@ -1089,15 +1085,15 @@ clear_counts_path (struct redirection_data *rd)
   edge ein, esucc;
   edge_iterator ei;
   FOR_EACH_EDGE (ein, ei, e->dest->preds)
-    ein->count = 0;
+    ein->count = profile_count::uninitialized ();
 
   /* First clear counts along original path.  */
   for (unsigned int i = 1; i < path->length (); i++)
     {
       edge epath = (*path)[i]->e;
       FOR_EACH_EDGE (esucc, ei, epath->src->succs)
-       esucc->count = 0;
-      epath->src->count = 0;
+       esucc->count = profile_count::uninitialized ();
+      epath->src->count = profile_count::uninitialized ();
     }
   /* Also need to clear the counts along duplicated path.  */
   for (unsigned int i = 0; i < 2; i++)
@@ -1106,8 +1102,8 @@ clear_counts_path (struct redirection_data *rd)
       if (!dup)
        continue;
       FOR_EACH_EDGE (esucc, ei, dup->succs)
-       esucc->count = 0;
-      dup->count = 0;
+       esucc->count = profile_count::uninitialized ();
+      dup->count = profile_count::uninitialized ();
     }
 }
 
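clear_counts_path now resets everything to profile_count::uninitialized () rather than 0.  That is a distinction the new type introduces deliberately: "no profile information" is no longer conflated with "measured to be zero".  A short sketch of the two states, assuming initialized_p () behaves as its other uses in this patch suggest:

  profile_count unknown = profile_count::uninitialized (); /* no information   */
  profile_count none    = profile_count::zero ();          /* known to be zero */

  gcc_assert (!unknown.initialized_p ());
  gcc_assert (none.initialized_p ());
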
@@ -1122,8 +1118,8 @@ ssa_fix_duplicate_block_edges (struct redirection_data *rd,
   edge e = rd->incoming_edges->e;
   vec<jump_thread_edge *> *path = THREAD_PATH (e);
   edge elast = path->last ()->e;
-  gcov_type path_in_count = 0;
-  gcov_type path_out_count = 0;
+  profile_count path_in_count = profile_count::zero ();
+  profile_count path_out_count = profile_count::zero ();
   int path_in_freq = 0;
 
   /* This routine updates profile counts, frequencies, and probabilities
@@ -2217,7 +2213,7 @@ duplicate_thread_path (edge entry, edge exit,
   edge exit_copy;
   edge redirected;
   int curr_freq;
-  gcov_type curr_count;
+  profile_count curr_count;
 
   if (!can_copy_bbs_p (region, n_region))
     return false;
@@ -2268,21 +2264,21 @@ duplicate_thread_path (edge entry, edge exit,
       if (curr_freq > region[i]->frequency)
        curr_freq = region[i]->frequency;
       /* Scale current BB.  */
-      if (region[i]->count)
+      if (region[i]->count > 0 && curr_count.initialized_p ())
        {
          /* In the middle of the path we only scale the frequencies.
             In last BB we need to update probabilities of outgoing edges
             because we know which one is taken at the threaded path.  */
          if (i + 1 != n_region)
-           scale_bbs_frequencies_gcov_type (region + i, 1,
-                                            region[i]->count - curr_count,
-                                            region[i]->count);
+           scale_bbs_frequencies_profile_count (region + i, 1,
+                                                region[i]->count - curr_count,
+                                                region[i]->count);
          else
            update_bb_profile_for_threading (region[i],
                                             curr_freq, curr_count,
                                             exit);
-         scale_bbs_frequencies_gcov_type (region_copy + i, 1, curr_count,
-                                          region_copy[i]->count);
+         scale_bbs_frequencies_profile_count (region_copy + i, 1, curr_count,
+                                              region_copy[i]->count);
        }
       else if (region[i]->frequency)
        {
index 0a2a84068a0a07cfd9f79c1eb17595ef208c4db0..66db20fe7bc6fa4afe25e094de522fc232faae86 100644 (file)
@@ -236,8 +236,10 @@ case_bit_test_cmp (const void *p1, const void *p2)
   const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
   const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
 
-  if (d2->target_edge->count != d1->target_edge->count)
-    return d2->target_edge->count - d1->target_edge->count;
+  if (d2->target_edge->count < d1->target_edge->count)
+    return -1;
+  if (d2->target_edge->count > d1->target_edge->count)
+    return 1;
   if (d2->bits != d1->bits)
     return d2->bits - d1->bits;
 
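In case_bit_test_cmp the old comparator returned the difference of two counts, which no longer works once they are profile_count values (and could already truncate when a gcov_type difference was narrowed to int).  The replacement spells out a three-way comparison; the same pattern applies to any qsort callback ordering by profile_count (a and b below are hypothetical pointers with a profile_count count field):

  /* Descending by count; no arithmetic on profile_count.  */
  if (b->count < a->count)
    return -1;
  if (b->count > a->count)
    return 1;
  return 0;  /* counts compare equal; fall through to a secondary key */
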
@@ -559,10 +561,10 @@ struct switch_conv_info
   int default_prob;
 
   /* The count of the default edge in the replaced switch.  */
-  gcov_type default_count;
+  profile_count default_count;
 
   /* Combined count of all other (non-default) edges in the replaced switch.  */
-  gcov_type other_count;
+  profile_count other_count;
 
   /* Number of phi nodes in the final bb (that we'll be replacing).  */
   int phi_count;
index bc370af610f8bced9623e4cabe37b6177aa538c0..b7053387e91925c0c7678ea48d61c4442acdce85 100644 (file)
@@ -799,12 +799,10 @@ adjust_return_value (basic_block bb, tree m, tree a)
 /* Subtract COUNT and FREQUENCY from the basic block and its
    outgoing edge.  */
 static void
-decrease_profile (basic_block bb, gcov_type count, int frequency)
+decrease_profile (basic_block bb, profile_count count, int frequency)
 {
   edge e;
-  bb->count -= count;
-  if (bb->count < 0)
-    bb->count = 0;
+  bb->count = bb->count - count;
   bb->frequency -= frequency;
   if (bb->frequency < 0)
     bb->frequency = 0;
@@ -815,8 +813,6 @@ decrease_profile (basic_block bb, gcov_type count, int frequency)
     }
   e = single_succ_edge (bb);
   e->count -= count;
-  if (e->count < 0)
-    e->count = 0;
 }
 
 /* Returns true if argument PARAM of the tail recursive call needs to be copied
index 42b9f48dbf4145ea273f2119820f858333bddf8a..2a7fdca5690483cf987ba5e9c333452405708bc6 100644 (file)
@@ -566,7 +566,7 @@ slpeel_add_loop_guard (basic_block guard_bb, tree cond,
 
   new_e->count = guard_bb->count;
   new_e->probability = probability;
-  new_e->count = apply_probability (enter_e->count, probability);
+  new_e->count = enter_e->count.apply_probability (probability);
   if (irreducible_p)
     new_e->flags |= EDGE_IRREDUCIBLE_LOOP;
 
index 107f082366a16766eaae67cb46cc36b194fad464..28f4349a3c842d14b0e0c7970e22d12a9e5909d3 100644 (file)
@@ -6666,23 +6666,24 @@ scale_profile_for_vect_loop (struct loop *loop, unsigned vf)
   edge preheader = loop_preheader_edge (loop);
   /* Reduce loop iterations by the vectorization factor.  */
   gcov_type new_est_niter = niter_for_unrolled_loop (loop, vf);
-  gcov_type freq_h = loop->header->count, freq_e = preheader->count;
+  profile_count freq_h = loop->header->count, freq_e = preheader->count;
 
   /* Use frequency only if counts are zero.  */
-  if (freq_h == 0 && freq_e == 0)
+  if (!(freq_h > 0) && !(freq_e > 0))
     {
-      freq_h = loop->header->frequency;
-      freq_e = EDGE_FREQUENCY (preheader);
+      freq_h = profile_count::from_gcov_type (loop->header->frequency);
+      freq_e = profile_count::from_gcov_type (EDGE_FREQUENCY (preheader));
     }
-  if (freq_h != 0)
+  if (freq_h > 0)
     {
       gcov_type scale;
 
       /* Avoid dropping loop body profile counter to 0 because of zero count
         in loop's preheader.  */
-      freq_e = MAX (freq_e, 1);
+      if (!(freq_e > profile_count::from_gcov_type (1)))
+       freq_e = profile_count::from_gcov_type (1);
       /* This should not overflow.  */
-      scale = GCOV_COMPUTE_SCALE (freq_e * (new_est_niter + 1), freq_h);
+      scale = freq_e.apply_scale (new_est_niter + 1, 1).probability_in (freq_h);
       scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
     }
 
@@ -6695,8 +6696,6 @@ scale_profile_for_vect_loop (struct loop *loop, unsigned vf)
   int prob = exit_l->probability;
   exit_l->probability = REG_BR_PROB_BASE - exit_e->probability;
   exit_l->count = exit_bb->count - exit_e->count;
-  if (exit_l->count < 0)
-    exit_l->count = 0;
   if (prob > 0)
     scale_bbs_frequencies_int (&loop->latch, 1, exit_l->probability, prob);
 }
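
In scale_profile_for_vect_loop the old GCOV_COMPUTE_SCALE (freq_e * (new_est_niter + 1), freq_h) becomes an apply_scale followed by probability_in.  With hypothetical figures, and REG_BR_PROB_BASE taken as 10000:

  /* freq_e = 1000, new_est_niter + 1 = 4, freq_h = 8000:
     1000 * 4 = 4000, and 4000 out of 8000 is half of REG_BR_PROB_BASE.  */
  gcov_type scale = freq_e.apply_scale (4, 1).probability_in (freq_h);  /* 5000 */
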
index 7f1574af70d6ce01d025cfc972448c3318bb2086..56ec9fe570bd21c887a9be63efed683f9008acf6 100644 (file)
@@ -588,8 +588,9 @@ free_histograms (struct function *fn)
 
 static bool
 check_counter (gimple *stmt, const char * name,
-              gcov_type *count, gcov_type *all, gcov_type bb_count)
+              gcov_type *count, gcov_type *all, profile_count bb_count_d)
 {
+  gcov_type bb_count = bb_count_d.to_gcov_type ();
   if (*all != bb_count || *count > *all)
     {
       location_t locus;
@@ -740,31 +741,31 @@ gimple_divmod_fixed_value (gassign *stmt, tree value, int prob,
   /* Edge e23 connects bb2 to bb3, etc. */
   e12 = split_block (bb, bb1end);
   bb2 = e12->dest;
-  bb2->count = count;
+  bb2->count = profile_count::from_gcov_type (count);
   e23 = split_block (bb2, bb2end);
   bb3 = e23->dest;
-  bb3->count = all - count;
+  bb3->count = profile_count::from_gcov_type (all - count);
   e34 = split_block (bb3, bb3end);
   bb4 = e34->dest;
-  bb4->count = all;
+  bb4->count = profile_count::from_gcov_type (all);
 
   e12->flags &= ~EDGE_FALLTHRU;
   e12->flags |= EDGE_FALSE_VALUE;
   e12->probability = prob;
-  e12->count = count;
+  e12->count = profile_count::from_gcov_type (count);
 
   e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
   e13->probability = REG_BR_PROB_BASE - prob;
-  e13->count = all - count;
+  e13->count = profile_count::from_gcov_type (all - count);
 
   remove_edge (e23);
 
   e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
   e24->probability = REG_BR_PROB_BASE;
-  e24->count = count;
+  e24->count = profile_count::from_gcov_type (count);
 
   e34->probability = REG_BR_PROB_BASE;
-  e34->count = all - count;
+  e34->count = profile_count::from_gcov_type (all - count);
 
   return tmp2;
 }
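
For orientation, the count bookkeeping in gimple_divmod_fixed_value above (and in the structurally identical gimple_mod_pow2 below) works out as follows with hypothetical histogram counters count = 30 and all = 100:

  bb2->count = profile_count::from_gcov_type (30);        /* count       */
  bb3->count = profile_count::from_gcov_type (100 - 30);  /* all - count */
  bb4->count = profile_count::from_gcov_type (100);       /* all         */
  /* The edges mirror the blocks they enter: e12 and e24 carry 30, e13 and
     e34 carry 70, so every block's incoming counts sum to its own count.  */
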
@@ -905,31 +906,31 @@ gimple_mod_pow2 (gassign *stmt, int prob, gcov_type count, gcov_type all)
   /* Edge e23 connects bb2 to bb3, etc. */
   e12 = split_block (bb, bb1end);
   bb2 = e12->dest;
-  bb2->count = count;
+  bb2->count = profile_count::from_gcov_type (count);
   e23 = split_block (bb2, bb2end);
   bb3 = e23->dest;
-  bb3->count = all - count;
+  bb3->count = profile_count::from_gcov_type (all - count);
   e34 = split_block (bb3, bb3end);
   bb4 = e34->dest;
-  bb4->count = all;
+  bb4->count = profile_count::from_gcov_type (all);
 
   e12->flags &= ~EDGE_FALLTHRU;
   e12->flags |= EDGE_FALSE_VALUE;
   e12->probability = prob;
-  e12->count = count;
+  e12->count = profile_count::from_gcov_type (count);
 
   e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
   e13->probability = REG_BR_PROB_BASE - prob;
-  e13->count = all - count;
+  e13->count = profile_count::from_gcov_type (all - count);
 
   remove_edge (e23);
 
   e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
   e24->probability = REG_BR_PROB_BASE;
-  e24->count = count;
+  e24->count = profile_count::from_gcov_type (count);
 
   e34->probability = REG_BR_PROB_BASE;
-  e34->count = all - count;
+  e34->count = profile_count::from_gcov_type (all - count);
 
   return result;
 }
@@ -1065,42 +1066,42 @@ gimple_mod_subtract (gassign *stmt, int prob1, int prob2, int ncounts,
      to 3 really refer to block 2. */
   e12 = split_block (bb, bb1end);
   bb2 = e12->dest;
-  bb2->count = all - count1;
+  bb2->count = profile_count::from_gcov_type (all - count1);
 
   if (ncounts) /* Assumed to be 0 or 1.  */
     {
       e23 = split_block (bb2, bb2end);
       bb3 = e23->dest;
-      bb3->count = all - count1 - count2;
+      bb3->count = profile_count::from_gcov_type (all - count1 - count2);
     }
 
   e34 = split_block (ncounts ? bb3 : bb2, bb3end);
   bb4 = e34->dest;
-  bb4->count = all;
+  bb4->count = profile_count::from_gcov_type (all);
 
   e12->flags &= ~EDGE_FALLTHRU;
   e12->flags |= EDGE_FALSE_VALUE;
   e12->probability = REG_BR_PROB_BASE - prob1;
-  e12->count = all - count1;
+  e12->count = profile_count::from_gcov_type (all - count1);
 
   e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
   e14->probability = prob1;
-  e14->count = count1;
+  e14->count = profile_count::from_gcov_type (count1);
 
   if (ncounts)  /* Assumed to be 0 or 1.  */
     {
       e23->flags &= ~EDGE_FALLTHRU;
       e23->flags |= EDGE_FALSE_VALUE;
-      e23->count = all - count1 - count2;
+      e23->count = profile_count::from_gcov_type (all - count1 - count2);
       e23->probability = REG_BR_PROB_BASE - prob2;
 
       e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
       e24->probability = prob2;
-      e24->count = count2;
+      e24->count = profile_count::from_gcov_type (count2);
     }
 
   e34->probability = REG_BR_PROB_BASE;
-  e34->count = all - count1 - count2;
+  e34->count = profile_count::from_gcov_type (all - count1 - count2);
 
   return result;
 }
@@ -1317,7 +1318,7 @@ check_ic_target (gcall *call_stmt, struct cgraph_node *target)
 
 gcall *
 gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
-          int prob, gcov_type count, gcov_type all)
+          int prob, profile_count count, profile_count all)
 {
   gcall *dcall_stmt;
   gassign *load_stmt;
@@ -1543,12 +1544,13 @@ gimple_ic_transform (gimple_stmt_iterator *gsi)
   count = histogram->hvalue.counters [1];
   all = histogram->hvalue.counters [2];
 
-  bb_all = gimple_bb (stmt)->count;
+  bb_all = gimple_bb (stmt)->count.to_gcov_type ();
   /* The order of CHECK_COUNTER calls is important -
      since check_counter can correct the third parameter
      and we want to make count <= all <= bb_all. */
-  if ( check_counter (stmt, "ic", &all, &bb_all, bb_all)
-      || check_counter (stmt, "ic", &count, &all, all))
+  if (check_counter (stmt, "ic", &all, &bb_all, gimple_bb (stmt)->count)
+      || check_counter (stmt, "ic", &count, &all,
+                       profile_count::from_gcov_type (all)))
     {
       gimple_remove_histogram_value (cfun, stmt, histogram);
       return false;
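
check_counter now receives the block count as a profile_count and unwraps it internally via to_gcov_type, so gimple_ic_transform can pass gimple_bb (stmt)->count directly; for the second call the already-adjusted gcov_type value all has to be wrapped again.  The boundary conversions in both directions are simply (bb being any basic block):

  gcov_type raw      = bb->count.to_gcov_type ();            /* profile_count -> gcov_type */
  profile_count back = profile_count::from_gcov_type (raw);  /* gcov_type -> profile_count */
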
@@ -1694,32 +1696,32 @@ gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, int prob,
   /* Edge e_ci connects cond_bb to icall_bb, etc. */
   e_ci = split_block (cond_bb, cond_stmt);
   icall_bb = e_ci->dest;
-  icall_bb->count = count;
+  icall_bb->count = profile_count::from_gcov_type (count);
 
   e_iv = split_block (icall_bb, icall_stmt);
   vcall_bb = e_iv->dest;
-  vcall_bb->count = all - count;
+  vcall_bb->count = profile_count::from_gcov_type (all - count);
 
   e_vj = split_block (vcall_bb, vcall_stmt);
   join_bb = e_vj->dest;
-  join_bb->count = all;
+  join_bb->count = profile_count::from_gcov_type (all);
 
   e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
   e_ci->probability = prob;
-  e_ci->count = count;
+  e_ci->count = profile_count::from_gcov_type (count);
 
   e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
   e_cv->probability = REG_BR_PROB_BASE - prob;
-  e_cv->count = all - count;
+  e_cv->count = profile_count::from_gcov_type (all - count);
 
   remove_edge (e_iv);
 
   e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
   e_ij->probability = REG_BR_PROB_BASE;
-  e_ij->count = count;
+  e_ij->count = profile_count::from_gcov_type (count);
 
   e_vj->probability = REG_BR_PROB_BASE;
-  e_vj->count = all - count;
+  e_vj->count = profile_count::from_gcov_type (all - count);
 
   /* Insert PHI node for the call result if necessary.  */
   if (gimple_call_lhs (vcall_stmt)
index a853340453196afbb9b285bb8a9ea6106a4f8b21..92649ecd322efe992164333c2808a49490e9c4c0 100644 (file)
@@ -90,8 +90,8 @@ void gimple_move_stmt_histograms (struct function *, gimple *, gimple *);
 void verify_histograms (void);
 void free_histograms (function *);
 void stringop_block_profile (gimple *, unsigned int *, HOST_WIDE_INT *);
-gcall *gimple_ic (gcall *, struct cgraph_node *, int, gcov_type,
-                 gcov_type);
+gcall *gimple_ic (gcall *, struct cgraph_node *, int, profile_count,
+                 profile_count);
 bool check_ic_target (gcall *, struct cgraph_node *);