git.ipfire.org Git - thirdparty/gcc.git/commitdiff
Eliminate ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros
author: David Malcolm <dmalcolm@redhat.com>
Tue, 19 Nov 2013 21:57:13 +0000 (21:57 +0000)
committer: David Malcolm <dmalcolm@gcc.gnu.org>
Tue, 19 Nov 2013 21:57:13 +0000 (21:57 +0000)
gcc/

* basic-block.h (ENTRY_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
(ENTRY_BLOCK_PTR_FOR_FN): ...this.
(EXIT_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
(EXIT_BLOCK_PTR_FOR_FN): ...this.
(ENTRY_BLOCK_PTR): Eliminate macro as work towards making uses of
cfun be explicit.
(EXIT_BLOCK_PTR): Likewise.
(FOR_ALL_BB): Rework for now to eliminate use of "ENTRY_BLOCK_PTR".
(FOR_ALL_BB_FN): Update for renaming of
"ENTRY_BLOCK_PTR_FOR_FUNCTION" to "ENTRY_BLOCK_PTR_FOR_FN".

* cfg.c (init_flow): Likewise.
(check_bb_profile): Likewise.
* cfganal.c (pre_and_rev_post_order_compute_fn): Likewise.
* cfgcleanup.c (walk_to_nondebug_insn): Likewise.
* cfghooks.c (account_profile_record): Likewise.
* cfgloop.c (init_loops_structure): Likewise.
* cgraphbuild.c (record_eh_tables): Likewise.
(compute_call_stmt_bb_frequency): Likewise.
* ipa-inline-analysis.c (compute_bb_predicates): Likewise.
* lto-streamer-in.c (input_cfg): Likewise.
* predict.c (maybe_hot_frequency_p): Likewise.
* tree-cfg.c (init_empty_tree_cfg_for_function): Likewise.
* tree-inline.c (initialize_cfun): Likewise.
(copy_cfg_body): Likewise.
(copy_body): Likewise.
(tree_function_versioning): Likewise.

* bb-reorder.c (add_labels_and_missing_jumps): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(duplicate_computed_gotos): Remove usage of EXIT_BLOCK_PTR macro.
(find_rarely_executed_basic_blocks_and_crossing_edges): Remove uses of
macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(connect_traces): Likewise.
(rest_of_handle_reorder_blocks): Remove usage of EXIT_BLOCK_PTR macro.
(bb_to_key): Remove usage of ENTRY_BLOCK_PTR macro.
(fix_crossing_conditional_branches): Remove usage of EXIT_BLOCK_PTR
macro.
(find_traces_1_round): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(fix_up_fall_thru_edges): Remove usage of EXIT_BLOCK_PTR macro.
(find_traces): Remove usage of ENTRY_BLOCK_PTR macro.
(fix_up_crossing_landing_pad): Remove usage of EXIT_BLOCK_PTR macro.
(rotate_loop): Likewise.
* bt-load.c (migrate_btr_def): Remove usage of ENTRY_BLOCK_PTR macro.
* cfg.c (clear_aux_for_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(alloc_aux_for_edges): Likewise.
(clear_bb_flags): Remove usage of ENTRY_BLOCK_PTR macro.
(cached_make_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(compact_blocks): Likewise.
(clear_edges): Likewise.
* cfganal.c (single_pred_before_succ_order): Remove usage of
ENTRY_BLOCK_PTR macro.
(bitmap_union_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
(bitmap_union_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
(bitmap_intersection_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
(bitmap_intersection_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
(inverted_post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(compute_dominance_frontiers_1): Remove usage of ENTRY_BLOCK_PTR
macro.
(post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(connect_infinite_loops_to_exit): Remove usage of EXIT_BLOCK_PTR
macro.
(remove_fake_edges): Remove usage of ENTRY_BLOCK_PTR macro.
(add_noreturn_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
(find_pdom): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(remove_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
(verify_edge_list): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(print_edge_list): Likewise.
(create_edge_list): Likewise.
(find_unreachable_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
(mark_dfs_back_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
* cfgbuild.c (find_bb_boundaries): Remove usage of ENTRY_BLOCK_PTR
macro.
(find_many_sub_basic_blocks): Remove usage of EXIT_BLOCK_PTR macro.
(make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* cfgcleanup.c (delete_unreachable_blocks): Likewise.
(try_optimize_cfg): Likewise.
(try_head_merge_bb): Remove usage of EXIT_BLOCK_PTR macro.
(try_crossjump_to_edge): Remove usage of ENTRY_BLOCK_PTR macro.
(try_crossjump_bb): Remove usage of EXIT_BLOCK_PTR macro.
(merge_blocks_move): Remove usage of ENTRY_BLOCK_PTR macro.
(outgoing_edges_match): Remove usage of EXIT_BLOCK_PTR macro.
(try_forward_edges): Likewise.
(try_simplify_condjump): Likewise.
* cfgexpand.c (gimple_expand_cfg): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(construct_exit_block): Remove usage of EXIT_BLOCK_PTR macro.
(construct_init_block): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(expand_gimple_basic_block): Remove usage of EXIT_BLOCK_PTR macro.
(expand_gimple_tailcall): Likewise.
* cfghooks.c (can_duplicate_block_p): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(tidy_fallthru_edges): Likewise.
(verify_flow_info): Likewise.
* cfgloop.c (flow_bb_inside_loop_p): Likewise.
(num_loop_branches): Remove usage of EXIT_BLOCK_PTR macro.
(disambiguate_multiple_latches): Remove usage of ENTRY_BLOCK_PTR
macro.
(get_loop_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
(bb_loop_header_p): Remove usage of ENTRY_BLOCK_PTR macro.
(get_loop_body_in_bfs_order): Remove usage of EXIT_BLOCK_PTR macro.
(get_loop_body_in_dom_order): Likewise.
(get_loop_body): Likewise.
* cfgloopanal.c (mark_irreducible_loops): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* cfgloopmanip.c (create_preheader): Remove usage of ENTRY_BLOCK_PTR
macro.
(remove_path): Remove usage of EXIT_BLOCK_PTR macro.
(fix_bb_placement): Likewise.
* cfgrtl.c (rtl_block_empty_p): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(rtl_can_remove_branch_p): Remove usage of EXIT_BLOCK_PTR macro.
(cfg_layout_split_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(rtl_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
(cfg_layout_can_merge_blocks_p): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(cfg_layout_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
macro.
(fixup_fallthru_exit_predecessor): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(fixup_reorder_chain): Likewise.
(relink_block_chain): Likewise.
(cfg_layout_delete_block): Remove usage of EXIT_BLOCK_PTR macro.
(rtl_verify_bb_layout): Remove usage of ENTRY_BLOCK_PTR macro.
(cfg_layout_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
(force_one_exit_fallthru): Likewise.
(rtl_verify_fallthru): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(rtl_verify_edges): Likewise.
(commit_edge_insertions): Likewise.
(commit_one_edge_insertion): Likewise.
(rtl_split_edge): Likewise.
(force_nonfallthru_and_redirect): Likewise.
(outof_cfg_layout_mode): Remove usage of EXIT_BLOCK_PTR macro.
(skip_insns_after_block): Likewise.
(fixup_partition_crossing): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(purge_dead_edges): Remove usage of EXIT_BLOCK_PTR macro.
(rtl_can_merge_blocks): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(contains_no_active_insn_p): Likewise.
(emit_insn_at_entry): Remove usage of ENTRY_BLOCK_PTR macro.
(entry_of_function): Likewise.
(last_bb_in_partition): Remove usage of EXIT_BLOCK_PTR macro.
(fixup_new_cold_bb): Likewise.
(patch_jump_insn): Likewise.
(try_redirect_by_replacing_jump): Likewise.
(block_label): Likewise.
(could_fall_through): Likewise.
(can_fallthru): Likewise.
* cgraphbuild.c (cgraph_rebuild_references): Remove usage of
ENTRY_BLOCK_PTR macro.
(rebuild_cgraph_edges): Likewise.
* cgraphunit.c (init_lowered_empty_function): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(expand_thunk): Remove usage of EXIT_BLOCK_PTR macro.
* combine.c (get_last_value): Remove usage of ENTRY_BLOCK_PTR macro.
(distribute_links): Remove usage of EXIT_BLOCK_PTR macro.
(get_last_value_validate): Remove usage of ENTRY_BLOCK_PTR macro.
(try_combine): Remove usage of EXIT_BLOCK_PTR macro.
(reg_num_sign_bit_copies_for_combine): Remove usage of ENTRY_BLOCK_PTR
macro.
(reg_nonzero_bits_for_combine): Likewise.
(set_nonzero_bits_and_sign_copies): Likewise.
(combine_instructions): Likewise.
* cprop.c (one_cprop_pass): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(bypass_conditional_jumps): Likewise.
(bypass_block): Remove usage of EXIT_BLOCK_PTR macro.
(find_implicit_sets): Likewise.
(cprop_jump): Likewise.
* cse.c (cse_cc_succs): Likewise.
(cse_find_path): Likewise.
* df-problems.c (df_lr_confluence_0): Likewise.
* df-scan.c (df_entry_block_defs_collect): Remove usage of
ENTRY_BLOCK_PTR macro.
(df_exit_block_uses_collect): Remove usage of EXIT_BLOCK_PTR macro.
* dominance.c (iterate_fix_dominators): Remove usage of
ENTRY_BLOCK_PTR macro.
(calc_idoms): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(determine_dominators_for_sons): Remove usage of ENTRY_BLOCK_PTR
macro.
(calc_dfs_tree): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(prune_bbs_to_update_dominators): Remove usage of ENTRY_BLOCK_PTR
macro.
(calc_dfs_tree_nonrec): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
* domwalk.c (cmp_bb_postorder): Likewise.
* dse.c (dse_step1): Remove usage of EXIT_BLOCK_PTR macro.
* except.c (finish_eh_generation): Remove usage of ENTRY_BLOCK_PTR
macro.
(sjlj_emit_function_enter): Likewise.
* final.c (compute_alignments): Likewise.
* function.c (thread_prologue_and_epilogue_insns): Remove uses of
macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(reposition_prologue_and_epilogue_notes): Remove usage of
EXIT_BLOCK_PTR macro.
(convert_jumps_to_returns): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(regno_clobbered_at_setjmp): Remove usage of ENTRY_BLOCK_PTR macro.
(next_block_for_reg): Remove usage of EXIT_BLOCK_PTR macro.
* gcse.c (hoist_code): Remove usage of ENTRY_BLOCK_PTR macro.
(update_bb_reg_pressure): Remove usage of EXIT_BLOCK_PTR macro.
(compute_code_hoist_vbeinout): Likewise.
(should_hoist_expr_to_dom): Remove usage of ENTRY_BLOCK_PTR macro.
(pre_expr_reaches_here_p_work): Likewise.
* gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
(gimple_find_edge_insert_loc): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
* gimple-ssa-strength-reduction.c (slsr_process_phi): Remove usage of
ENTRY_BLOCK_PTR macro.
* graph.c (draw_cfg_nodes_for_loop): Remove usage of EXIT_BLOCK_PTR
macro.
* graphite-clast-to-gimple.c (translate_clast_user): Remove usage of
ENTRY_BLOCK_PTR macro.
* graphite-scop-detection.c (build_scops): Likewise.
(create_sese_edges): Remove usage of EXIT_BLOCK_PTR macro.
(scopdet_basic_block_info): Remove usage of ENTRY_BLOCK_PTR macro.
* haifa-sched.c (restore_bb_notes): Remove usage of EXIT_BLOCK_PTR
macro.
(unlink_bb_notes): Likewise.
(create_check_block_twin): Likewise.
(init_before_recovery): Likewise.
(sched_extend_bb): Likewise.
(priority): Likewise.
* hw-doloop.c (reorder_loops): Likewise.
(discover_loop): Likewise.
* ifcvt.c (dead_or_predicable): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(find_if_case_1): Remove usage of EXIT_BLOCK_PTR macro.
(block_has_only_trap): Likewise.
(cond_exec_find_if_block): Likewise.
(merge_if_block): Likewise.
* ipa-inline-analysis.c (param_change_prob): Remove usage of
ENTRY_BLOCK_PTR macro.
(record_modified): Likewise.
* ipa-pure-const.c (execute_warn_function_noreturn): Remove usage of
EXIT_BLOCK_PTR macro.
(local_pure_const): Likewise.
* ipa-split.c (split_function): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(find_split_points): Likewise.
(consider_split): Likewise.
(find_return_bb): Remove usage of EXIT_BLOCK_PTR macro.
(verify_non_ssa_vars): Remove usage of ENTRY_BLOCK_PTR macro.
* ira-build.c (ira_loop_tree_body_rev_postorder): Likewise.
* ira-color.c (print_loop_title): Remove usage of EXIT_BLOCK_PTR
macro.
* ira-emit.c (entered_from_non_parent_p): Remove usage of
ENTRY_BLOCK_PTR macro.
(ira_emit): Remove usage of EXIT_BLOCK_PTR macro.
* ira-int.h (ira_assert): Remove usage of ENTRY_BLOCK_PTR macro.
* ira.c (split_live_ranges_for_shrink_wrap): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* lcm.c (compute_rev_insert_delete): Remove usage of ENTRY_BLOCK_PTR
macro.
(compute_nearerout): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(compute_farthest): Likewise.
(compute_available): Likewise.
(compute_insert_delete): Remove usage of EXIT_BLOCK_PTR macro.
(compute_laterin): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(compute_earliest): Likewise.
(compute_antinout_edge): Likewise.
* loop-iv.c (simplify_using_initial_values): Remove usage of
ENTRY_BLOCK_PTR macro.
* loop-unswitch.c (unswitch_loop): Remove usage of EXIT_BLOCK_PTR
macro.
* lra-assigns.c (find_hard_regno_for): Remove usage of ENTRY_BLOCK_PTR
macro.
* lra-constraints.c (lra_inheritance): Remove usage of EXIT_BLOCK_PTR
macro.
* lra-lives.c (lra_create_live_ranges): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* lra.c (has_nonexceptional_receiver): Remove usage of EXIT_BLOCK_PTR
macro.
* lto-streamer-in.c (input_function): Remove usage of ENTRY_BLOCK_PTR
macro.
* lto-streamer-out.c (output_cfg): Likewise.
* mcf.c (adjust_cfg_counts): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(create_fixup_graph): Remove usage of ENTRY_BLOCK_PTR macro.
* mode-switching.c (optimize_mode_switching): Likewise.
(create_pre_exit): Remove usage of EXIT_BLOCK_PTR macro.
* modulo-sched.c (rest_of_handle_sms): Likewise.
(canon_loop): Likewise.
* omp-low.c (build_omp_regions): Remove usage of ENTRY_BLOCK_PTR
macro.
* postreload-gcse.c (eliminate_partially_redundant_loads): Remove uses
of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* predict.c (rebuild_frequencies): Remove usage of ENTRY_BLOCK_PTR
macro.
(propagate_freq): Remove usage of EXIT_BLOCK_PTR macro.
(estimate_bb_frequencies): Remove usage of ENTRY_BLOCK_PTR macro.
(tree_estimate_probability_bb): Remove usage of EXIT_BLOCK_PTR macro.
(expensive_function_p): Remove usage of ENTRY_BLOCK_PTR macro.
(tree_bb_level_predictions): Remove usage of EXIT_BLOCK_PTR macro.
(counts_to_freqs): Remove usage of ENTRY_BLOCK_PTR macro.
(apply_return_prediction): Remove usage of EXIT_BLOCK_PTR macro.
(estimate_loops): Remove usage of ENTRY_BLOCK_PTR macro.
(gimple_predict_edge): Likewise.
(probably_never_executed): Likewise.
* profile.c (find_spanning_tree): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(branch_prob): Likewise.
(compute_branch_probabilities): Likewise.
(compute_frequency_overlap): Remove usage of ENTRY_BLOCK_PTR macro.
(is_inconsistent): Remove usage of EXIT_BLOCK_PTR macro.
(read_profile_edge_counts): Remove usage of ENTRY_BLOCK_PTR macro.
(set_bb_counts): Likewise.
(correct_negative_edge_counts): Likewise.
(get_exec_counts): Likewise.
(instrument_values): Likewise.
(instrument_edges): Likewise.
* reg-stack.c (convert_regs): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(compensate_edges): Remove usage of ENTRY_BLOCK_PTR macro.
(convert_regs_exit): Remove usage of EXIT_BLOCK_PTR macro.
(convert_regs_entry): Remove usage of ENTRY_BLOCK_PTR macro.
(reg_to_stack): Likewise.
* regs.h (REG_N_SETS): Likewise.
* reload.c (find_dummy_reload): Likewise.
(combine_reloads): Likewise.
(push_reload): Likewise.
* reload1.c (has_nonexceptional_receiver): Remove usage of
EXIT_BLOCK_PTR macro.
* resource.c (mark_target_live_regs): Remove usage of ENTRY_BLOCK_PTR
macro.
(find_basic_block): Likewise.
* sched-ebb.c (ebb_add_block): Remove usage of EXIT_BLOCK_PTR macro.
(schedule_ebbs): Likewise.
* sched-int.h (sel_sched_p): Likewise.
* sched-rgn.c (compute_dom_prob_ps): Remove usage of ENTRY_BLOCK_PTR
macro.
(rgn_add_block): Remove usage of EXIT_BLOCK_PTR macro.
(haifa_find_rgns): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(propagate_deps): Remove usage of EXIT_BLOCK_PTR macro.
(extend_rgns): Likewise.
(find_single_block_region): Likewise.
* sel-sched-ir.c (sel_remove_loop_preheader): Remove usage of
ENTRY_BLOCK_PTR macro.
(setup_nop_and_exit_insns): Remove usage of EXIT_BLOCK_PTR macro.
(sel_create_recovery_block): Likewise.
(bb_ends_ebb_p): Likewise.
(sel_bb_end): Likewise.
(sel_bb_head): Likewise.
(free_lv_sets): Likewise.
(init_lv_sets): Likewise.
(tidy_control_flow): Likewise.
(maybe_tidy_empty_bb): Likewise.
* sel-sched-ir.h (_succ_iter_cond): Likewise.
(_succ_iter_start): Likewise.
(sel_bb_empty_or_nop_p): Likewise.
(get_loop_exit_edges_unique_dests): Likewise.
(inner_loop_header_p): Likewise.
* sel-sched.c (create_block_for_bookkeeping): Likewise.
(find_block_for_bookkeeping): Likewise.
* store-motion.c (remove_reachable_equiv_notes): Likewise.
(insert_store): Likewise.
* trans-mem.c (ipa_tm_transform_clone): Remove usage of
ENTRY_BLOCK_PTR macro.
(tm_memopt_compute_available): Remove usage of EXIT_BLOCK_PTR macro.
(ipa_tm_scan_irr_function): Remove usage of ENTRY_BLOCK_PTR macro.
(gate_tm_init): Likewise.
(tm_region_init): Likewise.
* tree-cfg.c (execute_fixup_cfg): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(execute_warn_function_return): Remove usage of EXIT_BLOCK_PTR macro.
(split_critical_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(print_loops): Remove usage of ENTRY_BLOCK_PTR macro.
(move_sese_region_to_fn): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(gimple_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
macro.
(gimple_verify_flow_info): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(remove_edge_and_dominated_blocks): Remove usage of EXIT_BLOCK_PTR
macro.
(make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(gimple_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
(make_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
(build_gimple_cfg): Likewise.
(gimple_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
(gimple_can_merge_blocks_p): Likewise.
* tree-cfgcleanup.c (tree_forwarder_block_p): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* tree-complex.c (update_parameter_components): Remove usage of
ENTRY_BLOCK_PTR macro.
* tree-if-conv.c (get_loop_body_in_if_conv_order): Remove usage of
EXIT_BLOCK_PTR macro.
* tree-inline.c (tree_function_versioning): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(delete_unreachable_blocks_update_callgraph): Likewise.
(initialize_cfun): Likewise.
(copy_cfg_body): Remove usage of ENTRY_BLOCK_PTR macro.
(copy_edges_for_bb): Remove usage of EXIT_BLOCK_PTR macro.
(remap_ssa_name): Remove usage of ENTRY_BLOCK_PTR macro.
* tree-into-ssa.c (update_ssa): Likewise.
(maybe_register_def): Remove usage of EXIT_BLOCK_PTR macro.
(insert_updated_phi_nodes_for): Remove usage of ENTRY_BLOCK_PTR macro.
(rewrite_into_ssa): Likewise.
(rewrite_debug_stmt_uses): Likewise.
* tree-outof-ssa.c (expand_phi_nodes): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* tree-profile.c (gimple_gen_ic_func_profiler): Remove usage of
ENTRY_BLOCK_PTR macro.
* tree-scalar-evolution.h (block_before_loop): Likewise.
* tree-sra.c (sra_ipa_reset_debug_stmts): Likewise.
(dump_dereferences_table): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(analyze_caller_dereference_legality): Remove usage of ENTRY_BLOCK_PTR
macro.
(propagate_dereference_distances): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(initialize_parameter_reductions): Remove usage of ENTRY_BLOCK_PTR
macro.
* tree-ssa-ccp.c (gsi_prev_dom_bb_nondebug): Likewise.
(optimize_stack_restore): Remove usage of EXIT_BLOCK_PTR macro.
* tree-ssa-coalesce.c (create_outofssa_var_map): Likewise.
* tree-ssa-dce.c (eliminate_unnecessary_stmts): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(remove_dead_stmt): Remove usage of EXIT_BLOCK_PTR macro.
(propagate_necessity): Remove usage of ENTRY_BLOCK_PTR macro.
(mark_control_dependent_edges_necessary): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
* tree-ssa-dom.c (eliminate_degenerate_phis): Remove usage of
ENTRY_BLOCK_PTR macro.
(tree_ssa_dominator_optimize): Remove usage of EXIT_BLOCK_PTR macro.
* tree-ssa-live.c (verify_live_on_entry): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(calculate_live_on_exit): Likewise.
(set_var_live_on_entry): Remove usage of ENTRY_BLOCK_PTR macro.
(loe_visit_block): Likewise.
* tree-ssa-live.h (live_on_exit): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(live_on_entry): Likewise.
* tree-ssa-loop-ivopts.c (find_interesting_uses): Remove usage of
EXIT_BLOCK_PTR macro.
* tree-ssa-loop-manip.c (compute_live_loop_exits): Remove usage of
ENTRY_BLOCK_PTR macro.
* tree-ssa-loop-niter.c (simplify_using_initial_conditions): Likewise.
(bound_difference): Likewise.
* tree-ssa-loop-prefetch.c (may_use_storent_in_loop_p): Remove usage
of EXIT_BLOCK_PTR macro.
* tree-ssa-loop-unswitch.c (simplify_using_entry_checks): Remove usage
of ENTRY_BLOCK_PTR macro.
* tree-ssa-math-opts.c (register_division_in): Likewise.
* tree-ssa-phiprop.c (tree_ssa_phiprop): Likewise.
* tree-ssa-pre.c (compute_avail): Likewise.
(compute_antic): Remove usage of EXIT_BLOCK_PTR macro.
(insert): Remove usage of ENTRY_BLOCK_PTR macro.
* tree-ssa-propagate.c (ssa_prop_init): Likewise.
(simulate_block): Remove usage of EXIT_BLOCK_PTR macro.
(cfg_blocks_add): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
(add_control_edge): Remove usage of EXIT_BLOCK_PTR macro.
* tree-ssa-reassoc.c (do_reassoc): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(build_and_add_sum): Remove usage of ENTRY_BLOCK_PTR macro.
* tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
(execute_sink_code): Remove usage of EXIT_BLOCK_PTR macro.
* tree-ssa-uninit.c (find_dom): Remove usage of ENTRY_BLOCK_PTR macro.
(compute_control_dep_chain): Remove usage of EXIT_BLOCK_PTR macro.
(find_pdom): Likewise.
(warn_uninitialized_vars): Remove usage of ENTRY_BLOCK_PTR macro.
* tree-stdarg.c (reachable_at_most_once): Likewise.
* tree-tailcall.c (tree_optimize_tail_calls_1): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(eliminate_tail_call): Likewise.
* tsan.c (instrument_func_entry): Remove usage of ENTRY_BLOCK_PTR
macro.
(instrument_func_exit): Remove usage of EXIT_BLOCK_PTR macro.
* var-tracking.c (vt_initialize): Remove uses of macros:
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
(vt_add_function_parameter): Remove usage of ENTRY_BLOCK_PTR macro.
(vt_find_locations): Remove usage of EXIT_BLOCK_PTR macro.
(vt_stack_adjustments): Remove uses of macros: ENTRY_BLOCK_PTR,
EXIT_BLOCK_PTR.
* varasm.c (assemble_start_function): Remove usage of ENTRY_BLOCK_PTR
macro.
* config/bfin/bfin.c (hwloop_optimize): Likewise.
* config/nds32/nds32.c (nds32_fp_as_gp_check_available): Remove usage
of EXIT_BLOCK_PTR macro.
* config/arm/arm.c (require_pic_register): Remove usage of
ENTRY_BLOCK_PTR macro.
(arm_r3_live_at_start_p): Likewise.
(any_sibcall_could_use_r3): Remove usage of EXIT_BLOCK_PTR macro.
* config/rs6000/rs6000.c (rs6000_emit_prologue): Likewise.
* config/frv/frv.c (frv_optimize_membar_global): Likewise.
* config/alpha/alpha.c (alpha_gp_save_rtx): Remove usage of
ENTRY_BLOCK_PTR macro.
* config/i386/i386.c (ix86_count_insn): Likewise.
(ix86_seh_fixup_eh_fallthru): Remove usage of EXIT_BLOCK_PTR macro.
(ix86_pad_short_function): Likewise.
(ix86_compute_frame_layout): Remove usage of ENTRY_BLOCK_PTR macro.
(ix86_pad_returns): Remove usage of EXIT_BLOCK_PTR macro.
(ix86_eax_live_at_start_p): Remove usage of ENTRY_BLOCK_PTR macro.
(add_condition_to_bb): Remove usage of EXIT_BLOCK_PTR macro.
(ix86_expand_epilogue): Likewise.
* config/ia64/ia64.c (ia64_asm_unwind_emit): Likewise.
(ia64_expand_prologue): Likewise.

From-SVN: r205055

114 files changed:
gcc/ChangeLog
gcc/basic-block.h
gcc/bb-reorder.c
gcc/bt-load.c
gcc/cfg.c
gcc/cfganal.c
gcc/cfgbuild.c
gcc/cfgcleanup.c
gcc/cfgexpand.c
gcc/cfghooks.c
gcc/cfgloop.c
gcc/cfgloopanal.c
gcc/cfgloopmanip.c
gcc/cfgrtl.c
gcc/cgraphbuild.c
gcc/cgraphunit.c
gcc/combine.c
gcc/config/alpha/alpha.c
gcc/config/arm/arm.c
gcc/config/bfin/bfin.c
gcc/config/frv/frv.c
gcc/config/i386/i386.c
gcc/config/ia64/ia64.c
gcc/config/nds32/nds32.c
gcc/config/rs6000/rs6000.c
gcc/cprop.c
gcc/cse.c
gcc/df-problems.c
gcc/df-scan.c
gcc/dominance.c
gcc/domwalk.c
gcc/dse.c
gcc/except.c
gcc/final.c
gcc/function.c
gcc/gcse.c
gcc/gimple-iterator.c
gcc/gimple-ssa-strength-reduction.c
gcc/graph.c
gcc/graphite-clast-to-gimple.c
gcc/graphite-scop-detection.c
gcc/haifa-sched.c
gcc/hw-doloop.c
gcc/ifcvt.c
gcc/ipa-inline-analysis.c
gcc/ipa-pure-const.c
gcc/ipa-split.c
gcc/ira-build.c
gcc/ira-color.c
gcc/ira-emit.c
gcc/ira-int.h
gcc/ira.c
gcc/lcm.c
gcc/loop-iv.c
gcc/loop-unswitch.c
gcc/lra-assigns.c
gcc/lra-constraints.c
gcc/lra-lives.c
gcc/lra.c
gcc/lto-streamer-in.c
gcc/lto-streamer-out.c
gcc/mcf.c
gcc/mode-switching.c
gcc/modulo-sched.c
gcc/omp-low.c
gcc/postreload-gcse.c
gcc/predict.c
gcc/profile.c
gcc/reg-stack.c
gcc/regs.h
gcc/reload.c
gcc/reload1.c
gcc/resource.c
gcc/sched-ebb.c
gcc/sched-int.h
gcc/sched-rgn.c
gcc/sel-sched-ir.c
gcc/sel-sched-ir.h
gcc/sel-sched.c
gcc/store-motion.c
gcc/trans-mem.c
gcc/tree-cfg.c
gcc/tree-cfgcleanup.c
gcc/tree-complex.c
gcc/tree-if-conv.c
gcc/tree-inline.c
gcc/tree-into-ssa.c
gcc/tree-outof-ssa.c
gcc/tree-profile.c
gcc/tree-scalar-evolution.h
gcc/tree-sra.c
gcc/tree-ssa-ccp.c
gcc/tree-ssa-coalesce.c
gcc/tree-ssa-dce.c
gcc/tree-ssa-dom.c
gcc/tree-ssa-live.c
gcc/tree-ssa-live.h
gcc/tree-ssa-loop-ivopts.c
gcc/tree-ssa-loop-manip.c
gcc/tree-ssa-loop-niter.c
gcc/tree-ssa-loop-prefetch.c
gcc/tree-ssa-loop-unswitch.c
gcc/tree-ssa-math-opts.c
gcc/tree-ssa-phiprop.c
gcc/tree-ssa-pre.c
gcc/tree-ssa-propagate.c
gcc/tree-ssa-reassoc.c
gcc/tree-ssa-sink.c
gcc/tree-ssa-uninit.c
gcc/tree-stdarg.c
gcc/tree-tailcall.c
gcc/tsan.c
gcc/var-tracking.c
gcc/varasm.c

index 810843b7cfc59c1c9fce1c6bef8be43bedae73d8..f06ccfbd8adca0720ca5f10f86eb3e2aad728fbd 100644 (file)
@@ -1,3 +1,520 @@
+2013-11-19  David Malcolm  <dmalcolm@redhat.com>
+
+       * basic-block.h (ENTRY_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
+       (ENTRY_BLOCK_PTR_FOR_FN): ...this.
+       (EXIT_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
+       (EXIT_BLOCK_PTR_FOR_FN): ...this.
+       (ENTRY_BLOCK_PTR): Eliminate macro as work towards making uses of
+       cfun be explicit.
+       (EXIT_BLOCK_PTR): Likewise.
+       (FOR_ALL_BB): Rework for now to eliminate use of "ENTRY_BLOCK_PTR".
+       (FOR_ALL_BB_FN): Update for renaming of
+       "ENTRY_BLOCK_PTR_FOR_FUNCTION" to "ENTRY_BLOCK_PTR_FOR_FN".
+
+       * cfg.c (init_flow): Likewise.
+       (check_bb_profile): Likewise.
+       * cfganal.c (pre_and_rev_post_order_compute_fn): Likewise.
+       * cfgcleanup.c (walk_to_nondebug_insn): Likewise.
+       * cfghooks.c (account_profile_record): Likewise.
+       * cfgloop.c (init_loops_structure): Likewise.
+       * cgraphbuild.c (record_eh_tables): Likewise.
+       (compute_call_stmt_bb_frequency): Likewise.
+       * ipa-inline-analysis.c (compute_bb_predicates): Likewise.
+       * lto-streamer-in.c (input_cfg): Likewise.
+       * predict.c (maybe_hot_frequency_p): Likewise.
+       * tree-cfg.c (init_empty_tree_cfg_for_function): Likewise.
+       * tree-inline.c (initialize_cfun): Likewise.
+       (copy_cfg_body): Likewise.
+       (copy_body): Likewise.
+       (tree_function_versioning): Likewise.
+
+       * bb-reorder.c (add_labels_and_missing_jumps): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (duplicate_computed_gotos): Remove usage of EXIT_BLOCK_PTR macro.
+       (find_rarely_executed_basic_blocks_and_crossing_edges): Remove uses of
+       macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (connect_traces): Likewise.
+       (rest_of_handle_reorder_blocks): Remove usage of EXIT_BLOCK_PTR macro.
+       (bb_to_key): Remove usage of ENTRY_BLOCK_PTR macro.
+       (fix_crossing_conditional_branches): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       (find_traces_1_round): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (fix_up_fall_thru_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (find_traces): Remove usage of ENTRY_BLOCK_PTR macro.
+       (fix_up_crossing_landing_pad): Remove usage of EXIT_BLOCK_PTR macro.
+       (rotate_loop): Likewise.
+       * bt-load.c (migrate_btr_def): Remove usage of ENTRY_BLOCK_PTR macro.
+       * cfg.c (clear_aux_for_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (alloc_aux_for_edges): Likewise.
+       (clear_bb_flags): Remove usage of ENTRY_BLOCK_PTR macro.
+       (cached_make_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (compact_blocks): Likewise.
+       (clear_edges): Likewise.
+       * cfganal.c (single_pred_before_succ_order): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (bitmap_union_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
+       (bitmap_union_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
+       (bitmap_intersection_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
+       (bitmap_intersection_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
+       (inverted_post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (compute_dominance_frontiers_1): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (connect_infinite_loops_to_exit): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       (remove_fake_edges): Remove usage of ENTRY_BLOCK_PTR macro.
+       (add_noreturn_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (find_pdom): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (remove_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (verify_edge_list): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (print_edge_list): Likewise.
+       (create_edge_list): Likewise.
+       (find_unreachable_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
+       (mark_dfs_back_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       * cfgbuild.c (find_bb_boundaries): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (find_many_sub_basic_blocks): Remove usage of EXIT_BLOCK_PTR macro.
+       (make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * cfgcleanup.c (delete_unreachable_blocks): Likewise.
+       (try_optimize_cfg): Likewise.
+       (try_head_merge_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (try_crossjump_to_edge): Remove usage of ENTRY_BLOCK_PTR macro.
+       (try_crossjump_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (merge_blocks_move): Remove usage of ENTRY_BLOCK_PTR macro.
+       (outgoing_edges_match): Remove usage of EXIT_BLOCK_PTR macro.
+       (try_forward_edges): Likewise.
+       (try_simplify_condjump): Likewise.
+       * cfgexpand.c (gimple_expand_cfg): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (construct_exit_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (construct_init_block): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (expand_gimple_basic_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (expand_gimple_tailcall): Likewise.
+       * cfghooks.c (can_duplicate_block_p): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (tidy_fallthru_edges): Likewise.
+       (verify_flow_info): Likewise.
+       * cfgloop.c (flow_bb_inside_loop_p): Likewise.
+       (num_loop_branches): Remove usage of EXIT_BLOCK_PTR macro.
+       (disambiguate_multiple_latches): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (get_loop_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (bb_loop_header_p): Remove usage of ENTRY_BLOCK_PTR macro.
+       (get_loop_body_in_bfs_order): Remove usage of EXIT_BLOCK_PTR macro.
+       (get_loop_body_in_dom_order): Likewise.
+       (get_loop_body): Likewise.
+       * cfgloopanal.c (mark_irreducible_loops): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * cfgloopmanip.c (create_preheader): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (remove_path): Remove usage of EXIT_BLOCK_PTR macro.
+       (fix_bb_placement): Likewise.
+       * cfgrtl.c (rtl_block_empty_p): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (rtl_can_remove_branch_p): Remove usage of EXIT_BLOCK_PTR macro.
+       (cfg_layout_split_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (rtl_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
+       (cfg_layout_can_merge_blocks_p): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (cfg_layout_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (fixup_fallthru_exit_predecessor): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (fixup_reorder_chain): Likewise.
+       (relink_block_chain): Likewise.
+       (cfg_layout_delete_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (rtl_verify_bb_layout): Remove usage of ENTRY_BLOCK_PTR macro.
+       (cfg_layout_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (force_one_exit_fallthru): Likewise.
+       (rtl_verify_fallthru): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (rtl_verify_edges): Likewise.
+       (commit_edge_insertions): Likewise.
+       (commit_one_edge_insertion): Likewise.
+       (rtl_split_edge): Likewise.
+       (force_nonfallthru_and_redirect): Likewise.
+       (outof_cfg_layout_mode): Remove usage of EXIT_BLOCK_PTR macro.
+       (skip_insns_after_block): Likewise.
+       (fixup_partition_crossing): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (purge_dead_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (rtl_can_merge_blocks): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (contains_no_active_insn_p): Likewise.
+       (emit_insn_at_entry): Remove usage of ENTRY_BLOCK_PTR macro.
+       (entry_of_function): Likewise.
+       (last_bb_in_partition): Remove usage of EXIT_BLOCK_PTR macro.
+       (fixup_new_cold_bb): Likewise.
+       (patch_jump_insn): Likewise.
+       (try_redirect_by_replacing_jump): Likewise.
+       (block_label): Likewise.
+       (could_fall_through): Likewise.
+       (can_fallthru): Likewise.
+       * cgraphbuild.c (cgraph_rebuild_references): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (rebuild_cgraph_edges): Likewise.
+       * cgraphunit.c (init_lowered_empty_function): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (expand_thunk): Remove usage of EXIT_BLOCK_PTR macro.
+       * combine.c (get_last_value): Remove usage of ENTRY_BLOCK_PTR macro.
+       (distribute_links): Remove usage of EXIT_BLOCK_PTR macro.
+       (get_last_value_validate): Remove usage of ENTRY_BLOCK_PTR macro.
+       (try_combine): Remove usage of EXIT_BLOCK_PTR macro.
+       (reg_num_sign_bit_copies_for_combine): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (reg_nonzero_bits_for_combine): Likewise.
+       (set_nonzero_bits_and_sign_copies): Likewise.
+       (combine_instructions): Likewise.
+       * cprop.c (one_cprop_pass): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (bypass_conditional_jumps): Likewise.
+       (bypass_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (find_implicit_sets): Likewise.
+       (cprop_jump): Likewise.
+       * cse.c (cse_cc_succs): Likewise.
+       (cse_find_path): Likewise.
+       * df-problems.c (df_lr_confluence_0): Likewise.
+       * df-scan.c (df_entry_block_defs_collect): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (df_exit_block_uses_collect): Remove usage of EXIT_BLOCK_PTR macro.
+       * dominance.c (iterate_fix_dominators): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (calc_idoms): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (determine_dominators_for_sons): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (calc_dfs_tree): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (prune_bbs_to_update_dominators): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (calc_dfs_tree_nonrec): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       * domwalk.c (cmp_bb_postorder): Likewise.
+       * dse.c (dse_step1): Remove usage of EXIT_BLOCK_PTR macro.
+       * except.c (finish_eh_generation): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (sjlj_emit_function_enter): Likewise.
+       * final.c (compute_alignments): Likewise.
+       * function.c (thread_prologue_and_epilogue_insns): Remove uses of
+       macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (reposition_prologue_and_epilogue_notes): Remove usage of
+       EXIT_BLOCK_PTR macro.
+       (convert_jumps_to_returns): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (regno_clobbered_at_setjmp): Remove usage of ENTRY_BLOCK_PTR macro.
+       (next_block_for_reg): Remove usage of EXIT_BLOCK_PTR macro.
+       * gcse.c (hoist_code): Remove usage of ENTRY_BLOCK_PTR macro.
+       (update_bb_reg_pressure): Remove usage of EXIT_BLOCK_PTR macro.
+       (compute_code_hoist_vbeinout): Likewise.
+       (should_hoist_expr_to_dom): Remove usage of ENTRY_BLOCK_PTR macro.
+       (pre_expr_reaches_here_p_work): Likewise.
+       * gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
+       (gimple_find_edge_insert_loc): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       * gimple-ssa-strength-reduction.c (slsr_process_phi): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * graph.c (draw_cfg_nodes_for_loop): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       * graphite-clast-to-gimple.c (translate_clast_user): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * graphite-scop-detection.c (build_scops): Likewise.
+       (create_sese_edges): Remove usage of EXIT_BLOCK_PTR macro.
+       (scopdet_basic_block_info): Remove usage of ENTRY_BLOCK_PTR macro.
+       * haifa-sched.c (restore_bb_notes): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       (unlink_bb_notes): Likewise.
+       (create_check_block_twin): Likewise.
+       (init_before_recovery): Likewise.
+       (sched_extend_bb): Likewise.
+       (priority): Likewise.
+       * hw-doloop.c (reorder_loops): Likewise.
+       (discover_loop): Likewise.
+       * ifcvt.c (dead_or_predicable): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (find_if_case_1): Remove usage of EXIT_BLOCK_PTR macro.
+       (block_has_only_trap): Likewise.
+       (cond_exec_find_if_block): Likewise.
+       (merge_if_block): Likewise.
+       * ipa-inline-analysis.c (param_change_prob): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (record_modified): Likewise.
+       * ipa-pure-const.c (execute_warn_function_noreturn): Remove usage of
+       EXIT_BLOCK_PTR macro.
+       (local_pure_const): Likewise.
+       * ipa-split.c (split_function): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (find_split_points): Likewise.
+       (consider_split): Likewise.
+       (find_return_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (verify_non_ssa_vars): Remove usage of ENTRY_BLOCK_PTR macro.
+       * ira-build.c (ira_loop_tree_body_rev_postorder): Likewise.
+       * ira-color.c (print_loop_title): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       * ira-emit.c (entered_from_non_parent_p): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (ira_emit): Remove usage of EXIT_BLOCK_PTR macro.
+       * ira-int.h (ira_assert): Remove usage of ENTRY_BLOCK_PTR macro.
+       * ira.c (split_live_ranges_for_shrink_wrap): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * lcm.c (compute_rev_insert_delete): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (compute_nearerout): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (compute_farthest): Likewise.
+       (compute_available): Likewise.
+       (compute_insert_delete): Remove usage of EXIT_BLOCK_PTR macro.
+       (compute_laterin): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (compute_earliest): Likewise.
+       (compute_antinout_edge): Likewise.
+       * loop-iv.c (simplify_using_initial_values): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * loop-unswitch.c (unswitch_loop): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       * lra-assigns.c (find_hard_regno_for): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       * lra-constraints.c (lra_inheritance): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       * lra-lives.c (lra_create_live_ranges): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * lra.c (has_nonexceptional_receiver): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       * lto-streamer-in.c (input_function): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       * lto-streamer-out.c (output_cfg): Likewise.
+       * mcf.c (adjust_cfg_counts): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (create_fixup_graph): Remove usage of ENTRY_BLOCK_PTR macro.
+       * mode-switching.c (optimize_mode_switching): Likewise.
+       (create_pre_exit): Remove usage of EXIT_BLOCK_PTR macro.
+       * modulo-sched.c (rest_of_handle_sms): Likewise.
+       (canon_loop): Likewise.
+       * omp-low.c (build_omp_regions): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       * postreload-gcse.c (eliminate_partially_redundant_loads): Remove uses
+       of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * predict.c (rebuild_frequencies): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (propagate_freq): Remove usage of EXIT_BLOCK_PTR macro.
+       (estimate_bb_frequencies): Remove usage of ENTRY_BLOCK_PTR macro.
+       (tree_estimate_probability_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (expensive_function_p): Remove usage of ENTRY_BLOCK_PTR macro.
+       (tree_bb_level_predictions): Remove usage of EXIT_BLOCK_PTR macro.
+       (counts_to_freqs): Remove usage of ENTRY_BLOCK_PTR macro.
+       (apply_return_prediction): Remove usage of EXIT_BLOCK_PTR macro.
+       (estimate_loops): Remove usage of ENTRY_BLOCK_PTR macro.
+       (gimple_predict_edge): Likewise.
+       (probably_never_executed): Likewise.
+       * profile.c (find_spanning_tree): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (branch_prob): Likewise.
+       (compute_branch_probabilities): Likewise.
+       (compute_frequency_overlap): Remove usage of ENTRY_BLOCK_PTR macro.
+       (is_inconsistent): Remove usage of EXIT_BLOCK_PTR macro.
+       (read_profile_edge_counts): Remove usage of ENTRY_BLOCK_PTR macro.
+       (set_bb_counts): Likewise.
+       (correct_negative_edge_counts): Likewise.
+       (get_exec_counts): Likewise.
+       (instrument_values): Likewise.
+       (instrument_edges): Likewise.
+       * reg-stack.c (convert_regs): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (compensate_edges): Remove usage of ENTRY_BLOCK_PTR macro.
+       (convert_regs_exit): Remove usage of EXIT_BLOCK_PTR macro.
+       (convert_regs_entry): Remove usage of ENTRY_BLOCK_PTR macro.
+       (reg_to_stack): Likewise.
+       * regs.h (REG_N_SETS): Likewise.
+       * reload.c (find_dummy_reload): Likewise.
+       (combine_reloads): Likewise.
+       (push_reload): Likewise.
+       * reload1.c (has_nonexceptional_receiver): Remove usage of
+       EXIT_BLOCK_PTR macro.
+       * resource.c (mark_target_live_regs): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (find_basic_block): Likewise.
+       * sched-ebb.c (ebb_add_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (schedule_ebbs): Likewise.
+       * sched-int.h (sel_sched_p): Likewise.
+       * sched-rgn.c (compute_dom_prob_ps): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (rgn_add_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (haifa_find_rgns): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (propagate_deps): Remove usage of EXIT_BLOCK_PTR macro.
+       (extend_rgns): Likewise.
+       (find_single_block_region): Likewise.
+       * sel-sched-ir.c (sel_remove_loop_preheader): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (setup_nop_and_exit_insns): Remove usage of EXIT_BLOCK_PTR macro.
+       (sel_create_recovery_block): Likewise.
+       (bb_ends_ebb_p): Likewise.
+       (sel_bb_end): Likewise.
+       (sel_bb_head): Likewise.
+       (free_lv_sets): Likewise.
+       (init_lv_sets): Likewise.
+       (tidy_control_flow): Likewise.
+       (maybe_tidy_empty_bb): Likewise.
+       * sel-sched-ir.h (_succ_iter_cond): Likewise.
+       (_succ_iter_start): Likewise.
+       (sel_bb_empty_or_nop_p): Likewise.
+       (get_loop_exit_edges_unique_dests): Likewise.
+       (inner_loop_header_p): Likewise.
+       * sel-sched.c (create_block_for_bookkeeping): Likewise.
+       (find_block_for_bookkeeping): Likewise.
+       * store-motion.c (remove_reachable_equiv_notes): Likewise.
+       (insert_store): Likewise.
+       * trans-mem.c (ipa_tm_transform_clone): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (tm_memopt_compute_available): Remove usage of EXIT_BLOCK_PTR macro.
+       (ipa_tm_scan_irr_function): Remove usage of ENTRY_BLOCK_PTR macro.
+       (gate_tm_init): Likewise.
+       (tm_region_init): Likewise.
+       * tree-cfg.c (execute_fixup_cfg): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (execute_warn_function_return): Remove usage of EXIT_BLOCK_PTR macro.
+       (split_critical_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (print_loops): Remove usage of ENTRY_BLOCK_PTR macro.
+       (move_sese_region_to_fn): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (gimple_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (gimple_verify_flow_info): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (remove_edge_and_dominated_blocks): Remove usage of EXIT_BLOCK_PTR
+       macro.
+       (make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (gimple_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
+       (make_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
+       (build_gimple_cfg): Likewise.
+       (gimple_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (gimple_can_merge_blocks_p): Likewise.
+       * tree-cfgcleanup.c (tree_forwarder_block_p): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * tree-complex.c (update_parameter_components): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * tree-if-conv.c (get_loop_body_in_if_conv_order): Remove usage of
+       EXIT_BLOCK_PTR macro.
+       * tree-inline.c (tree_function_versioning): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (delete_unreachable_blocks_update_callgraph): Likewise.
+       (initialize_cfun): Likewise.
+       (copy_cfg_body): Remove usage of ENTRY_BLOCK_PTR macro.
+       (copy_edges_for_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (remap_ssa_name): Remove usage of ENTRY_BLOCK_PTR macro.
+       * tree-into-ssa.c (update_ssa): Likewise.
+       (maybe_register_def): Remove usage of EXIT_BLOCK_PTR macro.
+       (insert_updated_phi_nodes_for): Remove usage of ENTRY_BLOCK_PTR macro.
+       (rewrite_into_ssa): Likewise.
+       (rewrite_debug_stmt_uses): Likewise.
+       * tree-outof-ssa.c (expand_phi_nodes): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * tree-profile.c (gimple_gen_ic_func_profiler): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * tree-scalar-evolution.h (block_before_loop): Likewise.
+       * tree-sra.c (sra_ipa_reset_debug_stmts): Likewise.
+       (dump_dereferences_table): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (analyze_caller_dereference_legality): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (propagate_dereference_distances): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (initialize_parameter_reductions): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       * tree-ssa-ccp.c (gsi_prev_dom_bb_nondebug): Likewise.
+       (optimize_stack_restore): Remove usage of EXIT_BLOCK_PTR macro.
+       * tree-ssa-coalesce.c (create_outofssa_var_map): Likewise.
+       * tree-ssa-dce.c (eliminate_unnecessary_stmts): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (remove_dead_stmt): Remove usage of EXIT_BLOCK_PTR macro.
+       (propagate_necessity): Remove usage of ENTRY_BLOCK_PTR macro.
+       (mark_control_dependent_edges_necessary): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       * tree-ssa-dom.c (eliminate_degenerate_phis): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (tree_ssa_dominator_optimize): Remove usage of EXIT_BLOCK_PTR macro.
+       * tree-ssa-live.c (verify_live_on_entry): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (calculate_live_on_exit): Likewise.
+       (set_var_live_on_entry): Remove usage of ENTRY_BLOCK_PTR macro.
+       (loe_visit_block): Likewise.
+       * tree-ssa-live.h (live_on_exit): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (live_on_entry): Likewise.
+       * tree-ssa-loop-ivopts.c (find_interesting_uses): Remove usage of
+       EXIT_BLOCK_PTR macro.
+       * tree-ssa-loop-manip.c (compute_live_loop_exits): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * tree-ssa-loop-niter.c (simplify_using_initial_conditions): Likewise.
+       (bound_difference): Likewise.
+       * tree-ssa-loop-prefetch.c (may_use_storent_in_loop_p): Remove usage
+       of EXIT_BLOCK_PTR macro.
+       * tree-ssa-loop-unswitch.c (simplify_using_entry_checks): Remove usage
+       of ENTRY_BLOCK_PTR macro.
+       * tree-ssa-math-opts.c (register_division_in): Likewise.
+       * tree-ssa-phiprop.c (tree_ssa_phiprop): Likewise.
+       * tree-ssa-pre.c (compute_avail): Likewise.
+       (compute_antic): Remove usage of EXIT_BLOCK_PTR macro.
+       (insert): Remove usage of ENTRY_BLOCK_PTR macro.
+       * tree-ssa-propagate.c (ssa_prop_init): Likewise.
+       (simulate_block): Remove usage of EXIT_BLOCK_PTR macro.
+       (cfg_blocks_add): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       (add_control_edge): Remove usage of EXIT_BLOCK_PTR macro.
+       * tree-ssa-reassoc.c (do_reassoc): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (build_and_add_sum): Remove usage of ENTRY_BLOCK_PTR macro.
+       * tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
+       (execute_sink_code): Remove usage of EXIT_BLOCK_PTR macro.
+       * tree-ssa-uninit.c (find_dom): Remove usage of ENTRY_BLOCK_PTR macro.
+       (compute_control_dep_chain): Remove usage of EXIT_BLOCK_PTR macro.
+       (find_pdom): Likewise.
+       (warn_uninitialized_vars): Remove usage of ENTRY_BLOCK_PTR macro.
+       * tree-stdarg.c (reachable_at_most_once): Likewise.
+       * tree-tailcall.c (tree_optimize_tail_calls_1): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (eliminate_tail_call): Likewise.
+       * tsan.c (instrument_func_entry): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       (instrument_func_exit): Remove usage of EXIT_BLOCK_PTR macro.
+       * var-tracking.c (vt_initialize): Remove uses of macros:
+       ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
+       (vt_add_function_parameter): Remove usage of ENTRY_BLOCK_PTR macro.
+       (vt_find_locations): Remove usage of EXIT_BLOCK_PTR macro.
+       (vt_stack_adjustments): Remove uses of macros: ENTRY_BLOCK_PTR,
+       EXIT_BLOCK_PTR.
+       * varasm.c (assemble_start_function): Remove usage of ENTRY_BLOCK_PTR
+       macro.
+       * config/bfin/bfin.c (hwloop_optimize): Likewise.
+       * config/nds32/nds32.c (nds32_fp_as_gp_check_available): Remove usage
+       of EXIT_BLOCK_PTR macro.
+       * config/arm/arm.c (require_pic_register): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       (arm_r3_live_at_start_p): Likewise.
+       (any_sibcall_could_use_r3): Remove usage of EXIT_BLOCK_PTR macro.
+       * config/rs6000/rs6000.c (rs6000_emit_prologue): Likewise.
+       * config/frv/frv.c (frv_optimize_membar_global): Likewise.
+       * config/alpha/alpha.c (alpha_gp_save_rtx): Remove usage of
+       ENTRY_BLOCK_PTR macro.
+       * config/i386/i386.c (ix86_count_insn): Likewise.
+       (ix86_seh_fixup_eh_fallthru): Remove usage of EXIT_BLOCK_PTR macro.
+       (ix86_pad_short_function): Likewise.
+       (ix86_compute_frame_layout): Remove usage of ENTRY_BLOCK_PTR macro.
+       (ix86_pad_returns): Remove usage of EXIT_BLOCK_PTR macro.
+       (ix86_eax_live_at_start_p): Remove usage of ENTRY_BLOCK_PTR macro.
+       (add_condition_to_bb): Remove usage of EXIT_BLOCK_PTR macro.
+       (ix86_expand_epilogue): Likewise.
+       * config/ia64/ia64.c (ia64_asm_unwind_emit): Likewise.
+       (ia64_expand_prologue): Likewise.
+
 2013-11-19  Catherine Moore  <clm@codesourcery.com>
 
        * doc/invoke.texi (mfix-rm7000, mno-fix-rm7000): Document.
index 38391beffb388ddda49a46dec2a3923f4a72d5e7..58bacc33f879a48e14244e2c2fc0160a848ea9d7 100644 (file)
@@ -312,8 +312,8 @@ struct GTY(()) control_flow_graph {
 };
 
 /* Defines for accessing the fields of the CFG structure for function FN.  */
-#define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN)     ((FN)->cfg->x_entry_block_ptr)
-#define EXIT_BLOCK_PTR_FOR_FUNCTION(FN)             ((FN)->cfg->x_exit_block_ptr)
+#define ENTRY_BLOCK_PTR_FOR_FN(FN)          ((FN)->cfg->x_entry_block_ptr)
+#define EXIT_BLOCK_PTR_FOR_FN(FN)           ((FN)->cfg->x_exit_block_ptr)
 #define basic_block_info_for_function(FN)    ((FN)->cfg->x_basic_block_info)
 #define n_basic_blocks_for_fn(FN)           ((FN)->cfg->x_n_basic_blocks)
 #define n_edges_for_fn(FN)                  ((FN)->cfg->x_n_edges)
@@ -327,8 +327,6 @@ struct GTY(()) control_flow_graph {
   ((*basic_block_info_for_function (FN))[(N)] = (BB))
 
 /* Defines for textual backward source compatibility.  */
-#define ENTRY_BLOCK_PTR                (cfun->cfg->x_entry_block_ptr)
-#define EXIT_BLOCK_PTR         (cfun->cfg->x_exit_block_ptr)
 #define basic_block_info       (cfun->cfg->x_basic_block_info)
 #define last_basic_block       (cfun->cfg->x_last_basic_block)
 #define label_to_block_map     (cfun->cfg->x_label_to_block_map)
@@ -378,10 +376,10 @@ struct GTY(()) control_flow_graph {
    exit block).  */
 
 #define FOR_ALL_BB(BB) \
-  for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)
+  for (BB = ENTRY_BLOCK_PTR_FOR_FN (cfun); BB; BB = BB->next_bb)
 
 #define FOR_ALL_BB_FN(BB, FN) \
-  for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
+  for (BB = ENTRY_BLOCK_PTR_FOR_FN (FN); BB; BB = BB->next_bb)
 
 \f
 /* Stuff for recording basic block info.  */
index 45bf1289ec0772c9b8455a3e5487701cb2e88ca5..fc7b5b758eaf60004bd1a666a02bf4220a2114f3 100644 (file)
@@ -275,7 +275,7 @@ find_traces (int *n_traces, struct trace *traces)
   heap = fibheap_new ();
   max_entry_frequency = 0;
   max_entry_count = 0;
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     {
       bbd[e->dest->index].heap = heap;
       bbd[e->dest->index].node = fibheap_insert (heap, bb_to_key (e->dest),
@@ -348,7 +348,7 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
       edge_iterator ei;
 
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (e->dest != EXIT_BLOCK_PTR
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && bb_visited_trace (e->dest) != trace_n
            && (e->flags & EDGE_CAN_FALLTHRU)
            && !(e->flags & EDGE_COMPLEX))
@@ -524,7 +524,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
            {
              gcc_assert (!(e->flags & EDGE_FAKE));
 
-             if (e->dest == EXIT_BLOCK_PTR)
+             if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                continue;
 
              if (bb_visited_trace (e->dest)
@@ -605,7 +605,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (e == best_edge
-                 || e->dest == EXIT_BLOCK_PTR
+                 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                  || bb_visited_trace (e->dest))
                continue;
 
@@ -680,7 +680,8 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
                             header is not the first block of the function
                             we can rotate the loop.  */
 
-                         if (best_edge->dest != ENTRY_BLOCK_PTR->next_bb)
+                         if (best_edge->dest
+                             != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
                            {
                              if (dump_file)
                                {
@@ -776,7 +777,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
         is an end of the trace).  */
       FOR_EACH_EDGE (e, ei, bb->succs)
        {
-         if (e->dest == EXIT_BLOCK_PTR
+         if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
              || bb_visited_trace (e->dest))
            continue;
 
@@ -885,7 +886,8 @@ bb_to_key (basic_block bb)
      or whose predecessor edge is EDGE_DFS_BACK.  */
   FOR_EACH_EDGE (e, ei, bb->preds)
     {
-      if ((e->src != ENTRY_BLOCK_PTR && bbd[e->src->index].end_of_trace >= 0)
+      if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+          && bbd[e->src->index].end_of_trace >= 0)
          || (e->flags & EDGE_DFS_BACK))
        {
          int edge_freq = EDGE_FREQUENCY (e);
@@ -1098,7 +1100,7 @@ connect_traces (int n_traces, struct trace *traces)
            {
              int si = e->src->index;
 
-             if (e->src != ENTRY_BLOCK_PTR
+             if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                  && (e->flags & EDGE_CAN_FALLTHRU)
                  && !(e->flags & EDGE_COMPLEX)
                  && bbd[si].end_of_trace >= 0
@@ -1141,7 +1143,7 @@ connect_traces (int n_traces, struct trace *traces)
            {
              int di = e->dest->index;
 
-             if (e->dest != EXIT_BLOCK_PTR
+             if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && (e->flags & EDGE_CAN_FALLTHRU)
                  && !(e->flags & EDGE_COMPLEX)
                  && bbd[di].start_of_trace >= 0
@@ -1212,7 +1214,7 @@ connect_traces (int n_traces, struct trace *traces)
              bool try_copy = false;
 
              FOR_EACH_EDGE (e, ei, traces[t].last->succs)
-               if (e->dest != EXIT_BLOCK_PTR
+               if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                    && (e->flags & EDGE_CAN_FALLTHRU)
                    && !(e->flags & EDGE_COMPLEX)
                    && (!best || e->probability > best->probability))
@@ -1237,7 +1239,7 @@ connect_traces (int n_traces, struct trace *traces)
                      {
                        int di = e2->dest->index;
 
-                       if (e2->dest == EXIT_BLOCK_PTR
+                       if (e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                            || ((e2->flags & EDGE_CAN_FALLTHRU)
                                && !(e2->flags & EDGE_COMPLEX)
                                && bbd[di].start_of_trace >= 0
@@ -1253,7 +1255,7 @@ connect_traces (int n_traces, struct trace *traces)
                          {
                            best = e;
                            best2 = e2;
-                           if (e2->dest != EXIT_BLOCK_PTR)
+                           if (e2->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                              best2_len = traces[bbd[di].start_of_trace].length;
                            else
                              best2_len = INT_MAX;
@@ -1282,7 +1284,7 @@ connect_traces (int n_traces, struct trace *traces)
                               traces[t].last->index, best->dest->index);
                      if (!next_bb)
                        fputc ('\n', dump_file);
-                     else if (next_bb == EXIT_BLOCK_PTR)
+                     else if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
                        fprintf (dump_file, "exit\n");
                      else
                        fprintf (dump_file, "%d\n", next_bb->index);
@@ -1290,7 +1292,7 @@ connect_traces (int n_traces, struct trace *traces)
 
                  new_bb = copy_bb (best->dest, best, traces[t].last, t);
                  traces[t].last = new_bb;
-                 if (next_bb && next_bb != EXIT_BLOCK_PTR)
+                 if (next_bb && next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
                    {
                      t = bbd[next_bb->index].start_of_trace;
                      traces[last_trace].last->aux = traces[t].first;
@@ -1413,7 +1415,7 @@ fix_up_crossing_landing_pad (eh_landing_pad old_lp, basic_block old_bb)
   JUMP_LABEL (jump) = post_label;
 
   /* Create new basic block to be dest for lp.  */
-  last_bb = EXIT_BLOCK_PTR->prev_bb;
+  last_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
   new_bb = create_basic_block (new_label, jump, last_bb);
   new_bb->aux = last_bb->aux;
   last_bb->aux = new_bb;
@@ -1663,8 +1665,8 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
         /* We should never have EDGE_CROSSING set yet.  */
        gcc_checking_assert ((flags & EDGE_CROSSING) == 0);
 
-       if (e->src != ENTRY_BLOCK_PTR
-           && e->dest != EXIT_BLOCK_PTR
+       if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+           && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && BB_PARTITION (e->src) != BB_PARTITION (e->dest))
          {
            crossing_edges.safe_push (e);
@@ -1731,14 +1733,14 @@ add_labels_and_missing_jumps (vec<edge> crossing_edges)
       basic_block dest = e->dest;
       rtx label, new_jump;
 
-      if (dest == EXIT_BLOCK_PTR)
+      if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       /* Make sure dest has a label.  */
       label = block_label (dest);
 
       /* Nothing to do for non-fallthru edges.  */
-      if (src == ENTRY_BLOCK_PTR)
+      if (src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
       if ((e->flags & EDGE_FALLTHRU) == 0)
        continue;
@@ -1832,7 +1834,7 @@ fix_up_fall_thru_edges (void)
              }
        }
 
-      if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR))
+      if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
        {
          /* Check to see if the fall-thru edge is a crossing edge.  */
 
@@ -2066,7 +2068,7 @@ fix_crossing_conditional_branches (void)
                  new_jump = emit_jump_insn (gen_jump (old_label));
                  JUMP_LABEL (new_jump) = old_label;
 
-                 last_bb = EXIT_BLOCK_PTR->prev_bb;
+                 last_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
                  new_bb = create_basic_block (new_label, new_jump, last_bb);
                  new_bb->aux = last_bb->aux;
                  last_bb->aux = new_bb;
@@ -2319,7 +2321,7 @@ rest_of_handle_reorder_blocks (void)
   cleanup_cfg (CLEANUP_EXPENSIVE);
 
   FOR_EACH_BB (bb)
-    if (bb->next_bb != EXIT_BLOCK_PTR)
+    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
       bb->aux = bb->next_bb;
   cfg_layout_finalize ();
 
@@ -2415,7 +2417,7 @@ duplicate_computed_gotos (void)
       int size, all_flags;
 
       /* Build the reorder chain for the original order of blocks.  */
-      if (bb->next_bb != EXIT_BLOCK_PTR)
+      if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        bb->aux = bb->next_bb;
 
       /* Obviously the block has to end in a computed jump.  */
@@ -2465,7 +2467,7 @@ duplicate_computed_gotos (void)
         the exit block or the next block.
         The destination must have more than one predecessor.  */
       if (!single_succ_p (bb)
-         || single_succ (bb) == EXIT_BLOCK_PTR
+         || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
          || single_succ (bb) == bb->next_bb
          || single_pred_p (single_succ (bb)))
        continue;
index 348e40bdb46dc3cd1d399b21c3163f915c0a2d5a..09eea06e379868e877754f5a9ceefaf79e9e16ac 100644 (file)
@@ -1328,7 +1328,8 @@ migrate_btr_def (btr_def def, int min_cost)
   def_basic_block_freq = basic_block_freq (def->bb);
 
   for (attempt = get_immediate_dominator (CDI_DOMINATORS, def->bb);
-       !give_up && attempt && attempt != ENTRY_BLOCK_PTR && def->cost >= min_cost;
+       !give_up && attempt && attempt != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+       && def->cost >= min_cost;
        attempt = get_immediate_dominator (CDI_DOMINATORS, attempt))
     {
       /* Try to move the instruction that sets the target register into
index 166ad38c4969208d8a221c1bfd0a28a7c9570def..e35eee9a9bcd803b090822c89392c164c7915ec8 100644 (file)
--- a/gcc/cfg.c
+++ b/gcc/cfg.c
@@ -70,16 +70,16 @@ init_flow (struct function *the_fun)
   if (!the_fun->cfg)
     the_fun->cfg = ggc_alloc_cleared_control_flow_graph ();
   n_edges_for_fn (the_fun) = 0;
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)
+  ENTRY_BLOCK_PTR_FOR_FN (the_fun)
     = ggc_alloc_cleared_basic_block_def ();
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->index = ENTRY_BLOCK;
-  EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)
+  ENTRY_BLOCK_PTR_FOR_FN (the_fun)->index = ENTRY_BLOCK;
+  EXIT_BLOCK_PTR_FOR_FN (the_fun)
     = ggc_alloc_cleared_basic_block_def ();
-  EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->index = EXIT_BLOCK;
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->next_bb
-    = EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun);
-  EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->prev_bb
-    = ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun);
+  EXIT_BLOCK_PTR_FOR_FN (the_fun)->index = EXIT_BLOCK;
+  ENTRY_BLOCK_PTR_FOR_FN (the_fun)->next_bb
+    = EXIT_BLOCK_PTR_FOR_FN (the_fun);
+  EXIT_BLOCK_PTR_FOR_FN (the_fun)->prev_bb
+    = ENTRY_BLOCK_PTR_FOR_FN (the_fun);
 }
 \f
 /* Helper function for remove_edge and clear_edges.  Frees edge structure
@@ -109,10 +109,10 @@ clear_edges (void)
       vec_safe_truncate (bb->preds, 0);
     }
 
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     free_edge (e);
-  vec_safe_truncate (EXIT_BLOCK_PTR->preds, 0);
-  vec_safe_truncate (ENTRY_BLOCK_PTR->succs, 0);
+  vec_safe_truncate (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, 0);
+  vec_safe_truncate (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs, 0);
 
   gcc_assert (!n_edges_for_fn (cfun));
 }
@@ -153,8 +153,8 @@ compact_blocks (void)
 {
   int i;
 
-  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
-  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
+  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   if (df)
     df_compact_blocks ();
@@ -282,8 +282,8 @@ edge
 cached_make_edge (sbitmap edge_cache, basic_block src, basic_block dst, int flags)
 {
   if (edge_cache == NULL
-      || src == ENTRY_BLOCK_PTR
-      || dst == EXIT_BLOCK_PTR)
+      || src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+      || dst == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return make_edge (src, dst, flags);
 
   /* Does the requested edge already exist?  */
@@ -387,7 +387,7 @@ clear_bb_flags (void)
 {
   basic_block bb;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     bb->flags &= BB_FLAGS_TO_PRESERVE;
 }
 \f
@@ -411,7 +411,7 @@ check_bb_profile (basic_block bb, FILE * file, int indent, int flags)
   if (profile_status_for_function (fun) == PROFILE_ABSENT)
     return;
 
-  if (bb != EXIT_BLOCK_PTR_FOR_FUNCTION (fun))
+  if (bb != EXIT_BLOCK_PTR_FOR_FN (fun))
     {
       FOR_EACH_EDGE (e, ei, bb->succs)
        sum += e->probability;
@@ -428,7 +428,7 @@ check_bb_profile (basic_block bb, FILE * file, int indent, int flags)
                 (flags & TDF_COMMENT) ? ";; " : "", s_indent,
                 (int) lsum, (int) bb->count);
     }
-    if (bb != ENTRY_BLOCK_PTR_FOR_FUNCTION (fun))
+    if (bb != ENTRY_BLOCK_PTR_FOR_FN (fun))
     {
       sum = 0;
       FOR_EACH_EDGE (e, ei, bb->preds)
@@ -641,7 +641,8 @@ alloc_aux_for_edges (int size)
     {
       basic_block bb;
 
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                     EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
        {
          edge e;
          edge_iterator ei;
@@ -660,7 +661,8 @@ clear_aux_for_edges (void)
   basic_block bb;
   edge e;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       edge_iterator ei;
       FOR_EACH_EDGE (e, ei, bb->succs)
index 1c90f8c5b7e7023adc3befb9c00a3eac71121caa..30376b3db4c0c555e0b4ab3163110853a8197c06 100644 (file)
@@ -86,7 +86,7 @@ mark_dfs_back_edges (void)
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
+  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
 
   while (sp)
     {
@@ -101,7 +101,8 @@ mark_dfs_back_edges (void)
       ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
 
       /* Check if the edge destination has been visited yet.  */
-      if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
+      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && ! bitmap_bit_p (visited,
+                                                                 dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);
@@ -118,12 +119,14 @@ mark_dfs_back_edges (void)
        }
       else
        {
-         if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
+         if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
+             && src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;
 
-         if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
+         if (ei_one_before_end_p (ei)
+             && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            post[src->index] = postnum++;
 
          if (!ei_one_before_end_p (ei))
@@ -163,7 +166,7 @@ find_unreachable_blocks (void)
      be only one.  It isn't inconceivable that we might one day directly
      support Fortran alternate entry points.  */
 
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     {
       *tos++ = e->dest;
 
@@ -217,7 +220,8 @@ create_edge_list (void)
   /* Determine the number of edges in the flow graph by counting successor
      edges on each basic block.  */
   num_edges = 0;
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       num_edges += EDGE_COUNT (bb->succs);
     }
@@ -229,7 +233,8 @@ create_edge_list (void)
   num_edges = 0;
 
   /* Follow successors of blocks, and register these edges.  */
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     FOR_EACH_EDGE (e, ei, bb->succs)
       elist->index_to_edge[num_edges++] = e;
 
@@ -261,12 +266,12 @@ print_edge_list (FILE *f, struct edge_list *elist)
   for (x = 0; x < elist->num_edges; x++)
     {
       fprintf (f, " %-4d - edge(", x);
-      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
+      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "entry,");
       else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
 
-      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
+      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "exit)\n");
       else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
@@ -285,7 +290,8 @@ verify_edge_list (FILE *f, struct edge_list *elist)
   basic_block bb, p, s;
   edge_iterator ei;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       FOR_EACH_EDGE (e, ei, bb->succs)
        {
@@ -310,8 +316,9 @@ verify_edge_list (FILE *f, struct edge_list *elist)
   /* We've verified that all the edges are in the list, now lets make sure
      there are no spurious edges in the list.  This is an expensive check!  */
 
-  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
-    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
+  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
+    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
       {
        int found_edge = 0;
 
@@ -348,9 +355,9 @@ void
 control_dependences::set_control_dependence_map_bit (basic_block bb,
                                                     int edge_index)
 {
-  if (bb == ENTRY_BLOCK_PTR)
+  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return;
-  gcc_assert (bb != EXIT_BLOCK_PTR);
+  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
   bitmap_set_bit (control_dependence_map[bb->index], edge_index);
 }
 
@@ -367,15 +374,15 @@ control_dependences::clear_control_dependence_bitmap (basic_block bb)
 static inline basic_block
 find_pdom (basic_block block)
 {
-  gcc_assert (block != ENTRY_BLOCK_PTR);
+  gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
-  if (block == EXIT_BLOCK_PTR)
-    return EXIT_BLOCK_PTR;
+  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
+    return EXIT_BLOCK_PTR_FOR_FN (cfun);
   else
     {
       basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
       if (! bb)
-       return EXIT_BLOCK_PTR;
+       return EXIT_BLOCK_PTR_FOR_FN (cfun);
       return bb;
     }
 }
@@ -389,15 +396,17 @@ control_dependences::find_control_dependence (int edge_index)
   basic_block current_block;
   basic_block ending_block;
 
-  gcc_assert (INDEX_EDGE_PRED_BB (m_el, edge_index) != EXIT_BLOCK_PTR);
+  gcc_assert (INDEX_EDGE_PRED_BB (m_el, edge_index)
+             != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
-  if (INDEX_EDGE_PRED_BB (m_el, edge_index) == ENTRY_BLOCK_PTR)
-    ending_block = single_succ (ENTRY_BLOCK_PTR);
+  if (INDEX_EDGE_PRED_BB (m_el, edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+    ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   else
     ending_block = find_pdom (INDEX_EDGE_PRED_BB (m_el, edge_index));
 
   for (current_block = INDEX_EDGE_SUCC_BB (m_el, edge_index);
-       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
+       current_block != ending_block
+       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
        current_block = find_pdom (current_block))
     {
       edge e = INDEX_EDGE (m_el, edge_index);
@@ -523,7 +532,7 @@ remove_fake_edges (void)
 {
   basic_block bb;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
     remove_fake_predecessors (bb);
 }
 
@@ -532,7 +541,7 @@ remove_fake_edges (void)
 void
 remove_fake_exit_edges (void)
 {
-  remove_fake_predecessors (EXIT_BLOCK_PTR);
+  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
 }
 
 
@@ -547,7 +556,7 @@ add_noreturn_fake_exit_edges (void)
 
   FOR_EACH_BB (bb)
     if (EDGE_COUNT (bb->succs) == 0)
-      make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
 }
 
 /* This function adds a fake edge between any infinite loops to the
@@ -564,14 +573,14 @@ add_noreturn_fake_exit_edges (void)
 void
 connect_infinite_loops_to_exit (void)
 {
-  basic_block unvisited_block = EXIT_BLOCK_PTR;
+  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
   basic_block deadend_block;
   struct depth_first_search_dsS dfs_ds;
 
   /* Perform depth-first search in the reverse graph to find nodes
      reachable from the exit block.  */
   flow_dfs_compute_reverse_init (&dfs_ds);
-  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
+  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* Repeatedly add fake edges, updating the unreachable nodes.  */
   while (1)
@@ -582,7 +591,7 @@ connect_infinite_loops_to_exit (void)
        break;
 
       deadend_block = dfs_find_deadend (unvisited_block);
-      make_edge (deadend_block, EXIT_BLOCK_PTR, EDGE_FAKE);
+      make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
       flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
     }
 
@@ -619,7 +628,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
+  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
 
   while (sp)
     {
@@ -633,7 +642,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
       dest = ei_edge (ei)->dest;
 
       /* Check if the edge destination has been visited yet.  */
-      if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
+      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
+         && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);
@@ -647,7 +657,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
        }
       else
        {
-         if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
+         if (ei_one_before_end_p (ei)
+             && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            post_order[post_order_num++] = src->index;
 
          if (!ei_one_before_end_p (ei))
@@ -671,7 +682,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
     {
       basic_block b;
       basic_block next_bb;
-      for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
+      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
+          != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
        {
          next_bb = b->next_bb;
 
@@ -813,7 +825,8 @@ inverted_post_order_compute (int *post_order)
             }
           else
             {
-              if (bb != EXIT_BLOCK_PTR && ei_one_before_end_p (ei))
+             if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
+                 && ei_one_before_end_p (ei))
                 post_order[post_order_num++] = bb->index;
 
               if (!ei_one_before_end_p (ei))
@@ -826,7 +839,8 @@ inverted_post_order_compute (int *post_order)
       /* Detect any infinite loop and activate the kludge.
          Note that this doesn't check EXIT_BLOCK itself
          since EXIT_BLOCK is always added after the outer do-while loop.  */
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                     EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
         if (!bitmap_bit_p (visited, bb->index))
           {
             has_unvisited_bb = true;
@@ -859,7 +873,7 @@ inverted_post_order_compute (int *post_order)
         {
           /* No blocks are reachable from EXIT at all.
              Find a dead-end from the ENTRY, and restart the iteration. */
-          basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
+         basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
           gcc_assert (be != NULL);
           bitmap_set_bit (visited, be->index);
           stack[sp++] = ei_start (be->preds);
@@ -923,7 +937,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->succs);
+  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);
 
   while (sp)
     {
@@ -937,7 +951,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
       dest = ei_edge (ei)->dest;
 
       /* Check if the edge destination has been visited yet.  */
-      if (dest != EXIT_BLOCK_PTR_FOR_FUNCTION (fn)
+      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
@@ -960,7 +974,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
       else
        {
          if (ei_one_before_end_p (ei)
-             && src != ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)
+             && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
@@ -1230,7 +1244,7 @@ compute_dominance_frontiers_1 (bitmap_head *frontiers)
            {
              basic_block runner = p->src;
              basic_block domsb;
-             if (runner == ENTRY_BLOCK_PTR)
+             if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
                continue;
 
              domsb = get_immediate_dominator (CDI_DOMINATORS, b);
@@ -1337,7 +1351,7 @@ bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
   for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
     {
       e = EDGE_SUCC (b, ix);
-      if (e->dest == EXIT_BLOCK_PTR)
+      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       bitmap_copy (dst, src[e->dest->index]);
@@ -1353,7 +1367,7 @@ bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
        SBITMAP_ELT_TYPE *p, *r;
 
        e = EDGE_SUCC (b, ix);
-       if (e->dest == EXIT_BLOCK_PTR)
+       if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          continue;
 
        p = src[e->dest->index]->elms;
@@ -1378,7 +1392,7 @@ bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
   for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
     {
       e = EDGE_PRED (b, ix);
-      if (e->src == ENTRY_BLOCK_PTR)
+      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       bitmap_copy (dst, src[e->src->index]);
@@ -1394,7 +1408,7 @@ bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
        SBITMAP_ELT_TYPE *p, *r;
 
        e = EDGE_PRED (b, ix);
-       if (e->src == ENTRY_BLOCK_PTR)
+       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
          continue;
 
        p = src[e->src->index]->elms;
@@ -1419,7 +1433,7 @@ bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
   for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
     {
       e = EDGE_SUCC (b, ix);
-      if (e->dest == EXIT_BLOCK_PTR)
+      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       bitmap_copy (dst, src[e->dest->index]);
@@ -1435,7 +1449,7 @@ bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
        SBITMAP_ELT_TYPE *p, *r;
 
        e = EDGE_SUCC (b, ix);
-       if (e->dest == EXIT_BLOCK_PTR)
+       if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          continue;
 
        p = src[e->dest->index]->elms;
@@ -1460,7 +1474,7 @@ bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
   for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
     {
       e = EDGE_PRED (b, ix);
-      if (e->src== ENTRY_BLOCK_PTR)
+      if (e->src== ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       bitmap_copy (dst, src[e->src->index]);
@@ -1476,7 +1490,7 @@ bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
        SBITMAP_ELT_TYPE *p, *r;
 
        e = EDGE_PRED (b, ix);
-       if (e->src == ENTRY_BLOCK_PTR)
+       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
          continue;
 
        p = src[e->src->index]->elms;
@@ -1504,7 +1518,7 @@ single_pred_before_succ_order (void)
 
   bitmap_clear (visited);
 
-  MARK_VISITED (ENTRY_BLOCK_PTR);
+  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   FOR_EACH_BB (x)
     {
       if (VISITED_P (x))
index a9ed5f14b17325df17d5a07d8d484c0dc405c795..08534d4bdde18fe2e2eb80e24b17204c0d19e26e 100644 (file)
@@ -213,8 +213,8 @@ make_edges (basic_block min, basic_block max, int update_p)
 
   /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
      is always the entry.  */
-  if (min == ENTRY_BLOCK_PTR->next_bb)
-    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);
+  if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
+    make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);
 
   FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
     {
@@ -233,14 +233,14 @@ make_edges (basic_block min, basic_block max, int update_p)
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
-               if (e->dest != EXIT_BLOCK_PTR)
+               if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                  bitmap_set_bit (edge_cache, e->dest->index);
            }
        }
 
       if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
-       cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
+       cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
 
       /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */
@@ -294,7 +294,7 @@ make_edges (basic_block min, basic_block max, int update_p)
 
          /* Returns create an exit out.  */
          else if (returnjump_p (insn))
-           cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
+           cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
 
          /* Recognize asm goto and do the right thing.  */
          else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
@@ -318,7 +318,7 @@ make_edges (basic_block min, basic_block max, int update_p)
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
       if (code == CALL_INSN && SIBLING_CALL_P (insn))
-       cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
+       cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_SIBCALL | EDGE_ABNORMAL);
 
       /* If this is a CALL_INSN, then mark it as reaching the active EH
@@ -359,7 +359,7 @@ make_edges (basic_block min, basic_block max, int update_p)
 
       /* Find out if we can drop through to the next block.  */
       insn = NEXT_INSN (insn);
-      e = find_edge (bb, EXIT_BLOCK_PTR);
+      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
       if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;
 
@@ -369,8 +369,9 @@ make_edges (basic_block min, basic_block max, int update_p)
        insn = NEXT_INSN (insn);
 
       if (!insn)
-       cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
-      else if (bb->next_bb != EXIT_BLOCK_PTR)
+       cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
+                         EDGE_FALLTHRU);
+      else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
@@ -480,7 +481,7 @@ find_bb_boundaries (basic_block bb)
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
-           make_edge (ENTRY_BLOCK_PTR, bb, 0);
+           make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
        }
       else if (code == BARRIER)
        {
@@ -607,7 +608,7 @@ find_many_sub_basic_blocks (sbitmap blocks)
       break;
 
   min = max = bb;
-  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
+  for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
     if (STATE (bb) != BLOCK_ORIGINAL)
       max = bb;
 
index a2192cbaf75fc0e10ef5e89db0b607d4e972c128..9c126102a543bc075cd9f85c7c4dd70909a97141 100644 (file)
@@ -134,7 +134,7 @@ try_simplify_condjump (basic_block cbranch_block)
      unconditional jump.  */
   jump_block = cbranch_fallthru_edge->dest;
   if (!single_pred_p (jump_block)
-      || jump_block->next_bb == EXIT_BLOCK_PTR
+      || jump_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
       || !FORWARDER_BLOCK_P (jump_block))
     return false;
   jump_dest_block = single_succ (jump_block);
@@ -157,7 +157,7 @@ try_simplify_condjump (basic_block cbranch_block)
      unconditional branch.  */
   cbranch_dest_block = cbranch_jump_edge->dest;
 
-  if (cbranch_dest_block == EXIT_BLOCK_PTR
+  if (cbranch_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
       || !can_fallthru (jump_block, cbranch_dest_block))
     return false;
 
@@ -455,7 +455,7 @@ try_forward_edges (int mode, basic_block b)
         bb-reorder.c:partition_hot_cold_basic_blocks for complete
         details.  */
 
-      if (first != EXIT_BLOCK_PTR
+      if (first != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && find_reg_note (BB_END (first), REG_CROSSING_JUMP, NULL_RTX))
        return changed;
 
@@ -467,7 +467,7 @@ try_forward_edges (int mode, basic_block b)
 
          if (FORWARDER_BLOCK_P (target)
              && !(single_succ_edge (target)->flags & EDGE_CROSSING)
-             && single_succ (target) != EXIT_BLOCK_PTR)
+             && single_succ (target) != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              /* Bypass trivial infinite loops.  */
              new_target = single_succ (target);
@@ -580,7 +580,7 @@ try_forward_edges (int mode, basic_block b)
          e->goto_locus = goto_locus;
 
          /* Don't force if target is exit block.  */
-         if (threaded && target != EXIT_BLOCK_PTR)
+         if (threaded && target != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              notice_new_block (redirect_edge_and_branch_force (e, target));
              if (dump_file)
@@ -793,7 +793,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
        fprintf (dump_file, "Merged %d and %d without moving.\n",
                 b_index, c_index);
 
-      return b->prev_bb == ENTRY_BLOCK_PTR ? b : b->prev_bb;
+      return b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? b : b->prev_bb;
     }
 
   /* Otherwise we will need to move code around.  Do that only if expensive
@@ -831,7 +831,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
       if (! c_has_outgoing_fallthru)
        {
          merge_blocks_move_successor_nojumps (b, c);
-         return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
+         return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
        }
 
       /* If B does not have an incoming fallthru, then it can be moved
@@ -843,7 +843,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
        {
          basic_block bb;
 
-         if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
+         if (b_fallthru_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            return NULL;
          bb = force_nonfallthru (b_fallthru_edge);
          if (bb)
@@ -851,7 +851,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
        }
 
       merge_blocks_move_predecessor_nojumps (b, c);
-      return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
+      return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
     }
 
   return NULL;
@@ -1267,7 +1267,7 @@ walk_to_nondebug_insn (rtx *i1, basic_block *bb1, bool follow_fallthru,
         return;
 
       fallthru = find_fallthru_edge ((*bb1)->preds);
-      if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
+      if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
           || !single_succ_p (fallthru->src))
         return;
 
@@ -1540,7 +1540,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
      whether they went through the prologue.  Sibcalls are fine, we know
      that we either didn't need or inserted an epilogue before them.  */
   if (crtl->shrink_wrapped
-      && single_succ_p (bb1) && single_succ (bb1) == EXIT_BLOCK_PTR
+      && single_succ_p (bb1)
+      && single_succ (bb1) == EXIT_BLOCK_PTR_FOR_FN (cfun)
       && !JUMP_P (BB_END (bb1))
       && !(CALL_P (BB_END (bb1)) && SIBLING_CALL_P (BB_END (bb1))))
     return false;
@@ -1902,7 +1903,8 @@ try_crossjump_to_edge (int mode, edge e1, edge e2,
     e2 = single_pred_edge (src2), src2 = e2->src;
 
   /* Nothing to do if we reach ENTRY, or a common source block.  */
-  if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
+  if (src1 == ENTRY_BLOCK_PTR_FOR_FN (cfun) || src2
+      == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return false;
   if (src1 == src2)
     return false;
@@ -2146,7 +2148,7 @@ try_crossjump_bb (int mode, basic_block bb)
   /* Don't crossjump if this block ends in a computed jump,
      unless we are optimizing for size.  */
   if (optimize_bb_for_size_p (bb)
-      && bb != EXIT_BLOCK_PTR
+      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
       && computed_jump_p (BB_END (bb)))
     return false;
 
@@ -2287,7 +2289,7 @@ try_head_merge_bb (basic_block bb)
   /* Don't crossjump if this block ends in a computed jump,
      unless we are optimizing for size.  */
   if (optimize_bb_for_size_p (bb)
-      && bb != EXIT_BLOCK_PTR
+      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
       && computed_jump_p (BB_END (bb)))
     return false;
 
@@ -2303,7 +2305,7 @@ try_head_merge_bb (basic_block bb)
     }
 
   for (ix = 0; ix < nedges; ix++)
-    if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR)
+    if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
       return false;
 
   for (ix = 0; ix < nedges; ix++)
@@ -2623,7 +2625,8 @@ try_optimize_cfg (int mode)
                     "\n\ntry_optimize_cfg iteration %i\n\n",
                     iterations);
 
-         for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
+         for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
+              != EXIT_BLOCK_PTR_FOR_FN (cfun);)
            {
              basic_block c;
              edge s;
@@ -2640,7 +2643,8 @@ try_optimize_cfg (int mode)
              if (EDGE_COUNT (b->preds) == 0
                  || (EDGE_COUNT (b->succs) == 0
                      && trivially_empty_bb_p (b)
-                     && single_succ_edge (ENTRY_BLOCK_PTR)->dest != b))
+                     && single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest
+                     != b))
                {
                  c = b->prev_bb;
                  if (EDGE_COUNT (b->preds) > 0)
@@ -2681,7 +2685,7 @@ try_optimize_cfg (int mode)
                  delete_basic_block (b);
                  changed = true;
                  /* Avoid trying to remove ENTRY_BLOCK_PTR.  */
-                 b = (c == ENTRY_BLOCK_PTR ? c->next_bb : c);
+                 b = (c == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? c->next_bb : c);
                  continue;
                }
 
@@ -2696,7 +2700,7 @@ try_optimize_cfg (int mode)
                     if CASE_DROPS_THRU, this can be a tablejump with
                     some element going to the same place as the
                     default (fallthru).  */
-                 && (single_pred (b) == ENTRY_BLOCK_PTR
+                 && (single_pred (b) == ENTRY_BLOCK_PTR_FOR_FN (cfun)
                      || !JUMP_P (BB_END (single_pred (b)))
                      || ! label_is_jump_target_p (BB_HEAD (b),
                                                   BB_END (single_pred (b)))))
@@ -2723,7 +2727,8 @@ try_optimize_cfg (int mode)
                             "Deleting fallthru block %i.\n",
                             b->index);
 
-                 c = b->prev_bb == ENTRY_BLOCK_PTR ? b->next_bb : b->prev_bb;
+                 c = ((b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+                      ? b->next_bb : b->prev_bb);
                  redirect_edge_succ_nodup (single_pred_edge (b),
                                            single_succ (b));
                  delete_basic_block (b);
@@ -2736,7 +2741,7 @@ try_optimize_cfg (int mode)
              if (single_succ_p (b)
                  && (s = single_succ_edge (b))
                  && !(s->flags & EDGE_COMPLEX)
-                 && (c = s->dest) != EXIT_BLOCK_PTR
+                 && (c = s->dest) != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && single_pred_p (c)
                  && b != c)
                {
@@ -2780,7 +2785,7 @@ try_optimize_cfg (int mode)
                 can either delete the jump entirely, or replace it
                 with a simple unconditional jump.  */
              if (single_succ_p (b)
-                 && single_succ (b) != EXIT_BLOCK_PTR
+                 && single_succ (b) != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && onlyjump_p (BB_END (b))
                  && !find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
                  && try_redirect_by_replacing_jump (single_succ_edge (b),
@@ -2819,7 +2824,7 @@ try_optimize_cfg (int mode)
            }
 
          if ((mode & CLEANUP_CROSSJUMP)
-             && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
+             && try_crossjump_bb (mode, EXIT_BLOCK_PTR_FOR_FN (cfun)))
            changed = true;
 
          if (block_was_dirty)
@@ -2876,7 +2881,8 @@ delete_unreachable_blocks (void)
   if (MAY_HAVE_DEBUG_INSNS && current_ir_type () == IR_GIMPLE
       && dom_info_available_p (CDI_DOMINATORS))
     {
-      for (b = EXIT_BLOCK_PTR->prev_bb; b != ENTRY_BLOCK_PTR; b = prev_bb)
+      for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
+          b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
        {
          prev_bb = b->prev_bb;
 
@@ -2912,7 +2918,8 @@ delete_unreachable_blocks (void)
     }
   else
     {
-      for (b = EXIT_BLOCK_PTR->prev_bb; b != ENTRY_BLOCK_PTR; b = prev_bb)
+      for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
+          b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
        {
          prev_bb = b->prev_bb;
 
index 4ff1a8954281b401c74ea0e37b8470ccc6533d9f..d431c8dea5eb5914652d0d2608a64af3d684ce93 100644 (file)
@@ -3363,7 +3363,7 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
     {
       if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
-         if (e->dest != EXIT_BLOCK_PTR)
+         if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
@@ -3399,7 +3399,8 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
       delete_insn (NEXT_INSN (last));
     }
 
-  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
+  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
+                | EDGE_SIBCALL);
   e->probability += probability;
   e->count += count;
   BB_END (bb) = last;
@@ -4840,9 +4841,9 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
       gimple ret_stmt = gsi_stmt (gsi);
 
       gcc_assert (single_succ_p (bb));
-      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
+      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
 
-      if (bb->next_bb == EXIT_BLOCK_PTR
+      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
@@ -5184,17 +5185,17 @@ construct_init_block (void)
   int flags;
 
   /* Multiple entry points not supported yet.  */
-  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
-  init_rtl_bb_info (ENTRY_BLOCK_PTR);
-  init_rtl_bb_info (EXIT_BLOCK_PTR);
-  ENTRY_BLOCK_PTR->flags |= BB_RTL;
-  EXIT_BLOCK_PTR->flags |= BB_RTL;
+  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
+  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
 
-  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
+  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
 
   /* When entry edge points to first basic block, we don't need jump,
      otherwise we have to jump into proper target.  */
-  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
+  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
     {
       tree label = gimple_block_label (e->dest);
 
@@ -5206,11 +5207,11 @@ construct_init_block (void)
 
   init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
-                                  ENTRY_BLOCK_PTR);
-  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
-  init_block->count = ENTRY_BLOCK_PTR->count;
-  if (current_loops && ENTRY_BLOCK_PTR->loop_father)
-    add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
+                                  ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
+  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
+  if (current_loops && ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father)
+    add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
   if (e)
     {
       first_block = e->dest;
@@ -5218,9 +5219,9 @@ construct_init_block (void)
       e = make_edge (init_block, first_block, flags);
     }
   else
-    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
+    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
   e->probability = REG_BR_PROB_BASE;
-  e->count = ENTRY_BLOCK_PTR->count;
+  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
 
   update_bb_for_insn (init_block);
   return init_block;
@@ -5251,9 +5252,9 @@ construct_exit_block (void)
   edge e, e2;
   unsigned ix;
   edge_iterator ei;
-  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
+  rtx orig_end = BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
 
-  rtl_profile_for_bb (EXIT_BLOCK_PTR);
+  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* Make sure the locus is set to the end of the function, so that
      epilogue line numbers and warnings are set properly.  */
@@ -5268,30 +5269,30 @@ construct_exit_block (void)
     return;
   /* While emitting the function end we could move end of the last basic block.
    */
-  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
+  BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb) = orig_end;
   while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
     head = NEXT_INSN (head);
   exit_block = create_basic_block (NEXT_INSN (head), end,
-                                  EXIT_BLOCK_PTR->prev_bb);
-  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
-  exit_block->count = EXIT_BLOCK_PTR->count;
-  if (current_loops && EXIT_BLOCK_PTR->loop_father)
-    add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
+                                  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
+  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
+  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
+  if (current_loops && EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father)
+    add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
 
   ix = 0;
-  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
+  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
     {
-      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
+      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
       if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
       else
        ix++;
     }
 
-  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
+  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
   e->probability = REG_BR_PROB_BASE;
-  e->count = EXIT_BLOCK_PTR->count;
-  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
+  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
+  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (e2 != e)
       {
        e->count -= e2->count;
@@ -5521,7 +5522,7 @@ gimple_expand_cfg (void)
   /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
   free_dominance_info (CDI_DOMINATORS);
 
-  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
+  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   insn_locations_init ();
   if (!DECL_IS_BUILTIN (current_function_decl))
@@ -5685,11 +5686,12 @@ gimple_expand_cfg (void)
 
   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
      remaining edges later.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     e->flags &= ~EDGE_EXECUTABLE;
 
   lab_rtx_for_bb = pointer_map_create ();
-  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
+                 next_bb)
     bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
 
   if (MAY_HAVE_DEBUG_INSNS)
@@ -5734,7 +5736,8 @@ gimple_expand_cfg (void)
      split edges which edge insertions might do.  */
   rebuild_jump_labels (get_insns ());
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -5745,8 +5748,8 @@ gimple_expand_cfg (void)
              rebuild_jump_labels_chain (e->insns.r);
              /* Put insns after parm birth, but before
                 NOTE_INSNS_FUNCTION_BEG.  */
-             if (e->src == ENTRY_BLOCK_PTR
-                 && single_succ_p (ENTRY_BLOCK_PTR))
+             if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+                 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
@@ -5767,7 +5770,8 @@ gimple_expand_cfg (void)
   /* We're done expanding trees to RTL.  */
   currently_expanding_to_rtl = 0;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       edge e;
       edge_iterator ei;
index 20b90bfebf3e993efdfbe1eb61cb90ee345ec85d..2535c9027bea3d3f5bd1d0e12dc409673a393cc8 100644 (file)
@@ -102,10 +102,10 @@ verify_flow_info (void)
   edge_checksum = XCNEWVEC (size_t, last_basic_block);
 
   /* Check bb chain & numbers.  */
-  last_bb_seen = ENTRY_BLOCK_PTR;
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
+  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
     {
-      if (bb != EXIT_BLOCK_PTR
+      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && bb != BASIC_BLOCK (bb->index))
        {
          error ("bb %d on wrong place", bb->index);
@@ -234,21 +234,21 @@ verify_flow_info (void)
     edge e;
     edge_iterator ei;
 
-    FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+    FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
       edge_checksum[e->dest->index] += (size_t) e;
 
-    FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+    FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
       edge_checksum[e->dest->index] -= (size_t) e;
   }
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     if (edge_checksum[bb->index])
       {
        error ("basic block %i edge lists are corrupted", bb->index);
        err = 1;
       }
 
-  last_bb_seen = ENTRY_BLOCK_PTR;
+  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   /* Clean up.  */
   free (last_visited);
@@ -938,10 +938,11 @@ tidy_fallthru_edges (void)
   if (!cfg_hooks->tidy_fallthru_edge)
     return;
 
-  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return;
 
-  FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
+  FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
+                 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb, next_bb)
     {
       edge s;
 
@@ -1011,7 +1012,7 @@ can_duplicate_block_p (const_basic_block bb)
     internal_error ("%s does not support can_duplicate_block_p",
                    cfg_hooks->name);
 
-  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return false;
 
   return cfg_hooks->can_duplicate_block_p (bb);
@@ -1409,7 +1410,7 @@ account_profile_record (struct profile_record *record, int after_pass)
 
   FOR_ALL_BB (bb)
    {
-      if (bb != EXIT_BLOCK_PTR_FOR_FUNCTION (cfun)
+      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && profile_status != PROFILE_ABSENT)
        {
          sum = 0;
@@ -1424,7 +1425,7 @@ account_profile_record (struct profile_record *record, int after_pass)
              && (lsum - bb->count > 100 || lsum - bb->count < -100))
            record->num_mismatched_count_out[after_pass]++;
        }
-      if (bb != ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
+      if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && profile_status != PROFILE_ABSENT)
        {
          sum = 0;
@@ -1440,8 +1441,8 @@ account_profile_record (struct profile_record *record, int after_pass)
          if (lsum - bb->count > 100 || lsum - bb->count < -100)
            record->num_mismatched_count_in[after_pass]++;
        }
-      if (bb == ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
-         || bb == EXIT_BLOCK_PTR_FOR_FUNCTION (cfun))
+      if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+         || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
       gcc_assert (cfg_hooks->account_profile_record);
       cfg_hooks->account_profile_record (bb, after_pass, record);
index a5eb4da3490b619b0e9e6f5d8255f500a52f5aa3..4b3ad5bd3bdc23d34b97364a140e0cd81934de33 100644 (file)
@@ -352,10 +352,10 @@ init_loops_structure (struct function *fn,
   /* Dummy loop containing whole function.  */
   root = alloc_loop ();
   root->num_nodes = n_basic_blocks_for_fn (fn);
-  root->latch = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
-  root->header = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->loop_father = root;
-  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->loop_father = root;
+  root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
+  root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
+  ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
+  EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
 
   loops->larray->quick_push (root);
   loops->tree_root = root;
@@ -382,7 +382,7 @@ bb_loop_header_p (basic_block header)
   FOR_EACH_EDGE (e, ei, header->preds)
     {
       basic_block latch = e->src;
-      if (latch != ENTRY_BLOCK_PTR
+      if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && dominated_by_p (CDI_DOMINATORS, latch, header))
        return true;
     }
@@ -745,7 +745,7 @@ disambiguate_multiple_latches (struct loop *loop)
      block.  This would cause problems if the entry edge was the one from the
      entry block.  To avoid having to handle this case specially, split
      such entry edge.  */
-  e = find_edge (ENTRY_BLOCK_PTR, loop->header);
+  e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);
   if (e)
     split_edge (e);
 
@@ -781,7 +781,8 @@ flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
 {
   struct loop *source_loop;
 
-  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
+  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return 0;
 
   source_loop = bb->loop_father;
@@ -826,13 +827,13 @@ get_loop_body (const struct loop *loop)
 
   body = XNEWVEC (basic_block, loop->num_nodes);
 
-  if (loop->latch == EXIT_BLOCK_PTR)
+  if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       /* There may be blocks unreachable from EXIT_BLOCK, hence we need to
         special-case the fake loop that contains the whole function.  */
       gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
       body[tv++] = loop->header;
-      body[tv++] = EXIT_BLOCK_PTR;
+      body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
       FOR_EACH_BB (bb)
        body[tv++] = bb;
     }
@@ -886,7 +887,7 @@ get_loop_body_in_dom_order (const struct loop *loop)
 
   tovisit = XNEWVEC (basic_block, loop->num_nodes);
 
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR);
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   tv = 0;
   fill_sons_in_loop (loop, loop->header, tovisit, &tv);
@@ -921,7 +922,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
   unsigned int vc = 1;
 
   gcc_assert (loop->num_nodes);
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR);
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   blocks = XNEWVEC (basic_block, loop->num_nodes);
   visited = BITMAP_ALLOC (NULL);
@@ -1143,7 +1144,7 @@ get_loop_exit_edges (const struct loop *loop)
   edge_iterator ei;
   struct loop_exit *exit;
 
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR);
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* If we maintain the lists of exits, use them.  Otherwise we must
      scan the body of the loop.  */
@@ -1175,7 +1176,7 @@ num_loop_branches (const struct loop *loop)
   unsigned i, n;
   basic_block * body;
 
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR);
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   body = get_loop_body (loop);
   n = 0;
index 9300237c670020ad69b391239173b94a064c9cae..0cee6c68b28bca1dc4d149b4bd41498c941cf194 100644 (file)
@@ -85,7 +85,8 @@ mark_irreducible_loops (void)
   gcc_assert (current_loops != NULL);
 
   /* Reset the flags.  */
-  FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       act->flags &= ~BB_IRREDUCIBLE_LOOP;
       FOR_EACH_EDGE (e, ei, act->succs)
@@ -95,11 +96,12 @@ mark_irreducible_loops (void)
   /* Create the edge lists.  */
   g = new_graph (last_basic_block + num);
 
-  FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     FOR_EACH_EDGE (e, ei, act->succs)
       {
        /* Ignore edges to exit.  */
-       if (e->dest == EXIT_BLOCK_PTR)
+       if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          continue;
 
        src = BB_REPR (act);
index 714c7e1177f8468851bd519beb1e1fa932c269ae..6baa15afadeb810a8001a5cf4162e9bcd0d43ac1 100644 (file)
@@ -92,7 +92,7 @@ fix_bb_placement (basic_block bb)
 
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
-      if (e->dest == EXIT_BLOCK_PTR)
+      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       act = e->dest->loop_father;
@@ -352,7 +352,8 @@ remove_path (edge e)
     bitmap_set_bit (seen, rem_bbs[i]->index);
   if (!irred_invalidated)
     FOR_EACH_EDGE (ae, ei, e->src->succs)
-      if (ae != e && ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index)
+      if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
+         && !bitmap_bit_p (seen, ae->dest->index)
          && ae->flags & EDGE_IRREDUCIBLE_LOOP)
        {
          irred_invalidated = true;
@@ -363,7 +364,8 @@ remove_path (edge e)
     {
       bb = rem_bbs[i];
       FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
-       if (ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index))
+       if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
+           && !bitmap_bit_p (seen, ae->dest->index))
          {
            bitmap_set_bit (seen, ae->dest->index);
            bord_bbs[n_bord_bbs++] = ae->dest;
@@ -1519,7 +1521,7 @@ create_preheader (struct loop *loop, int flags)
 
       /* We do not allow entry block to be the loop preheader, since we
             cannot emit code there.  */
-      if (single_entry->src == ENTRY_BLOCK_PTR)
+      if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
         need_forwarder_block = true;
       else
         {
index c81d3a5503c08139d0ebde60c3c787fddbaa8ca9..7ad38721606af64809610a9c90d4c001c7b103a0 100644 (file)
@@ -501,7 +501,7 @@ rtx
 entry_of_function (void)
 {
   return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
-         BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
+         BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
 }
 
 /* Emit INSN at the entry point of the function, ensuring that it is only
@@ -509,7 +509,7 @@ entry_of_function (void)
 void
 emit_insn_at_entry (rtx insn)
 {
-  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
+  edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
   edge e = ei_safe_edge (ei);
   gcc_assert (e->flags & EDGE_FALLTHRU);
 
@@ -573,7 +573,7 @@ contains_no_active_insn_p (const_basic_block bb)
 {
   rtx insn;
 
-  if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
       || !single_succ_p (bb))
     return false;
 
@@ -620,7 +620,7 @@ can_fallthru (basic_block src, basic_block target)
   edge e;
   edge_iterator ei;
 
-  if (target == EXIT_BLOCK_PTR)
+  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return true;
   if (src->next_bb != target)
     return false;
@@ -630,7 +630,7 @@ can_fallthru (basic_block src, basic_block target)
     return false;
 
   FOR_EACH_EDGE (e, ei, src->succs)
-    if (e->dest == EXIT_BLOCK_PTR
+    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
       return false;
 
@@ -650,10 +650,10 @@ could_fall_through (basic_block src, basic_block target)
   edge e;
   edge_iterator ei;
 
-  if (target == EXIT_BLOCK_PTR)
+  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return true;
   FOR_EACH_EDGE (e, ei, src->succs)
-    if (e->dest == EXIT_BLOCK_PTR
+    if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
        && e->flags & EDGE_FALLTHRU)
       return 0;
   return true;
@@ -958,7 +958,8 @@ rtl_can_merge_blocks (basic_block a, basic_block b)
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
          && a->next_bb == b
-         && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
+         && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+         && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (!JUMP_P (BB_END (a))
@@ -972,7 +973,7 @@ rtl_can_merge_blocks (basic_block a, basic_block b)
 rtx
 block_label (basic_block block)
 {
-  if (block == EXIT_BLOCK_PTR)
+  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return NULL_RTX;
 
   if (!LABEL_P (BB_HEAD (block)))
@@ -1084,13 +1085,13 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
                 INSN_UID (insn), e->dest->index, target->index);
       if (!redirect_jump (insn, block_label (target), 0))
        {
-         gcc_assert (target == EXIT_BLOCK_PTR);
+         gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
          return NULL;
        }
     }
 
   /* Cannot do anything for target exit block.  */
-  else if (target == EXIT_BLOCK_PTR)
+  else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return NULL;
 
   /* Or replace possibly complicated jump insn by simple jump insn.  */
@@ -1178,7 +1179,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
       int j;
       rtx new_label = block_label (new_bb);
 
-      if (new_bb == EXIT_BLOCK_PTR)
+      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
       if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
        vec = XVEC (PATTERN (tmp), 0);
@@ -1211,7 +1212,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
       int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
       rtx new_label, note;
 
-      if (new_bb == EXIT_BLOCK_PTR)
+      if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
       new_label = block_label (new_bb);
 
@@ -1268,7 +1269,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
             target is exit block on some arches.  */
          if (!redirect_jump (insn, block_label (new_bb), 0))
            {
-             gcc_assert (new_bb == EXIT_BLOCK_PTR);
+             gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
              return false;
            }
        }
@@ -1324,7 +1325,8 @@ fixup_partition_crossing (edge e)
 {
   rtx note;
 
-  if (e->src == ENTRY_BLOCK_PTR || e->dest == EXIT_BLOCK_PTR)
+  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
+      == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return;
   /* If we redirected an existing edge, it may already be marked
      crossing, even though the new src is missing a reg crossing note.
@@ -1392,7 +1394,7 @@ fixup_new_cold_bb (basic_block bb)
          boundary fixup by calling fixup_partition_crossing itself.  */
       if ((e->flags & EDGE_FALLTHRU)
           && BB_PARTITION (bb) != BB_PARTITION (e->dest)
-          && e->dest != EXIT_BLOCK_PTR)
+         && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
         force_nonfallthru (e);
       else
         fixup_partition_crossing (e);
@@ -1470,7 +1472,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
   /* In the case the last instruction is conditional jump to the next
      instruction, first redirect the jump itself and then continue
      by creating a basic block afterwards to redirect fallthru edge.  */
-  if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
+  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+      && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
       && any_condjump_p (BB_END (e->src))
       && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
     {
@@ -1512,7 +1515,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
   else
     {
       gcc_assert (e->flags & EDGE_FALLTHRU);
-      if (e->src == ENTRY_BLOCK_PTR)
+      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          /* We can't redirect the entry block.  Create an empty block
             at the start of the function which we use to add the new
@@ -1521,16 +1524,18 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
          edge_iterator ei;
          bool found = false;
 
-         basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
+         basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
+                                              ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
          /* Change the existing edge's source to be the new block, and add
             a new edge from the entry block to the new block.  */
          e->src = bb;
-         for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
+         for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
+              (tmp = ei_safe_edge (ei)); )
            {
              if (tmp == e)
                {
-                 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
+                 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
                  found = true;
                  break;
                }
@@ -1541,14 +1546,15 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
          gcc_assert (found);
 
          vec_safe_push (bb->succs, e);
-         make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
+         make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
+                                EDGE_FALLTHRU);
        }
     }
 
   /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
      don't point to the target or fallthru label.  */
   if (JUMP_P (BB_END (e->src))
-      && target != EXIT_BLOCK_PTR
+      && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
       && (e->flags & EDGE_FALLTHRU)
       && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
     {
@@ -1650,7 +1656,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
 
   loc = e->goto_locus;
   e->flags &= ~EDGE_FALLTHRU;
-  if (target == EXIT_BLOCK_PTR)
+  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       if (jump_label == ret_rtx)
        {
@@ -1784,7 +1790,7 @@ static basic_block
 last_bb_in_partition (basic_block start_bb)
 {
   basic_block bb;
-  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
         return bb;
@@ -1820,14 +1826,15 @@ rtl_split_edge (edge edge_in)
     }
 
   /* Create the basic block note.  */
-  if (edge_in->dest != EXIT_BLOCK_PTR)
+  if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
     before = BB_HEAD (edge_in->dest);
   else
     before = NULL_RTX;
 
   /* If this is a fall through edge to the exit block, the blocks might be
      not adjacent, and the right place is after the source.  */
-  if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
+  if ((edge_in->flags & EDGE_FALLTHRU)
+      && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       before = NEXT_INSN (BB_END (edge_in->src));
       bb = create_basic_block (before, NULL, edge_in->src);
@@ -1835,7 +1842,7 @@ rtl_split_edge (edge edge_in)
     }
   else
     {
-      if (edge_in->src == ENTRY_BLOCK_PTR)
+      if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
         {
           bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
           BB_COPY_PARTITION (bb, edge_in->dest);
@@ -1873,7 +1880,7 @@ rtl_split_edge (edge edge_in)
 
   /* Can't allow a region crossing edge to be fallthrough.  */
   if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
-      && edge_in->dest != EXIT_BLOCK_PTR)
+      && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       new_bb = force_nonfallthru (single_succ_edge (bb));
       gcc_assert (!new_bb);
@@ -1888,7 +1895,7 @@ rtl_split_edge (edge edge_in)
     }
   else
     {
-      if (edge_in->src != ENTRY_BLOCK_PTR)
+      if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          /* For asm goto even splitting of fallthru edge might
             need insn patching, as other labels might point to the
@@ -1896,7 +1903,7 @@ rtl_split_edge (edge edge_in)
          rtx last = BB_END (edge_in->src);
          if (last
              && JUMP_P (last)
-             && edge_in->dest != EXIT_BLOCK_PTR
+             && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && extract_asm_operands (PATTERN (last)) != NULL_RTX
              && patch_jump_insn (last, before, bb))
            df_set_bb_dirty (edge_in->src);
@@ -1943,7 +1950,7 @@ commit_one_edge_insertion (edge e)
 
   /* Figure out where to put these insns.  If the destination has
      one predecessor, insert there.  Except for the exit block.  */
-  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
+  if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       bb = e->dest;
 
@@ -1972,7 +1979,7 @@ commit_one_edge_insertion (edge e)
      the basic block.  */
   else if ((e->flags & EDGE_ABNORMAL) == 0
           && single_succ_p (e->src)
-          && e->src != ENTRY_BLOCK_PTR
+          && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
           && (!JUMP_P (BB_END (e->src))
               || simplejump_p (BB_END (e->src))))
     {
@@ -2025,7 +2032,7 @@ commit_one_edge_insertion (edge e)
         to EXIT.  */
 
       e = single_succ_edge (bb);
-      gcc_assert (e->dest == EXIT_BLOCK_PTR
+      gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
 
       e->flags &= ~EDGE_FALLTHRU;
@@ -2057,7 +2064,8 @@ commit_edge_insertions (void)
   verify_flow_info ();
 #endif
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -2428,8 +2436,8 @@ rtl_verify_edges (void)
            n_fallthru++, fallthru = e;
 
          is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
-                        && e->src != ENTRY_BLOCK_PTR
-                        && e->dest != EXIT_BLOCK_PTR);
+                        && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+                        && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
           has_crossing_edge |= is_crossing;
          if (e->flags & EDGE_CROSSING)
            {
@@ -2832,8 +2840,8 @@ rtl_verify_fallthru (void)
                break;
            }
        }
-      else if (e->src != ENTRY_BLOCK_PTR
-              && e->dest != EXIT_BLOCK_PTR)
+      else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+              && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          rtx insn;
 
@@ -2872,10 +2880,10 @@ rtl_verify_bb_layout (void)
   rtx x;
   int num_bb_notes;
   const rtx rtx_first = get_insns ();
-  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
+  basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
 
   num_bb_notes = 0;
-  last_bb_seen = ENTRY_BLOCK_PTR;
+  last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   for (x = rtx_first; x; x = NEXT_INSN (x))
     {
@@ -3062,7 +3070,7 @@ purge_dead_edges (basic_block bb)
              ei_next (&ei);
              continue;
            }
-         else if (e->dest != EXIT_BLOCK_PTR
+         else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
            /* If the destination block is the target of the jump,
               keep the edge.  */
@@ -3070,7 +3078,8 @@ purge_dead_edges (basic_block bb)
              ei_next (&ei);
              continue;
            }
-         else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
+         else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
+                  && returnjump_p (insn))
            /* If the destination block is the exit block, and this
               instruction is a return, then keep the edge.  */
            {
@@ -3319,7 +3328,7 @@ skip_insns_after_block (basic_block bb)
   rtx insn, last_insn, next_head, prev;
 
   next_head = NULL_RTX;
-  if (bb->next_bb != EXIT_BLOCK_PTR)
+  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     next_head = BB_HEAD (bb->next_bb);
 
   for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
@@ -3468,7 +3477,7 @@ outof_cfg_layout_mode (void)
   basic_block bb;
 
   FOR_EACH_BB (bb)
-    if (bb->next_bb != EXIT_BLOCK_PTR)
+    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
       bb->aux = bb->next_bb;
 
   cfg_layout_finalize ();
@@ -3577,7 +3586,8 @@ relink_block_chain (bool stay_in_cfglayout_mode)
   if (dump_file)
     {
       fprintf (dump_file, "Reordered sequence:\n");
-      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
+      for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
+          NUM_FIXED_BLOCKS;
           bb;
           bb = (basic_block) bb->aux, index++)
        {
@@ -3595,15 +3605,15 @@ relink_block_chain (bool stay_in_cfglayout_mode)
     }
 
   /* Now reorder the blocks.  */
-  prev_bb = ENTRY_BLOCK_PTR;
-  bb = ENTRY_BLOCK_PTR->next_bb;
+  prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
   for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
     {
       bb->prev_bb = prev_bb;
       prev_bb->next_bb = bb;
     }
-  prev_bb->next_bb = EXIT_BLOCK_PTR;
-  EXIT_BLOCK_PTR->prev_bb = prev_bb;
+  prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
 
   /* Then, clean up the aux fields.  */
   FOR_ALL_BB (bb)
@@ -3644,7 +3654,8 @@ fixup_reorder_chain (void)
   /* First do the bulk reordering -- rechain the blocks without regard to
      the needed changes to jumps and labels.  */
 
-  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
+  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
+       bb->aux)
     {
       if (BB_HEADER (bb))
        {
@@ -3687,7 +3698,8 @@ fixup_reorder_chain (void)
   /* Now add jumps and labels as needed to match the blocks new
      outgoing edges.  */
 
-  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
+  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
+       bb->aux)
     {
       edge e_fall, e_taken, e;
       rtx bb_end_insn;
@@ -3728,7 +3740,7 @@ fixup_reorder_chain (void)
 
              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
-                 || e_fall->dest == EXIT_BLOCK_PTR)
+                 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                continue;
 
              /* The degenerated case of conditional jump jumping to the next
@@ -3749,7 +3761,8 @@ fixup_reorder_chain (void)
                  if (note
                      && XINT (note, 0) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
-                                     (e_fall->dest == EXIT_BLOCK_PTR
+                                     (e_fall->dest
+                                      == EXIT_BLOCK_PTR_FOR_FN (cfun)
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
@@ -3771,7 +3784,8 @@ fixup_reorder_chain (void)
              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
-                                   (e_fall->dest == EXIT_BLOCK_PTR
+                                   (e_fall->dest
+                                    == EXIT_BLOCK_PTR_FOR_FN (cfun)
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
@@ -3793,7 +3807,7 @@ fixup_reorder_chain (void)
                 __builtin_unreachable ()), nothing to do.  */
              if (! e_fall
                  || bb->aux == e_fall->dest
-                 || e_fall->dest == EXIT_BLOCK_PTR)
+                 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                continue;
 
              /* Otherwise we'll have to use the fallthru fixup below.  */
@@ -3820,7 +3834,7 @@ fixup_reorder_chain (void)
            continue;
 
          /* A fallthru to exit block.  */
-         if (e_fall->dest == EXIT_BLOCK_PTR)
+         if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
            continue;
        }
 
@@ -3880,7 +3894,7 @@ fixup_reorder_chain (void)
                  continue;
                }
              dest = e->dest;
-             if (dest == EXIT_BLOCK_PTR)
+             if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                {
                  /* Non-fallthru edges to the exit block cannot be split.  */
                  if (!(e->flags & EDGE_FALLTHRU))
@@ -3958,13 +3972,13 @@ fixup_fallthru_exit_predecessor (void)
      value.  */
   gcc_assert (reload_completed);
 
-  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
+  e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
   if (e)
     bb = e->src;
 
   if (bb && bb->aux)
     {
-      basic_block c = ENTRY_BLOCK_PTR->next_bb;
+      basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
 
       /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
@@ -4000,7 +4014,7 @@ force_one_exit_fallthru (void)
   edge_iterator ei;
   basic_block forwarder, bb;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (e->flags & EDGE_FALLTHRU)
       {
        if (predecessor == NULL)
@@ -4018,7 +4032,8 @@ force_one_exit_fallthru (void)
   /* Exit has several fallthru predecessors.  Create a forwarder block for
      them.  */
   forwarder = split_edge (predecessor);
-  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
+  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
+       (e = ei_safe_edge (ei)); )
     {
       if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
@@ -4166,7 +4181,7 @@ cfg_layout_duplicate_bb (basic_block bb)
   insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
   new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
-                              EXIT_BLOCK_PTR->prev_bb);
+                              EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
 
   BB_COPY_PARTITION (new_bb, bb);
   if (BB_HEADER (bb))
@@ -4313,14 +4328,14 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
   if (e->dest == dest)
     return e;
 
-  if (e->src != ENTRY_BLOCK_PTR
+  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
       && (ret = try_redirect_by_replacing_jump (e, dest, true)))
     {
       df_set_bb_dirty (src);
       return ret;
     }
 
-  if (e->src == ENTRY_BLOCK_PTR
+  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
       && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
     {
       if (dump_file)
@@ -4447,7 +4462,7 @@ cfg_layout_delete_block (basic_block bb)
            set_last_insn (insn);
        }
     }
-  if (bb->next_bb != EXIT_BLOCK_PTR)
+  if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     to = &BB_HEADER (bb->next_bb);
   else
     to = &cfg_layout_function_footer;
@@ -4504,7 +4519,7 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
   if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
     {
       edge e = find_fallthru_edge (b->succs);
-      if (e && e->dest == EXIT_BLOCK_PTR)
+      if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        return false;
     }
 
@@ -4515,7 +4530,8 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
          && a != b
          /* Must be simple edge.  */
          && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
-         && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
+         && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+         && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* If the jump insn has side effects, we can't kill the edge.
             When not optimizing, try_redirect_by_replacing_jump will
             not allow us to redirect an edge by replacing a table jump.  */
@@ -4634,11 +4650,11 @@ static basic_block
 cfg_layout_split_edge (edge e)
 {
   basic_block new_bb =
-    create_basic_block (e->src != ENTRY_BLOCK_PTR
+    create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                        ? NEXT_INSN (BB_END (e->src)) : get_insns (),
                        NULL_RTX, e->src);
 
-  if (e->dest == EXIT_BLOCK_PTR)
+  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     BB_COPY_PARTITION (new_bb, e->src);
   else
     BB_COPY_PARTITION (new_bb, e->dest);
@@ -4663,7 +4679,8 @@ rtl_block_empty_p (basic_block bb)
 {
   rtx insn;
 
-  if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
+  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+      || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return true;
 
   FOR_BB_INSNS (bb, insn)
@@ -4770,7 +4787,8 @@ rtl_flow_call_edges_add (sbitmap blocks)
   if (! blocks)
     check_last_block = true;
   else
-    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
+    check_last_block = bitmap_bit_p (blocks,
+                                    EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
 
   /* In the last basic block, before epilogue generation, there will be
      a fallthru edge to EXIT.  Special care is required if the last insn
@@ -4786,7 +4804,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
      Handle this by adding a dummy instruction in a new last basic block.  */
   if (check_last_block)
     {
-      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
+      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
       rtx insn = BB_END (bb);
 
       /* Back up past insns that must be kept in the same block as a call.  */
@@ -4798,7 +4816,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
        {
          edge e;
 
-         e = find_edge (bb, EXIT_BLOCK_PTR);
+         e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
          if (e)
            {
              insert_insn_on_edge (gen_use (const0_rtx), e);
@@ -4846,7 +4864,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
 #ifdef ENABLE_CHECKING
              if (split_at_insn == BB_END (bb))
                {
-                 e = find_edge (bb, EXIT_BLOCK_PTR);
+                 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
                  gcc_assert (e == NULL);
                }
 #endif
@@ -4860,7 +4878,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
                    blocks_split++;
                }
 
-             make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+             make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
            }
 
          if (insn == BB_HEAD (bb))
@@ -4952,7 +4970,7 @@ rtl_can_remove_branch_p (const_edge e)
   const_rtx insn = BB_END (src), set;
 
   /* The conditions are taken from try_redirect_by_replacing_jump.  */
-  if (target == EXIT_BLOCK_PTR)
+  if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return false;
 
   if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
index 7834b065d529d1129c9d80b085ac693e4fc39b91..21f6ebe8d09185ad1415fefdccab932874d2b1f9 100644 (file)
@@ -198,7 +198,7 @@ record_eh_tables (struct cgraph_node *node, struct function *fun)
 int
 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
 {
-  int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
+  int entry_freq = ENTRY_BLOCK_PTR_FOR_FN
                     (DECL_STRUCT_FUNCTION (decl))->frequency;
   int freq = bb->frequency;
 
@@ -441,7 +441,7 @@ rebuild_cgraph_edges (void)
   cgraph_node_remove_callees (node);
   ipa_remove_all_references (&node->ref_list);
 
-  node->count = ENTRY_BLOCK_PTR->count;
+  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
 
   FOR_EACH_BB (bb)
     {
@@ -493,7 +493,7 @@ cgraph_rebuild_references (void)
     else
       i++;
 
-  node->count = ENTRY_BLOCK_PTR->count;
+  node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
 
   FOR_EACH_BB (bb)
     {
index b84e1989be9380f3321354aff54b7235b4494671..fb23abed8daa859f452c063a0c5a695e8f0a45ca 100644 (file)
@@ -1336,10 +1336,10 @@ init_lowered_empty_function (tree decl, bool in_ssa)
   loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
 
   /* Create BB for body of the function and connect it properly.  */
-  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
-  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
-  make_edge (bb, EXIT_BLOCK_PTR, 0);
-  add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
+  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
+  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
+  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
 
   return bb;
 }
@@ -1627,7 +1627,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks)
                  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
                  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
                  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
-                 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
+                 make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
                  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
                  bsi = gsi_last_bb (then_bb);
index fb5c8814edb8ccb757f07901bdb72436d61132b5..d685a7f6a660acb66b4782193bb3a52f4ec0cdeb 100644 (file)
@@ -1157,7 +1157,7 @@ combine_instructions (rtx f, unsigned int nregs)
   setup_incoming_promotions (first);
   /* Allow the entry block and the first block to fall into the same EBB.
      Conceptually the incoming promotions are assigned to the entry block.  */
-  last_bb = ENTRY_BLOCK_PTR;
+  last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   create_log_links ();
   FOR_EACH_BB (this_basic_block)
@@ -1209,7 +1209,7 @@ combine_instructions (rtx f, unsigned int nregs)
   label_tick = label_tick_ebb_start = 1;
   init_reg_last ();
   setup_incoming_promotions (first);
-  last_bb = ENTRY_BLOCK_PTR;
+  last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   FOR_EACH_BB (this_basic_block)
     {
@@ -1592,7 +1592,7 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
       /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
       && ! REGNO_REG_SET_P
-           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
+          (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
       && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
     {
       reg_stat_type *rsp = &reg_stat[REGNO (x)];
@@ -3938,7 +3938,7 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
        ni2dest = SET_DEST (newi2pat);
 
       for (insn = NEXT_INSN (i3);
-          insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
+          insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
@@ -4054,7 +4054,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
-                temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
+                temp
+                && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
@@ -9468,7 +9469,8 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
-                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
+                 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
+                  REGNO (x)))))
     {
       *nonzero &= rsp->last_set_nonzero_bits;
       return NULL;
@@ -9535,7 +9537,8 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
-                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
+                 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
+                  REGNO (x)))))
     {
       *result = rsp->last_set_sign_bit_copies;
       return NULL;
@@ -12564,7 +12567,8 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
              || (! (regno >= FIRST_PSEUDO_REGISTER
                     && REG_N_SETS (regno) == 1
                     && (!REGNO_REG_SET_P
-                        (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
+                        (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
+                         regno)))
                  && rsp->last_set_label > tick))
          {
            if (replace)
@@ -12679,7 +12683,7 @@ get_last_value (const_rtx x)
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || REGNO_REG_SET_P
-                (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
+                (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
     return 0;
 
   /* If the value was set in a later insn than the ones we are processing,
@@ -13740,7 +13744,7 @@ distribute_links (struct insn_link *links)
         since most links don't point very far away.  */
 
       for (insn = NEXT_INSN (link->insn);
-          (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
+          (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (DEBUG_INSN_P (insn))
index a5171ea0def92a6783ee4fc32c44bc98c9abcb50..c55835e7fe19596b51ebc21b7d081ae5035fe8df 100644 (file)
@@ -4835,7 +4835,8 @@ alpha_gp_save_rtx (void)
         label.  Emit the sequence properly on the edge.  We are only
         invoked from dw2_build_landing_pads and finish_eh_generation
         will call commit_edge_insertions thanks to a kludge.  */
-      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+      insert_insn_on_edge (seq,
+                          single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
 
       cfun->machine->gp_save_rtx = m;
     }
index 3cd53b07ca22d9b24a8c78d3ae9fd31ec12185c7..e8b5f8376ec9c55975570cc1fc043bf82abc5185 100644 (file)
@@ -5943,7 +5943,8 @@ require_pic_register (void)
                 we can't yet emit instructions directly in the final
                 insn stream.  Queue the insns on the entry edge, they will
                 be committed after everything else is expanded.  */
-             insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+             insert_insn_on_edge (seq,
+                                  single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
            }
        }
     }
@@ -18386,7 +18387,8 @@ arm_r3_live_at_start_p (void)
   /* Just look at cfg info, which is still close enough to correct at this
      point.  This gives false positives for broken functions that might use
      uninitialized data that happens to be allocated in r3, but who cares?  */
-  return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), 3);
+  return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                         3);
 }
 
 /* Compute the number of bytes used to store the static chain register on the
@@ -19919,7 +19921,7 @@ any_sibcall_could_use_r3 (void)
 
   if (!crtl->tail_call_emit)
     return false;
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (e->flags & EDGE_SIBCALL)
       {
        rtx call = BB_END (e->src);
index 0d473cbafbcc901a0a2d8d9953dbb897534f8f31..d7af939ebdaec8c714a0d7be1dc9c69fcf80bf82 100644 (file)
@@ -3600,7 +3600,7 @@ hwloop_optimize (hwloop_info loop)
 
       if (single_pred_p (bb)
          && single_pred_edge (bb)->flags & EDGE_FALLTHRU
-         && single_pred (bb) != ENTRY_BLOCK_PTR)
+         && single_pred (bb) != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          bb = single_pred (bb);
          last_insn = BB_END (bb);
index 6e74fe48eaac49a43dffcf9d5bbc82dee77200ae..a5eb2c1c8444690fc8ebfe3adc741af99722bf2b 100644 (file)
@@ -8027,7 +8027,7 @@ frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
   /* We need to keep the membar if there is an edge to the exit block.  */
   FOR_EACH_EDGE (succ, ei, bb->succs)
   /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
-    if (succ->dest == EXIT_BLOCK_PTR)
+    if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
       return;
 
   /* Work out the union of all successor blocks.  */
index 7ae9f57aeee3ff407fe849dd9c2a288d81d22222..b702413d5e32aa749e28368eee1facf2f9b68869 100644 (file)
@@ -5593,7 +5593,7 @@ ix86_eax_live_at_start_p (void)
      to correct at this point.  This gives false positives for broken
      functions that might use uninitialized data that happens to be
      allocated in eax, but who cares?  */
-  return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), 0);
+  return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 0);
 }
 
 static bool
@@ -9301,7 +9301,7 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
      Recompute the value as needed.  Do not recompute when amount of registers
      didn't change as reload does multiple calls to the function and does not
      expect the decision to change within single iteration.  */
-  else if (!optimize_bb_for_size_p (ENTRY_BLOCK_PTR)
+  else if (!optimize_bb_for_size_p (ENTRY_BLOCK_PTR_FOR_FN (cfun))
            && cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
     {
       int count = frame->nregs;
@@ -11390,7 +11390,7 @@ ix86_expand_epilogue (int style)
       /* Leave results in shorter dependency chains on CPUs that are
         able to grok it fast.  */
       else if (TARGET_USE_LEAVE
-              || optimize_bb_for_size_p (EXIT_BLOCK_PTR)
+              || optimize_bb_for_size_p (EXIT_BLOCK_PTR_FOR_FN (cfun))
               || !cfun->machine->use_fast_prologue_epilogue)
        ix86_emit_leave ();
       else
@@ -29838,7 +29838,7 @@ add_condition_to_bb (tree function_decl, tree version_decl,
   make_edge (bb1, bb3, EDGE_FALSE_VALUE); 
 
   remove_edge (e23);
-  make_edge (bb2, EXIT_BLOCK_PTR, 0);
+  make_edge (bb2, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
 
   pop_cfun ();
 
@@ -36573,7 +36573,7 @@ ix86_pad_returns (void)
   edge e;
   edge_iterator ei;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       basic_block bb = e->src;
       rtx ret = BB_END (bb);
@@ -36673,14 +36673,14 @@ ix86_count_insn (basic_block bb)
       edge prev_e;
       edge_iterator prev_ei;
 
-      if (e->src == ENTRY_BLOCK_PTR)
+      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          min_prev_count = 0;
          break;
        }
       FOR_EACH_EDGE (prev_e, prev_ei, e->src->preds)
        {
-         if (prev_e->src == ENTRY_BLOCK_PTR)
+         if (prev_e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              int count = ix86_count_insn_bb (e->src);
              if (count < min_prev_count)
@@ -36704,7 +36704,7 @@ ix86_pad_short_function (void)
   edge e;
   edge_iterator ei;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       rtx ret = BB_END (e->src);
       if (JUMP_P (ret) && ANY_RETURN_P (PATTERN (ret)))
@@ -36744,7 +36744,7 @@ ix86_seh_fixup_eh_fallthru (void)
   edge e;
   edge_iterator ei;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       rtx insn, next;
 
index 307681c995d17e1764b18d12a67c2f0869b49a40..71bc666b685035cf08266eb712eacfc7866adb27 100644 (file)
@@ -3492,7 +3492,7 @@ ia64_expand_prologue (void)
       edge e;
       edge_iterator ei;
 
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if ((e->flags & EDGE_FAKE) == 0
            && (e->flags & EDGE_FALLTHRU) != 0)
          break;
@@ -10187,7 +10187,8 @@ ia64_asm_unwind_emit (FILE *asm_out_file, rtx insn)
 
   if (NOTE_INSN_BASIC_BLOCK_P (insn))
     {
-      last_block = NOTE_BASIC_BLOCK (insn)->next_bb == EXIT_BLOCK_PTR;
+      last_block = NOTE_BASIC_BLOCK (insn)->next_bb
+     == EXIT_BLOCK_PTR_FOR_FN (cfun);
 
       /* Restore unwind state from immediately before the epilogue.  */
       if (need_copy_state)
index 4454bf2d0d40085a5041e599a58ced1bace4a4fe..008f088df571ed7497343c9ad8cf89c7913ad639 100644 (file)
@@ -4566,7 +4566,7 @@ nds32_fp_as_gp_check_available (void)
       || frame_pointer_needed
       || NDS32_REQUIRED_CALLEE_SAVED_P (FP_REGNUM)
       || (cfun->stdarg == 1)
-      || (find_fallthru_edge (EXIT_BLOCK_PTR->preds) == NULL))
+      || (find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == NULL))
     return 0;
 
   /* Now we can check the possibility of using fp_as_gp optimization.  */
index 5c39d94395aaef537b72dbd7017f40c1d7b8f3d7..7556eb6385e5fa43412bd2f5694367c6a3b1dfa7 100644 (file)
@@ -22953,7 +22953,7 @@ rs6000_emit_prologue (void)
                                      && DEFAULT_ABI == ABI_V4
                                      && flag_pic
                                      && ! info->lr_save_p
-                                     && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
+                                     && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0);
       if (save_LR_around_toc_setup)
        {
          rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
index 35a44f28a19b3503b799d53ad457523d45b3ed3e..9b8bd1e0c4b5b57aaa149e782e3ad854d6fd080a 100644 (file)
@@ -967,7 +967,7 @@ cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
       edge_iterator ei;
 
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (e->dest != EXIT_BLOCK_PTR
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && BB_HEAD (e->dest) == JUMP_LABEL (jump))
          {
            e->flags |= EDGE_FALLTHRU;
@@ -1376,7 +1376,7 @@ find_implicit_sets (void)
        ? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;
 
       /* If DEST doesn't go anywhere, ignore it.  */
-      if (! dest || dest == EXIT_BLOCK_PTR)
+      if (! dest || dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       /* We have found a suitable implicit set.  Try to record it now as
@@ -1612,7 +1612,7 @@ bypass_block (basic_block bb, rtx setcc, rtx jump)
          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
-             && dest != EXIT_BLOCK_PTR)
+             && dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
             {
              redirect_edge_and_branch_force (e, dest);
 
@@ -1664,15 +1664,15 @@ bypass_conditional_jumps (void)
   rtx dest;
 
   /* Note we start at block 1.  */
-  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return 0;
 
   bypass_last_basic_block = last_basic_block;
   mark_dfs_back_edges ();
 
   changed = 0;
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
-                 EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       /* Check for more than one predecessor.  */
       if (!single_pred_p (bb))
@@ -1836,7 +1836,8 @@ one_cprop_pass (void)
       /* Allocate vars to track sets of regs.  */
       reg_set_bitmap = ALLOC_REG_SET (NULL);
 
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
+                     EXIT_BLOCK_PTR_FOR_FN (cfun),
                      next_bb)
        {
          /* Reset tables used to keep track of what's still valid [since
index 43fa1e8191f56865fce6d09d9a35a54994a86577..e0f77964c15286fdeaff2c5cc5efdec9d6ed29e7 100644 (file)
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -6200,7 +6200,7 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
              && e == BRANCH_EDGE (previous_bb_in_path))
            {
              bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
-             if (bb != EXIT_BLOCK_PTR
+             if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && single_pred_p (bb)
                  /* We used to assert here that we would only see blocks
                     that we have not visited yet.  But we may end up
@@ -6254,7 +6254,7 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
 
          if (e
              && !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
-             && e->dest != EXIT_BLOCK_PTR
+             && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && single_pred_p (e->dest)
              /* Avoid visiting basic blocks twice.  The large comment
                 above explains why this can happen.  */
@@ -7166,7 +7166,7 @@ cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
        continue;
 
       if (EDGE_COUNT (e->dest->preds) != 1
-         || e->dest == EXIT_BLOCK_PTR
+         || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
          /* Avoid endless recursion on unreachable blocks.  */
          || e->dest == orig_bb)
        continue;
index 59fc2f64444e713b54eb83c8a09230cfc5bc32c9..c6349c8b0a5389acfe2abbff95e9aa79c36cc482 100644 (file)
@@ -1007,7 +1007,7 @@ static void
 df_lr_confluence_0 (basic_block bb)
 {
   bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
-  if (bb != EXIT_BLOCK_PTR)
+  if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     bitmap_copy (op1, &df->hardware_regs_used);
 }
 
index aace96d45363b8fe0beaf390f20d6fb8979e9c63..eb7e4d47e0ca673812a11156ef664e94265c7e1d 100644 (file)
@@ -3873,7 +3873,7 @@ df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
   EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
     {
       df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
-                    ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
+                    ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
     }
 
   df_canonize_collection_rec (collection_rec);
@@ -4034,17 +4034,17 @@ df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exi
 
   EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
     df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
-                  EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
+                  EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
 
 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
   /* It is deliberate that this is not put in the exit block uses but
      I do not know why.  */
   if (reload_completed
       && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
-      && bb_has_eh_pred (EXIT_BLOCK_PTR)
+      && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
       && fixed_regs[ARG_POINTER_REGNUM])
     df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
-                  EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
+                  EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
 #endif
 
   df_canonize_collection_rec (collection_rec);
index 6530109485455f3c680bd58f5a3ec5929dd88dd9..3d88c0d3ed109de0e52ec0ef04faea245cb2b9ee 100644 (file)
@@ -240,14 +240,14 @@ calc_dfs_tree_nonrec (struct dom_info *di, basic_block bb, bool reverse)
   if (reverse)
     {
       ei = ei_start (bb->preds);
-      en_block = EXIT_BLOCK_PTR;
-      ex_block = ENTRY_BLOCK_PTR;
+      en_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
+      ex_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
     }
   else
     {
       ei = ei_start (bb->succs);
-      en_block = ENTRY_BLOCK_PTR;
-      ex_block = EXIT_BLOCK_PTR;
+      en_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+      ex_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
     }
 
   /* When the stack is empty we break out of this loop.  */
@@ -333,7 +333,8 @@ static void
 calc_dfs_tree (struct dom_info *di, bool reverse)
 {
   /* The first block is the ENTRY_BLOCK (or EXIT_BLOCK if REVERSE).  */
-  basic_block begin = reverse ? EXIT_BLOCK_PTR : ENTRY_BLOCK_PTR;
+  basic_block begin = (reverse
+                      ? EXIT_BLOCK_PTR_FOR_FN (cfun) : ENTRY_BLOCK_PTR_FOR_FN (cfun));
   di->dfs_order[last_basic_block] = di->dfsnum;
   di->dfs_to_bb[di->dfsnum] = begin;
   di->dfsnum++;
@@ -501,9 +502,9 @@ calc_idoms (struct dom_info *di, bool reverse)
   edge_iterator ei, einext;
 
   if (reverse)
-    en_block = EXIT_BLOCK_PTR;
+    en_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
   else
-    en_block = ENTRY_BLOCK_PTR;
+    en_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   /* Go backwards in DFS order, to first look at the leafs.  */
   v = di->nodes;
@@ -1097,7 +1098,7 @@ prune_bbs_to_update_dominators (vec<basic_block> bbs,
 
   for (i = 0; bbs.iterate (i, &bb);)
     {
-      if (bb == ENTRY_BLOCK_PTR)
+      if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        goto succeed;
 
       if (single_pred_p (bb))
@@ -1171,7 +1172,7 @@ determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
   if (son[y] == -1)
     return;
   if (y == (int) bbs.length ())
-    ybb = ENTRY_BLOCK_PTR;
+    ybb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   else
     ybb = bbs[y];
 
@@ -1344,7 +1345,7 @@ iterate_fix_dominators (enum cdi_direction dir, vec<basic_block> bbs,
        set_immediate_dominator (CDI_DOMINATORS, bb, NULL);
       *map->insert (bb) = i;
     }
-  *map->insert (ENTRY_BLOCK_PTR) = n;
+  *map->insert (ENTRY_BLOCK_PTR_FOR_FN (cfun)) = n;
 
   g = new_graph (n + 1);
   for (y = 0; y < g->n_vertices; y++)
index 4c7354ecf92d791710d7d7e344808ffd7a47995c..3350e4bb510b9906280b06cd2d4356cdba61c855 100644 (file)
@@ -169,8 +169,8 @@ dom_walker::walk (basic_block bb)
     {
       /* Don't worry about unreachable blocks.  */
       if (EDGE_COUNT (bb->preds) > 0
-         || bb == ENTRY_BLOCK_PTR
-         || bb == EXIT_BLOCK_PTR)
+         || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
+         || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          /* Callback for subclasses to do custom things before we have walked
             the dominator children, but before we walk statements.  */
index 9662da845ba63a2950fef11ff016698c6dea9c83..6584ea35bbe1abb3ac7de6ae5acc2972de88f37b 100644 (file)
--- a/gcc/dse.c
+++ b/gcc/dse.c
@@ -2751,7 +2751,7 @@ dse_step1 (void)
          if (stores_off_frame_dead_at_return
              && (EDGE_COUNT (bb->succs) == 0
                  || (single_succ_p (bb)
-                     && single_succ (bb) == EXIT_BLOCK_PTR
+                     && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
                      && ! crtl->calls_eh_return)))
            {
              insn_info_t i_ptr = active_local_stores;
index f8296b223bc2198a8d92243ac2cba2254af5d642..f7dc193ce4778ebed65d3bf24a6b9dc25b46830d 100644 (file)
@@ -1241,7 +1241,7 @@ sjlj_emit_function_enter (rtx dispatch_label)
       }
 
   if (fn_begin_outside_block)
-    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   else
     emit_insn_after (seq, fn_begin);
 }
@@ -1509,7 +1509,7 @@ finish_eh_generation (void)
 
   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
       /* Kludge for Alpha (see alpha_gp_save_rtx).  */
-      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
+      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
     commit_edge_insertions ();
 
   /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
index 2d206f1311a7d9de208214a0111b8afcc7af4048..f2adde969751114d545ff6fcf23db707adec78ce 100644 (file)
@@ -762,7 +762,7 @@ compute_alignments (void)
          && (branch_frequency > freq_threshold
              || (bb->frequency > bb->prev_bb->frequency * 10
                  && (bb->prev_bb->frequency
-                     <= ENTRY_BLOCK_PTR->frequency / 2))))
+                     <= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
        {
          log = JUMP_ALIGN (label);
          if (dump_file)
index 87953e335c4a75f08a43024ad93a6fe86d90aaee..fde4a8e6d07be5b7673dae870f01cbafb6c3c572 100644 (file)
@@ -3978,7 +3978,8 @@ regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
     return false;
 
   return ((REG_N_SETS (regno) > 1
-          || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
+          || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                              regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
 }
 
@@ -5400,7 +5401,7 @@ next_block_for_reg (basic_block bb, int regno, int end_regno)
 
   /* We can sometimes encounter dead code.  Don't try to move it
      into the exit block.  */
-  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
+  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return NULL;
 
   /* Reject targets of abnormal edges.  This is needed for correctness
@@ -5725,7 +5726,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
 
   src_bbs.create (EDGE_COUNT (last_bb->preds));
   FOR_EACH_EDGE (e, ei, last_bb->preds)
-    if (e->src != ENTRY_BLOCK_PTR)
+    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
       src_bbs.quick_push (e->src);
 
   label = BB_HEAD (last_bb);
@@ -5805,7 +5806,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
        }
 
       /* Fix up the CFG for the successful change we just made.  */
-      redirect_edge_succ (e, EXIT_BLOCK_PTR);
+      redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
       e->flags &= ~EDGE_CROSSING;
     }
   src_bbs.release ();
@@ -5897,7 +5898,7 @@ thread_prologue_and_epilogue_insns (void)
 
   df_analyze ();
 
-  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
+  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   inserted = false;
   seq = NULL_RTX;
@@ -5907,8 +5908,8 @@ thread_prologue_and_epilogue_insns (void)
   /* Can't deal with multiple successors of the entry block at the
      moment.  Function should always have at least one entry
      point.  */
-  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
-  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
+  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
+  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   orig_entry_edge = entry_edge;
 
   split_prologue_seq = NULL_RTX;
@@ -6081,7 +6082,7 @@ thread_prologue_and_epilogue_insns (void)
          basic_block tmp_bb = vec.pop ();
 
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
-           if (e->dest != EXIT_BLOCK_PTR
+           if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
                && bitmap_set_bit (&bb_flags, e->dest->index))
              vec.quick_push (e->dest);
        }
@@ -6089,7 +6090,7 @@ thread_prologue_and_epilogue_insns (void)
       /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
-      vec.quick_push (EXIT_BLOCK_PTR);
+      vec.quick_push (EXIT_BLOCK_PTR_FOR_FN (cfun));
       while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();
@@ -6266,7 +6267,7 @@ thread_prologue_and_epilogue_insns (void)
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
-                                                 EXIT_BLOCK_PTR->prev_bb);
+                                                 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }
 
@@ -6280,7 +6281,7 @@ thread_prologue_and_epilogue_insns (void)
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            &bb_flags);
                    tbb = single_succ (tbb);
-                   if (tbb == EXIT_BLOCK_PTR)
+                   if (tbb == EXIT_BLOCK_PTR_FOR_FN (cfun))
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
@@ -6294,7 +6295,8 @@ thread_prologue_and_epilogue_insns (void)
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
-               make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
+               make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
+                                      eflags);
 
                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
@@ -6325,15 +6327,15 @@ thread_prologue_and_epilogue_insns (void)
 
   /* If the exit block has no non-fake predecessors, we don't need
      an epilogue.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if ((e->flags & EDGE_FAKE) == 0)
       break;
   if (e == NULL)
     goto epilogue_done;
 
-  rtl_profile_for_bb (EXIT_BLOCK_PTR);
+  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
 
-  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
+  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
 
   /* If we're allowed to generate a simple return instruction, then by
      definition we don't need a full epilogue.  If the last basic
@@ -6349,10 +6351,10 @@ thread_prologue_and_epilogue_insns (void)
 
          /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
             (but won't remove).  Stop at end of current preds.  */
-         last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
+         last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
          for (i = 0; i < last; i++)
            {
-             e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
+             e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
              if (LABEL_P (BB_HEAD (e->src))
                  && !bitmap_bit_p (&bb_flags, e->src->index)
                  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
@@ -6416,7 +6418,7 @@ thread_prologue_and_epilogue_insns (void)
      code.  In order to be able to properly annotate these with unwind
      info, try to split them now.  If we get a valid split, drop an
      EPILOGUE_BEG note and mark the insns as epilogue insns.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       rtx prev, last, trial;
 
@@ -6507,7 +6509,7 @@ epilogue_done:
 
       /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
@@ -6544,7 +6546,7 @@ epilogue_done:
        }
 
       /* Also check returns we might need to add to tail blocks.  */
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
@@ -6559,7 +6561,7 @@ epilogue_done:
          inserting new BBs at the end of the function. Do this
          after the call to split_block above which may split
          the original exit pred.  */
-      exit_pred = EXIT_BLOCK_PTR->prev_bb;
+      exit_pred = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
 
       FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
        {
@@ -6596,7 +6598,7 @@ epilogue_done:
              emit_barrier_after (start);
 
              *pdest_bb = bb;
-             make_edge (bb, EXIT_BLOCK_PTR, 0);
+             make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
@@ -6605,7 +6607,7 @@ epilogue_done:
 
   if (entry_edge != orig_entry_edge)
     {
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
@@ -6618,7 +6620,9 @@ epilogue_done:
 
 #ifdef HAVE_sibcall_epilogue
   /* Emit sibling epilogues before any sibling call sites.  */
-  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
+  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
+                                                            ei_safe_edge (ei));
+                                                            )
     {
       basic_block bb = e->src;
       rtx insn = BB_END (bb);
@@ -6749,7 +6753,7 @@ reposition_prologue_and_epilogue_notes (void)
       edge_iterator ei;
       edge e;
 
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx insn, first = NULL, note = NULL;
          basic_block bb = e->src;
index a37ac6bc1f8d4a12c8c59d77509838d4e8a0bad1..3012c4d1d3671499d340cecfdb45775dcc7503ff 100644 (file)
@@ -2063,7 +2063,7 @@ pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
     {
       basic_block pred_bb = pred->src;
 
-      if (pred->src == ENTRY_BLOCK_PTR
+      if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
          /* Has predecessor has already been visited?  */
          || visited[pred_bb->index])
        ;/* Nothing to do.  */
@@ -2830,7 +2830,7 @@ compute_code_hoist_vbeinout (void)
         the convergence.  */
       FOR_EACH_BB_REVERSE (bb)
        {
-         if (bb->next_bb != EXIT_BLOCK_PTR)
+         if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              bitmap_intersection_of_succs (hoist_vbeout[bb->index],
                                            hoist_vbein, bb);
@@ -2908,7 +2908,7 @@ update_bb_reg_pressure (basic_block bb, rtx from)
       FOR_EACH_EDGE (succ, ei, bb->succs)
        {
          succ_bb = succ->dest;
-         if (succ_bb == EXIT_BLOCK_PTR)
+         if (succ_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
            continue;
 
          if (bitmap_bit_p (BB_DATA (succ_bb)->live_in, REGNO (dreg)))
@@ -3041,7 +3041,7 @@ should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
     {
       basic_block pred_bb = pred->src;
 
-      if (pred->src == ENTRY_BLOCK_PTR)
+      if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        break;
       else if (pred_bb == expr_bb)
        continue;
@@ -3185,16 +3185,16 @@ hoist_code (void)
       bb_size[bb->index] = to_head;
     }
 
-  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1
-             && (EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
-                 == ENTRY_BLOCK_PTR->next_bb));
+  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1
+             && (EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0)->dest
+                 == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb));
 
   from_bbs = BITMAP_ALLOC (NULL);
   if (flag_ira_hoist_pressure)
     hoisted_bbs = BITMAP_ALLOC (NULL);
 
   dom_tree_walk = get_all_dominated_blocks (CDI_DOMINATORS,
-                                           ENTRY_BLOCK_PTR->next_bb);
+                                           ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb);
 
   /* Walk over each basic block looking for potentially hoistable
      expressions, nothing gets hoisted from the entry block.  */
index 557bf35f0153f1c63cdc692d0b84b530804f4e8e..a3e74fe5012fbb8a6f9b12940ff78538e7989d42 100644 (file)
@@ -713,7 +713,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
  restart:
   if (single_pred_p (dest)
       && gimple_seq_empty_p (phi_nodes (dest))
-      && dest != EXIT_BLOCK_PTR)
+      && dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       *gsi = gsi_start_bb (dest);
       if (gsi_end_p (*gsi))
@@ -744,7 +744,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
   src = e->src;
   if ((e->flags & EDGE_ABNORMAL) == 0
       && single_succ_p (src)
-      && src != ENTRY_BLOCK_PTR)
+      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
     {
       *gsi = gsi_last_bb (src);
       if (gsi_end_p (*gsi))
@@ -830,7 +830,8 @@ gsi_commit_edge_inserts (void)
   edge e;
   edge_iterator ei;
 
-  gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
+  gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                             NULL);
 
   FOR_EACH_BB (bb)
     FOR_EACH_EDGE (e, ei, bb->succs)
index 4eb897fd40cee8f01705148212f9064c0a2dfeff..72c62844fa67a2420e48ecc1302acb7748f77318 100644 (file)
@@ -735,7 +735,7 @@ slsr_process_phi (gimple phi, bool speed)
          derived_base_name = arg;
 
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
-           arg_bb = single_succ (ENTRY_BLOCK_PTR);
+           arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
          else
            gimple_bb (SSA_NAME_DEF_STMT (arg));
        }
index 1dc9dbc7734d0c4e91f3f7cd8c0ba8b6e8829f52..b75135af7420af3de71fc2cdae731a3b8c94eeda 100644 (file)
@@ -195,7 +195,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
   const char *fillcolors[3] = { "grey88", "grey77", "grey66" };
 
   if (loop->header != NULL
-      && loop->latch != EXIT_BLOCK_PTR)
+      && loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun))
     pp_printf (pp,
               "\tsubgraph cluster_%d_%d {\n"
               "\tstyle=\"filled\";\n"
@@ -214,7 +214,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
   if (loop->header == NULL)
     return;
 
-  if (loop->latch == EXIT_BLOCK_PTR)
+  if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
     body = get_loop_body (loop);
   else
     body = get_loop_body_in_bfs_order (loop);
@@ -228,7 +228,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
 
   free (body);
 
-  if (loop->latch != EXIT_BLOCK_PTR)
+  if (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun))
     pp_printf (pp, "\t}\n");
 }
 
index a661dbbc91b101fa056151383894d453fafe5cce..ad3e1dc4ede71478d3f5131c8af890ea62cb275c 100644 (file)
@@ -1098,7 +1098,7 @@ translate_clast_user (struct clast_user_stmt *stmt, edge next_e,
   gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
   vec<tree> iv_map;
 
-  if (GBB_BB (gbb) == ENTRY_BLOCK_PTR)
+  if (GBB_BB (gbb) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return next_e;
 
   nb_loops = number_of_loops (cfun);
index 001712673e756c8bfa71530729ae47ea8030a22d..0cfb5a59cc9986926aed94de6937a5b87f91142c 100644 (file)
@@ -448,7 +448,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
   gimple stmt;
 
   /* XXX: ENTRY_BLOCK_PTR could be optimized in later steps.  */
-  basic_block entry_block = ENTRY_BLOCK_PTR;
+  basic_block entry_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   stmt = harmful_stmt_in_bb (entry_block, outermost_loop, bb);
   result.difficult = (stmt != NULL);
   result.exit = NULL;
@@ -1030,7 +1030,7 @@ create_sese_edges (vec<sd_region> regions)
   FOR_EACH_VEC_ELT (regions, i, s)
     /* Don't handle multiple edges exiting the function.  */
     if (!find_single_exit_edge (s)
-       && s->exit != EXIT_BLOCK_PTR)
+       && s->exit != EXIT_BLOCK_PTR_FOR_FN (cfun))
       create_single_exit_edge (s);
 
   unmark_exit_edges (regions);
@@ -1402,7 +1402,8 @@ build_scops (vec<scop_p> *scops)
   stack_vec<sd_region, 3> regions;
 
   canonicalize_loop_closed_ssa_form ();
-  build_scops_1 (single_succ (ENTRY_BLOCK_PTR), ENTRY_BLOCK_PTR->loop_father,
+  build_scops_1 (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father,
                 &regions, loop);
   create_sese_edges (regions);
   build_graphite_scops (regions, scops);
index beddc116ffb9d82983b64153286c7984554e0162..c98b36c1ab6be723ba2664213073ad792e53f6cf 100644 (file)
@@ -1615,7 +1615,7 @@ priority (rtx insn)
 
           /* Selective scheduling does not define RECOVERY_BLOCK macro.  */
          rec = sel_sched_p () ? NULL : RECOVERY_BLOCK (insn);
-         if (!rec || rec == EXIT_BLOCK_PTR)
+         if (!rec || rec == EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              prev_first = PREV_INSN (insn);
              twin = insn;
@@ -7522,7 +7522,7 @@ static void
 sched_extend_bb (void)
 {
   /* The following is done to keep current_sched_info->next_tail non null.  */
-  rtx end = BB_END (EXIT_BLOCK_PTR->prev_bb);
+  rtx end = BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
   rtx insn = DEBUG_INSN_P (end) ? prev_nondebug_insn (end) : end;
   if (NEXT_INSN (end) == 0
       || (!NOTE_P (insn)
@@ -7533,7 +7533,7 @@ sched_extend_bb (void)
       rtx note = emit_note_after (NOTE_INSN_DELETED, end);
       /* Make note appear outside BB.  */
       set_block_for_insn (note, NULL);
-      BB_END (EXIT_BLOCK_PTR->prev_bb) = end;
+      BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb) = end;
     }
 }
 
@@ -7551,7 +7551,7 @@ init_before_recovery (basic_block *before_recovery_ptr)
   basic_block last;
   edge e;
 
-  last = EXIT_BLOCK_PTR->prev_bb;
+  last = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
   e = find_fallthru_edge_from (last);
 
   if (e)
@@ -7591,7 +7591,8 @@ init_before_recovery (basic_block *before_recovery_ptr)
 
       redirect_edge_succ (e, single);
       make_single_succ_edge (single, empty, 0);
-      make_single_succ_edge (empty, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
+      make_single_succ_edge (empty, EXIT_BLOCK_PTR_FOR_FN (cfun),
+                            EDGE_FALLTHRU);
 
       label = block_label (empty);
       x = emit_jump_insn_after (gen_jump (label), BB_END (single));
@@ -7734,14 +7735,14 @@ create_check_block_twin (rtx insn, bool mutate_p)
     }
   else
     {
-      rec = EXIT_BLOCK_PTR;
+      rec = EXIT_BLOCK_PTR_FOR_FN (cfun);
       label = NULL_RTX;
     }
 
   /* Emit CHECK.  */
   check = targetm.sched.gen_spec_check (insn, label, todo_spec);
 
-  if (rec != EXIT_BLOCK_PTR)
+  if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       /* To have mem_reg alive at the beginning of second_bb,
         we emit check BEFORE insn, so insn after splitting
@@ -7774,7 +7775,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
 
   /* Initialize TWIN (twin is a duplicate of original instruction
      in the recovery block).  */
-  if (rec != EXIT_BLOCK_PTR)
+  if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       sd_iterator_def sd_it;
       dep_t dep;
@@ -7811,7 +7812,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
      provide correct value for INSN_TICK (TWIN).  */
   sd_copy_back_deps (twin, insn, true);
 
-  if (rec != EXIT_BLOCK_PTR)
+  if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
     /* In case of branchy check, fix CFG.  */
     {
       basic_block first_bb, second_bb;
@@ -7823,7 +7824,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
       sched_create_recovery_edges (first_bb, rec, second_bb);
 
       sched_init_only_bb (second_bb, first_bb);
-      sched_init_only_bb (rec, EXIT_BLOCK_PTR);
+      sched_init_only_bb (rec, EXIT_BLOCK_PTR_FOR_FN (cfun));
 
       jump = BB_END (rec);
       haifa_init_insn (jump);
@@ -7864,7 +7865,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
       init_dep_1 (new_dep, pro, check, DEP_TYPE (dep), ds);
       sd_add_dep (new_dep, false);
 
-      if (rec != EXIT_BLOCK_PTR)
+      if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          DEP_CON (new_dep) = twin;
          sd_add_dep (new_dep, false);
@@ -7913,7 +7914,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
   /* Future speculations: call the helper.  */
   process_insn_forw_deps_be_in_spec (insn, twin, fs);
 
-  if (rec != EXIT_BLOCK_PTR)
+  if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       /* Which types of dependencies should we use here is,
         generally, machine-dependent question...  But, for now,
@@ -8127,7 +8128,7 @@ unlink_bb_notes (basic_block first, basic_block last)
   bb_header = XNEWVEC (rtx, last_basic_block);
 
   /* Make a sentinel.  */
-  if (last->next_bb != EXIT_BLOCK_PTR)
+  if (last->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     bb_header[last->next_bb->index] = 0;
 
   first = first->next_bb;
@@ -8171,7 +8172,7 @@ restore_bb_notes (basic_block first)
   first = first->next_bb;
   /* Remember: FIRST is actually a second basic block in the ebb.  */
 
-  while (first != EXIT_BLOCK_PTR
+  while (first != EXIT_BLOCK_PTR_FOR_FN (cfun)
         && bb_header[first->index])
     {
       rtx prev, label, note, next;
index 5d26638748510031a7626722f33e1e9bd3ed9b77..77c8149f806d510d902a66e2fc355f343e790a06 100644 (file)
@@ -260,7 +260,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
     {
       edge e;
       edge_iterator ei;
-      if (bb == EXIT_BLOCK_PTR)
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          /* We've reached the exit block.  The loop must be bad. */
          if (dump_file)
@@ -539,7 +539,7 @@ reorder_loops (hwloop_info loops)
   
   FOR_EACH_BB (bb)
     {
-      if (bb->next_bb != EXIT_BLOCK_PTR)
+      if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        bb->aux = bb->next_bb;
       else
        bb->aux = NULL;
index 17d26c583c7501faed98572462df3ded401ad5ed..ac0276cea041ae547886fc517bc5666f7cd81a1f 100644 (file)
@@ -3185,7 +3185,8 @@ merge_if_block (struct ce_if_block * ce_info)
       /* There should still be something at the end of the THEN or ELSE
          blocks taking us to our final destination.  */
        gcc_assert (JUMP_P (last)
-                   || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
+                   || (EDGE_SUCC (combo_bb, 0)->dest
+                       == EXIT_BLOCK_PTR_FOR_FN (cfun)
                        && CALL_P (last)
                        && SIBLING_CALL_P (last))
                    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
@@ -3199,7 +3200,7 @@ merge_if_block (struct ce_if_block * ce_info)
      may be zero incoming edges if the THEN block didn't actually join
      back up (as with a call to a non-return function).  */
   else if (EDGE_COUNT (join_bb->preds) < 2
-          && join_bb != EXIT_BLOCK_PTR)
+          && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       /* We can merge the JOIN cleanly and update the dataflow try
         again on this pass.*/
@@ -3216,7 +3217,7 @@ merge_if_block (struct ce_if_block * ce_info)
                  && single_succ (combo_bb) == join_bb);
 
       /* Remove the jump and cruft from the end of the COMBO block.  */
-      if (join_bb != EXIT_BLOCK_PTR)
+      if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        tidy_fallthru_edge (single_succ_edge (combo_bb));
     }
 
@@ -3495,7 +3496,7 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
      code processing.  ??? we should fix this in the future.  */
   if (EDGE_COUNT (then_bb->succs) == 0)
     {
-      if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR)
+      if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          rtx last_insn = BB_END (then_bb);
 
@@ -3586,7 +3587,8 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
   next = then_bb;
   if (else_bb && (next = next->next_bb) != else_bb)
     return FALSE;
-  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
+  if ((next = next->next_bb) != join_bb
+      && join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       if (else_bb)
        join_bb = NULL;
@@ -3725,7 +3727,7 @@ block_has_only_trap (basic_block bb)
   rtx trap;
 
   /* We're not the exit block.  */
-  if (bb == EXIT_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return NULL_RTX;
 
   /* The block must have no successors.  */
@@ -3881,7 +3883,7 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
                                    predictable_edge_p (then_edge)))))
     return FALSE;
 
-  if (else_bb == EXIT_BLOCK_PTR)
+  if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       rtx jump = BB_END (else_edge->src);
       gcc_assert (JUMP_P (jump));
@@ -3902,12 +3904,12 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
 
   if (then_bb->next_bb == else_bb
       && then_bb->prev_bb == test_bb
-      && else_bb != EXIT_BLOCK_PTR)
+      && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
       new_bb = 0;
     }
-  else if (else_bb == EXIT_BLOCK_PTR)
+  else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
                                             else_bb, else_target);
   else
@@ -4196,9 +4198,9 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
         saved in caller-saved regs.  A caller-saved reg requires the
         prologue, killing a shrink-wrap opportunity.  */
       if ((flag_shrink_wrap && HAVE_simple_return && !epilogue_completed)
-         && ENTRY_BLOCK_PTR->next_bb == test_bb
+         && ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
          && single_succ_p (new_dest)
-         && single_succ (new_dest) == EXIT_BLOCK_PTR
+         && single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
        {
          regset return_regs;
@@ -4213,8 +4215,10 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                && targetm.calls.function_value_regno_p (i))
              bitmap_set_bit (return_regs, INCOMING_REGNO (i));
 
-         bitmap_and_into (return_regs, df_get_live_out (ENTRY_BLOCK_PTR));
-         bitmap_and_into (return_regs, df_get_live_in (EXIT_BLOCK_PTR));
+         bitmap_and_into (return_regs,
+                          df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
+         bitmap_and_into (return_regs,
+                          df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
          if (!bitmap_empty_p (return_regs))
            {
              FOR_BB_INSNS_REVERSE (new_dest, insn)
@@ -4259,7 +4263,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
     {
       if (JUMP_P (BB_END (dest_edge->src)))
        new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
-      else if (new_dest == EXIT_BLOCK_PTR)
+      else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
        new_dest_label = ret_rtx;
       else
        new_dest_label = block_label (new_dest);
index 3cd335ffd7d018c5d3c5bdb21683d0097e936295..3d95de144f9df45a1251574f00a9ebabdbcb53bd 100644 (file)
@@ -1841,9 +1841,9 @@ compute_bb_predicates (struct cgraph_node *node,
     }
 
   /* Entry block is always executable.  */
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (my_function)->aux
+  ENTRY_BLOCK_PTR_FOR_FN (my_function)->aux
     = pool_alloc (edge_predicate_pool);
-  *(struct predicate *) ENTRY_BLOCK_PTR_FOR_FUNCTION (my_function)->aux
+  *(struct predicate *) ENTRY_BLOCK_PTR_FOR_FN (my_function)->aux
     = true_predicate ();
 
   /* A simple dataflow propagation of predicates forward in the CFG.
@@ -2066,7 +2066,7 @@ record_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
     return false;
   bitmap_set_bit (info->bb_set,
                  SSA_NAME_IS_DEFAULT_DEF (vdef)
-                 ? ENTRY_BLOCK_PTR->index
+                 ? ENTRY_BLOCK_PTR_FOR_FN (cfun)->index
                  : gimple_bb (SSA_NAME_DEF_STMT (vdef))->index);
   return false;
 }
@@ -2102,7 +2102,7 @@ param_change_prob (gimple stmt, int i)
        return REG_BR_PROB_BASE;
 
       if (SSA_NAME_IS_DEFAULT_DEF (op))
-       init_freq = ENTRY_BLOCK_PTR->frequency;
+       init_freq = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
       else
        init_freq = gimple_bb (SSA_NAME_DEF_STMT (op))->frequency;
 
@@ -2142,8 +2142,8 @@ param_change_prob (gimple stmt, int i)
       /* Assume that every memory is initialized at entry.
          TODO: Can we easilly determine if value is always defined
          and thus we may skip entry block?  */
-      if (ENTRY_BLOCK_PTR->frequency)
-       max = ENTRY_BLOCK_PTR->frequency;
+      if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
+       max = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
       else
        max = 1;
 
index 9e5b1ab921ed7d9489349d58730ea41e1d6e15f9..ed96c3c21ffcc325f5b6c79ee7afd6eb874b13b1 100644 (file)
@@ -1587,7 +1587,7 @@ local_pure_const (void)
 
   /* Do NORETURN discovery.  */
   if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
-      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
+      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
     {
       warn_function_noreturn (cfun->decl);
       if (dump_file)
@@ -1723,7 +1723,7 @@ static unsigned int
 execute_warn_function_noreturn (void)
 {
   if (!TREE_THIS_VOLATILE (current_function_decl)
-      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
+      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
     warn_function_noreturn (current_function_decl);
   return 0;
 }
index 59d174220c3ef38c3975793d17d9e3c5a8fbf6ad..d7d6b8fd70f9db1db40bc4479a237ceba0995dac 100644 (file)
@@ -210,7 +210,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
   bool ok = true;
 
   FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
-    if (e->src != ENTRY_BLOCK_PTR
+    if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
        && !bitmap_bit_p (current->split_bbs, e->src->index))
       {
         worklist.safe_push (e->src);
@@ -223,7 +223,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
       basic_block bb = worklist.pop ();
 
       FOR_EACH_EDGE (e, ei, bb->preds)
-       if (e->src != ENTRY_BLOCK_PTR
+       if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
            && bitmap_set_bit (seen, e->src->index))
          {
            gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
@@ -396,7 +396,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
 
   /* Do not split when we would end up calling function anyway.  */
   if (incoming_freq
-      >= (ENTRY_BLOCK_PTR->frequency
+      >= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
          * PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
     {
       /* When profile is guessed, we can not expect it to give us
@@ -406,13 +406,13 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
         is likely noticeable win.  */
       if (back_edge
          && profile_status != PROFILE_READ
-         && incoming_freq < ENTRY_BLOCK_PTR->frequency)
+         && incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "  Split before loop, accepting despite low frequencies %i %i.\n",
                     incoming_freq,
-                    ENTRY_BLOCK_PTR->frequency);
+                    ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
        }
       else
        {
@@ -583,7 +583,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
 
   /* split_function fixes up at most one PHI non-virtual PHI node in return_bb,
      for the return value.  If there are other PHIs, give up.  */
-  if (return_bb != EXIT_BLOCK_PTR)
+  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       gimple_stmt_iterator psi;
 
@@ -650,15 +650,15 @@ static basic_block
 find_return_bb (void)
 {
   edge e;
-  basic_block return_bb = EXIT_BLOCK_PTR;
+  basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
   gimple_stmt_iterator bsi;
   bool found_return = false;
   tree retval = NULL_TREE;
 
-  if (!single_pred_p (EXIT_BLOCK_PTR))
+  if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
     return return_bb;
 
-  e = single_pred_edge (EXIT_BLOCK_PTR);
+  e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
   for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
     {
       gimple stmt = gsi_stmt (bsi);
@@ -937,7 +937,7 @@ find_split_points (int overall_time, int overall_size)
   current.split_size = 0;
   current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
 
-  first.bb = ENTRY_BLOCK_PTR;
+  first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   first.edge_num = 0;
   first.overall_time = 0;
   first.overall_size = 0;
@@ -946,7 +946,7 @@ find_split_points (int overall_time, int overall_size)
   first.used_ssa_names = 0;
   first.bbs_visited = 0;
   stack.safe_push (first);
-  ENTRY_BLOCK_PTR->aux = (void *)(intptr_t)-1;
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
 
   while (!stack.is_empty ())
     {
@@ -957,7 +957,7 @@ find_split_points (int overall_time, int overall_size)
          articulation, we want to have processed everything reachable
         from articulation but nothing that reaches into it.  */
       if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
-         && entry->bb != ENTRY_BLOCK_PTR)
+         && entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          int pos = stack.length ();
          entry->can_split &= visit_bb (entry->bb, return_bb,
@@ -1009,7 +1009,7 @@ find_split_points (int overall_time, int overall_size)
          entry->edge_num++;
 
          /* New BB to visit, push it to the stack.  */
-         if (dest != return_bb && dest != EXIT_BLOCK_PTR
+         if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && !dest->aux)
            {
              stack_entry new_entry;
@@ -1037,7 +1037,7 @@ find_split_points (int overall_time, int overall_size)
        }
       /* We are done with examining the edges.  Pop off the value from stack
         and merge stuff we accumulate during the walk.  */
-      else if (entry->bb != ENTRY_BLOCK_PTR)
+      else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        {
          stack_entry *prev = &stack[stack.length () - 2];
 
@@ -1063,7 +1063,7 @@ find_split_points (int overall_time, int overall_size)
       else
         stack.pop ();
     }
-  ENTRY_BLOCK_PTR->aux = NULL;
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
   FOR_EACH_BB (bb)
     bb->aux = NULL;
   stack.release ();
@@ -1139,7 +1139,7 @@ split_function (struct split_point *split_point)
   if (!split_part_return_p)
     ;
   /* We have no return block, so nothing is needed.  */
-  else if (return_bb == EXIT_BLOCK_PTR)
+  else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     ;
   /* When we do not want to return value, we need to construct
      new return block with empty return statement.
@@ -1166,7 +1166,7 @@ split_function (struct split_point *split_point)
                break;
              }
        }
-      e = make_edge (new_return_bb, EXIT_BLOCK_PTR, 0);
+      e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
       e->probability = REG_BR_PROB_BASE;
       e->count = new_return_bb->count;
       if (current_loops)
@@ -1183,7 +1183,7 @@ split_function (struct split_point *split_point)
 
      Note this can happen whether or not we have a return value.  If we have
      a return value, then RETURN_BB may have PHIs for real operands too.  */
-  if (return_bb != EXIT_BLOCK_PTR)
+  if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       bool phi_p = false;
       for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
@@ -1325,7 +1325,7 @@ split_function (struct split_point *split_point)
       push_cfun (DECL_STRUCT_FUNCTION (node->decl));
       var = BLOCK_VARS (DECL_INITIAL (node->decl));
       i = vec_safe_length (*debug_args);
-      cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+      cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
       do
        {
          i -= 2;
@@ -1366,13 +1366,14 @@ split_function (struct split_point *split_point)
   else
     {
       e = make_edge (call_bb, return_bb,
-                    return_bb == EXIT_BLOCK_PTR ? 0 : EDGE_FALLTHRU);
+                    return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
+                    ? 0 : EDGE_FALLTHRU);
       e->count = call_bb->count;
       e->probability = REG_BR_PROB_BASE;
 
       /* If there is return basic block, see what value we need to store
          return value into and put call just before it.  */
-      if (return_bb != EXIT_BLOCK_PTR)
+      if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          real_retval = retval = find_retval (return_bb);
 
index ca6f64d0637e2b3e5c9363f6df57217776122f44..e249ba0dcff2696d8c29cef53db0ff94f6a1be61 100644 (file)
@@ -1745,7 +1745,7 @@ ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSE
                  ira_loop_tree_node_t pred_node;
                  basic_block pred_bb = e->src;
 
-                 if (e->src == ENTRY_BLOCK_PTR)
+                 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
                    continue;
 
                  pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
index 6c52a2b72459ca6ab386d2f4301d4da9a4bf7d12..30282aad974eac4637f2d5176b2c6d8cfd05c489 100644 (file)
@@ -3100,7 +3100,7 @@ print_loop_title (ira_loop_tree_node_t loop_tree_node)
       {
        fprintf (ira_dump_file, " %d", subloop_node->bb->index);
        FOR_EACH_EDGE (e, ei, subloop_node->bb->succs)
-         if (e->dest != EXIT_BLOCK_PTR
+         if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
              && ((dest_loop_node = IRA_BB_NODE (e->dest)->parent)
                  != loop_tree_node))
            fprintf (ira_dump_file, "(->%d:l%d)",
index cdd694176aa41c4d6b19411d3439c394a70f0397..198fa47b702434b2597babbd10fc2fa4c13e9ff3 100644 (file)
@@ -403,7 +403,7 @@ entered_from_non_parent_p (ira_loop_tree_node_t loop_node)
     if (bb_node->bb != NULL)
       {
        FOR_EACH_EDGE (e, ei, bb_node->bb->preds)
-         if (e->src != ENTRY_BLOCK_PTR
+         if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && (src_loop_node = IRA_BB_NODE (e->src)->parent) != loop_node)
            {
              for (parent = src_loop_node->parent;
@@ -1263,7 +1263,7 @@ ira_emit (bool loops_p)
       at_bb_start[bb->index] = NULL;
       at_bb_end[bb->index] = NULL;
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (e->dest != EXIT_BLOCK_PTR)
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
          generate_edge_moves (e);
     }
   allocno_last_set
index b9b21ba27af32d9fce8b0e0b42917abc50c8a81e..b46e7b0027419fc6bc5ba3695ff58d1c541053ea 100644 (file)
@@ -43,8 +43,9 @@ along with GCC; see the file COPYING3.  If not see
    executed, frequency is always equivalent.  Otherwise rescale the
    edge frequency.  */
 #define REG_FREQ_FROM_EDGE_FREQ(freq)                                     \
-  (optimize_size || (flag_branch_probabilities && !ENTRY_BLOCK_PTR->count) \
-   ? REG_FREQ_MAX : (freq * REG_FREQ_MAX / BB_FREQ_MAX)                           \
+  (optimize_size || (flag_branch_probabilities                            \
+                    && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)             \
+   ? REG_FREQ_MAX : (freq * REG_FREQ_MAX / BB_FREQ_MAX)                   \
    ? (freq * REG_FREQ_MAX / BB_FREQ_MAX) : 1)
 
 /* A modified value of flag `-fira-verbose' used internally.  */
index a813b02bef9c3a24fe9a9c01c841bf5688fff17d..f5a5af88838f4762597836bcfede0e4c6cd8ca61 100644 (file)
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -4865,7 +4865,7 @@ static bool
 split_live_ranges_for_shrink_wrap (void)
 {
   basic_block bb, call_dom = NULL;
-  basic_block first = single_succ (ENTRY_BLOCK_PTR);
+  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   rtx insn, last_interesting_insn = NULL;
   bitmap_head need_new, reachable;
   vec<basic_block> queue;
@@ -4910,7 +4910,7 @@ split_live_ranges_for_shrink_wrap (void)
 
       bb = queue.pop ();
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (e->dest != EXIT_BLOCK_PTR
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && bitmap_set_bit (&reachable, e->dest->index))
          queue.quick_push (e->dest);
     }
index 6266d48500b3c27bf9ce98767b013f859050043f..aa63c7272f0443760e7a2d1cbb3656e315bacd80 100644 (file)
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -121,8 +121,8 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
 
   /* Mark blocks which are predecessors of the exit block so that we
      can easily identify them below.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-    e->src->aux = EXIT_BLOCK_PTR;
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
+    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -134,7 +134,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
       if (qout >= qend)
        qout = worklist;
 
-      if (bb->aux == EXIT_BLOCK_PTR)
+      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
        /* Do not clear the aux field for blocks which are predecessors of
           the EXIT block.  That way we never add then to the worklist
           again.  */
@@ -153,7 +153,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
           to add the predecessors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->preds)
-         if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
+         if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              *qin++ = e->src;
              e->src->aux = e;
@@ -188,11 +188,11 @@ compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
-      if (pred == ENTRY_BLOCK_PTR)
+      if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        bitmap_copy (earliest[x], antin[succ->index]);
       else
        {
-         if (succ == EXIT_BLOCK_PTR)
+         if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
            bitmap_clear (earliest[x]);
          else
            {
@@ -276,7 +276,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
      do not want to be overly optimistic.  Consider an outgoing edge from
      the entry block.  That edge should always have a LATER value the
      same as EARLIEST for that edge.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
 
   /* Add all the blocks to the worklist.  This prevents an early exit from
@@ -317,7 +317,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
                                      antloc[e->src->index])
            /* If LATER for an outgoing edge was changed, then we need
               to add the target of the outgoing edge to the worklist.  */
-           && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
+           && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
          {
            *qin++ = e->dest;
            e->dest->aux = e;
@@ -331,7 +331,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
      for the EXIT block.  We allocated an extra entry in the LATERIN array
      for just this purpose.  */
   bitmap_ones (laterin[last_basic_block]);
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     bitmap_and (laterin[last_basic_block],
                     laterin[last_basic_block],
                     later[(size_t) e->aux]);
@@ -358,7 +358,7 @@ compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
     {
       basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
 
-      if (b == EXIT_BLOCK_PTR)
+      if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
        bitmap_and_compl (insert[x], later[x], laterin[last_basic_block]);
       else
        bitmap_and_compl (insert[x], later[x], laterin[b->index]);
@@ -500,8 +500,8 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
 
   /* Mark blocks which are successors of the entry block so that we
      can easily identify them below.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-    e->dest->aux = ENTRY_BLOCK_PTR;
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -516,7 +516,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
       /* If one of the predecessor blocks is the ENTRY block, then the
         intersection of avouts is the null set.  We can identify such blocks
         by the special value in the AUX field in the block structure.  */
-      if (bb->aux == ENTRY_BLOCK_PTR)
+      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        /* Do not clear the aux field for blocks which are successors of the
           ENTRY block.  That way we never add then to the worklist again.  */
        bitmap_clear (avin[bb->index]);
@@ -534,7 +534,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
           to add the successors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->succs)
-         if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
+         if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              *qin++ = e->dest;
              e->dest->aux = e;
@@ -570,11 +570,11 @@ compute_farthest (struct edge_list *edge_list, int n_exprs,
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
-      if (succ == EXIT_BLOCK_PTR)
+      if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
        bitmap_copy (farthest[x], st_avout[pred->index]);
       else
        {
-         if (pred == ENTRY_BLOCK_PTR)
+         if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            bitmap_clear (farthest[x]);
          else
            {
@@ -624,7 +624,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
      do not want to be overly optimistic.  Consider an incoming edge to
      the exit block.  That edge should always have a NEARER value the
      same as FARTHEST for that edge.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
 
   /* Add all the blocks to the worklist.  This prevents an early exit
@@ -656,7 +656,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
                                      st_avloc[e->dest->index])
            /* If NEARER for an incoming edge was changed, then we need
               to add the source of the incoming edge to the worklist.  */
-           && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
+           && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
          {
            *tos++ = e->src;
            e->src->aux = e;
@@ -667,7 +667,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
      for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
      for just this purpose.  */
   bitmap_ones (nearerout[last_basic_block]);
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     bitmap_and (nearerout[last_basic_block],
                     nearerout[last_basic_block],
                     nearer[(size_t) e->aux]);
@@ -693,7 +693,7 @@ compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
   for (x = 0; x < NUM_EDGES (edge_list); x++)
     {
       basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
-      if (b == ENTRY_BLOCK_PTR)
+      if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        bitmap_and_compl (insert[x], nearer[x], nearerout[last_basic_block]);
       else
        bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
index 97aa52fc6dd69b8bf9db85b6331647c9389bdf4f..c01ee1783051357df04f09d422003a95cdb235f1 100644 (file)
@@ -1937,7 +1937,7 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
     return;
 
   e = loop_preheader_edge (loop);
-  if (e->src == ENTRY_BLOCK_PTR)
+  if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return;
 
   altered = ALLOC_REG_SET (&reg_obstack);
@@ -2068,7 +2068,7 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
        }
 
       if (!single_pred_p (e->src)
-         || single_pred (e->src) == ENTRY_BLOCK_PTR)
+         || single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        break;
       e = single_pred_edge (e->src);
     }
index 671ec19b6d0662cef59c338ffd4b5559331ee501..c8f1281a0ef8a6b98b09241d693aa1ec807b062b 100644 (file)
@@ -433,7 +433,7 @@ unswitch_loop (struct loop *loop, basic_block unswitch_on, rtx cond, rtx cinsn)
 
   /* Create a block with the condition.  */
   prob = true_edge->probability;
-  switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
+  switch_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
   seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
                              block_label (true_edge->dest),
                              prob, cinsn);
index 54ffc779f116aa3a7a82fe4604011588fbe00d6a..88fc693bf2dd1ffe6fb3a7343fea99cfac7b1547 100644 (file)
@@ -612,7 +612,7 @@ find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
                && ! df_regs_ever_live_p (hard_regno + j))
              /* It needs save restore.  */
              hard_regno_costs[hard_regno]
-               += 2 * ENTRY_BLOCK_PTR->next_bb->frequency + 1;
+               += 2 * ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->frequency + 1;
          priority = targetm.register_priority (hard_regno);
          if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
              || (hard_regno_costs[hard_regno] == best_cost
index ee82c6f496c0be229c4f3dd4bee6b02ed55017e5..94b6e2559ef4453be720b8fceb6fafc3ddf4245a 100644 (file)
@@ -5295,7 +5295,8 @@ lra_inheritance (void)
        {
          if (lra_dump_file != NULL)
            fprintf (lra_dump_file, " %d", bb->index);
-         if (bb->next_bb == EXIT_BLOCK_PTR || LABEL_P (BB_HEAD (bb->next_bb)))
+         if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
+             || LABEL_P (BB_HEAD (bb->next_bb)))
            break;
          e = find_fallthru_edge (bb->succs);
          if (! e)
index 2839c5cf9401dd32133dfc0e6a4d6a64ef863bc3..efc19f2014058c0749a47741a6845b0944f7a7a1 100644 (file)
@@ -1002,7 +1002,8 @@ lra_create_live_ranges (bool all_p)
   for (i = n_blocks_inverted - 1; i >= 0; --i)
     {
       bb = BASIC_BLOCK (post_order_rev_cfg[i]);
-      if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
+         == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
       process_bb_lives (bb, curr_point);
     }
index 3c8b71d949d4f834821f75be84c6900de09cdc86..0deae88e85a3dea39479cecdbe05c18a2f528833 100644 (file)
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -2065,8 +2065,8 @@ has_nonexceptional_receiver (void)
     bb->flags &= ~BB_REACHABLE;
 
   /* Place the exit block on our worklist.  */
-  EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
-  *tos++ = EXIT_BLOCK_PTR;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
+  *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate: find everything reachable from what we've already seen.  */
   while (tos != worklist)
index 7b9f4ca496ecac1d6e3ac880095c007f85007b07..de25925b6235d3e06034f485d82a61056c12a3c5 100644 (file)
@@ -659,7 +659,7 @@ input_cfg (struct lto_input_block *ib, struct function *fn,
       index = streamer_read_hwi (ib);
     }
 
-  p_bb = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
+  p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
   index = streamer_read_hwi (ib);
   while (index != -1)
     {
@@ -996,7 +996,7 @@ input_function (tree fn_decl, struct data_in *data_in,
      of a gimple body is used by the cgraph routines, but we should
      really use the presence of the CFG.  */
   {
-    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
+    edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
     gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
   }
 
index 5e264fceee0c11d3de1b957cb540cc84cdc2b1ac..6f1585a2bf967f9d286781cb42a11d8b3ad775bd 100644 (file)
@@ -1594,7 +1594,7 @@ output_cfg (struct output_block *ob, struct function *fn)
 
   streamer_write_hwi (ob, -1);
 
-  bb = ENTRY_BLOCK_PTR;
+  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   while (bb->next_bb)
     {
       streamer_write_hwi (ob, bb->next_bb->index);
index 45adda3afc047fc79bade8970081010361a2dd42..e709f2ac2c65f8424af24dbfb91004e75868fc0e 100644 (file)
--- a/gcc/mcf.c
+++ b/gcc/mcf.c
@@ -508,7 +508,7 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
 
   /* Compute constants b, k_pos, k_neg used in the cost function calculation.
      b = sqrt(avg_vertex_weight(cfg)); k_pos = b; k_neg = 50b.  */
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     total_vertex_weight += bb->count;
 
   sqrt_avg_vertex_weight = mcf_sqrt (total_vertex_weight /
@@ -523,7 +523,7 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
   if (dump_file)
     fprintf (dump_file, "\nVertex transformation:\n");
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
   {
     /* v'->v'': index1->(index1+1).  */
     i = 2 * bb->index;
@@ -1125,7 +1125,8 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
   if (dump_file)
     fprintf (dump_file, "\nadjust_cfg_counts():\n");
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       i = 2 * bb->index;
 
@@ -1238,8 +1239,10 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
         }
     }
 
-  ENTRY_BLOCK_PTR->count = sum_edge_counts (ENTRY_BLOCK_PTR->succs);
-  EXIT_BLOCK_PTR->count = sum_edge_counts (EXIT_BLOCK_PTR->preds);
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
+                    sum_edge_counts (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
+                    sum_edge_counts (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
 
   /* Compute edge probabilities.  */
   FOR_ALL_BB (bb)
index d54f32ca07139ca78c8286f1398a81b59621852a..ed45094c3959aa6a463c34d9b4f8a6191424e1b6 100644 (file)
@@ -211,7 +211,7 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
      fallthrough edge; there can be at most one, but there could be
      none at all, e.g. when exit is called.  */
   pre_exit = 0;
-  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (eg->flags & EDGE_FALLTHRU)
       {
        basic_block src_bb = eg->src;
@@ -221,7 +221,7 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
        /* If this function returns a value at the end, we have to
           insert the final mode switch before the return value copy
           to its hard register.  */
-       if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
+       if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
            && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
            && GET_CODE (PATTERN (last_insn)) == USE
            && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
@@ -492,7 +492,7 @@ optimize_mode_switching (void)
 #if defined (MODE_ENTRY) && defined (MODE_EXIT)
   /* Split the edge from the entry block, so that we can note that
      there NORMAL_MODE is supplied.  */
-  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
+  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
 #endif
 
index 1f2a014127b823805a889bdeda6b7f02ade769f6..f3130449909873d4238dcc61150aaf1a3b63d4fa 100644 (file)
@@ -1308,7 +1308,7 @@ canon_loop (struct loop *loop)
 
   /* Avoid annoying special cases of edges going to exit
      block.  */
-  FOR_EACH_EDGE (e, i, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, i, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if ((e->flags & EDGE_FALLTHRU) && (EDGE_COUNT (e->src->succs) > 1))
       split_edge (e);
 
@@ -3344,7 +3344,7 @@ rest_of_handle_sms (void)
 
   /* Finalize layout changes.  */
   FOR_EACH_BB (bb)
-    if (bb->next_bb != EXIT_BLOCK_PTR)
+    if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
       bb->aux = bb->next_bb;
   free_dominance_info (CDI_DOMINATORS);
   cfg_layout_finalize ();
index 783b422a6438efc7ff0a249b17edc7ecdd557d53..bf834bfd7b4b24e41e8499b7cdab583edc30dfc5 100644 (file)
@@ -8235,7 +8235,7 @@ build_omp_regions (void)
 {
   gcc_assert (root_omp_region == NULL);
   calculate_dominance_info (CDI_DOMINATORS);
-  build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
+  build_omp_regions_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, false);
 }
 
 /* Main entry point for expanding OMP-GIMPLE into runtime calls.  */
index 941007f5220879bc6ef208e72944232821ef8aef..9ce17e50793d07289d99d6860709c6cd40c63a24 100644 (file)
@@ -1158,12 +1158,12 @@ eliminate_partially_redundant_loads (void)
 
   /* Note we start at block 1.  */
 
-  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
+  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return;
 
   FOR_BB_BETWEEN (bb,
-                 ENTRY_BLOCK_PTR->next_bb->next_bb,
-                 EXIT_BLOCK_PTR,
+                 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
+                 EXIT_BLOCK_PTR_FOR_FN (cfun),
                  next_bb)
     {
       /* Don't try anything on basic blocks with strange predecessors.  */
index e22c96ce8c0f54271cea03e75106284261a43dca..919dbe90b3e712c80a14334a98cef74e596740ab 100644 (file)
@@ -129,11 +129,11 @@ maybe_hot_frequency_p (struct function *fun, int freq)
   if (profile_status_for_function (fun) == PROFILE_ABSENT)
     return true;
   if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
-      && freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency * 2 / 3))
+      && freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
     return false;
   if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
     return false;
-  if (freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency
+  if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
              / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
     return false;
   return true;
@@ -251,24 +251,27 @@ probably_never_executed (struct function *fun,
        return false;
       if (!frequency)
        return true;
-      if (!ENTRY_BLOCK_PTR->frequency)
+      if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
        return false;
-      if (ENTRY_BLOCK_PTR->count)
+      if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
        {
           gcov_type computed_count;
           /* Check for possibility of overflow, in which case entry bb count
              is large enough to do the division first without losing much
              precision.  */
-          if (ENTRY_BLOCK_PTR->count < REG_BR_PROB_BASE * REG_BR_PROB_BASE)
+         if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count < REG_BR_PROB_BASE *
+             REG_BR_PROB_BASE)
             {
               gcov_type scaled_count
-                  = frequency * ENTRY_BLOCK_PTR->count * unlikely_count_fraction;
-              computed_count = RDIV (scaled_count, ENTRY_BLOCK_PTR->frequency);
+                 = frequency * ENTRY_BLOCK_PTR_FOR_FN (cfun)->count *
+            unlikely_count_fraction;
+             computed_count = RDIV (scaled_count,
+                                    ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
             }
           else
             {
-              computed_count = RDIV (ENTRY_BLOCK_PTR->count,
-                                     ENTRY_BLOCK_PTR->frequency);
+             computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count,
+                                    ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
               computed_count *= frequency * unlikely_count_fraction;
             }
           if (computed_count >= profile_info->runs)
@@ -613,7 +616,8 @@ void
 gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
 {
   gcc_assert (profile_status != PROFILE_GUESSED);
-  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
+  if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && EDGE_COUNT (e->src->succs) >
+       1)
       && flag_guess_branch_prob && optimize)
     {
       struct edge_prediction *i = XNEW (struct edge_prediction);
@@ -2170,7 +2174,7 @@ apply_return_prediction (void)
   enum prediction direction;
   edge_iterator ei;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       return_stmt = last_stmt (e->src);
       if (return_stmt
@@ -2218,7 +2222,7 @@ tree_bb_level_predictions (void)
   edge e;
   edge_iterator ei;
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
       {
         has_return_edges = true;
@@ -2286,7 +2290,7 @@ tree_estimate_probability_bb (basic_block bb)
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
       /* Predict edges to user labels with attributes.  */
-      if (e->dest != EXIT_BLOCK_PTR)
+      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          gimple_stmt_iterator gi;
          for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
@@ -2324,9 +2328,9 @@ tree_estimate_probability_bb (basic_block bb)
         return_block:
         return_stmt.  */
       if (e->dest != bb->next_bb
-         && e->dest != EXIT_BLOCK_PTR
+         && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && single_succ_p (e->dest)
-         && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
+         && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
          && (last = last_stmt (e->dest)) != NULL
          && gimple_code (last) == GIMPLE_RETURN)
        {
@@ -2350,7 +2354,7 @@ tree_estimate_probability_bb (basic_block bb)
 
       /* Look for block we are guarding (ie we dominate it,
         but it doesn't postdominate us).  */
-      if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
+      if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
          && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
          && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
        {
@@ -2612,7 +2616,7 @@ propagate_freq (basic_block head, bitmap tovisit)
        }
       BLOCK_INFO (bb)->npredecessors = count;
       /* When function never returns, we will never process exit block.  */
-      if (!count && bb == EXIT_BLOCK_PTR)
+      if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        bb->count = bb->frequency = 0;
     }
 
@@ -2762,7 +2766,7 @@ estimate_loops (void)
     {
       bitmap_set_bit (tovisit, bb->index);
     }
-  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
+  propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
   BITMAP_FREE (tovisit);
 }
 
@@ -2892,14 +2896,14 @@ counts_to_freqs (void)
   /* Don't overwrite the estimated frequencies when the profile for
      the function is missing.  We may drop this function PROFILE_GUESSED
      later in drop_profile ().  */
-  if (!ENTRY_BLOCK_PTR->count)
+  if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
     return 0;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     true_count_max = MAX (bb->count, true_count_max);
 
   count_max = MAX (true_count_max, 1);
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
 
   return true_count_max;
@@ -2924,11 +2928,11 @@ expensive_function_p (int threshold)
   /* Frequencies are out of range.  This either means that function contains
      internal loop executing more than BB_FREQ_MAX times or profile feedback
      is available and function has not been executed at all.  */
-  if (ENTRY_BLOCK_PTR->frequency == 0)
+  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
     return true;
 
   /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
-  limit = ENTRY_BLOCK_PTR->frequency * threshold;
+  limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
   FOR_EACH_BB (bb)
     {
       rtx insn;
@@ -2973,12 +2977,13 @@ estimate_bb_frequencies (bool force)
 
       mark_dfs_back_edges ();
 
-      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
+      single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
+        REG_BR_PROB_BASE;
 
       /* Set up block info for each basic block.  */
       alloc_aux_for_blocks (sizeof (struct block_info_def));
       alloc_aux_for_edges (sizeof (struct edge_info_def));
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
        {
          edge e;
          edge_iterator ei;
@@ -3002,7 +3007,7 @@ estimate_bb_frequencies (bool force)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
 
       sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
        {
          sreal tmp;
 
@@ -3186,7 +3191,7 @@ rebuild_frequencies (void)
      max counts.  */
   gcov_type count_max = 0;
   basic_block bb;
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     count_max = MAX (bb->count, count_max);
 
   if (profile_status == PROFILE_GUESSED
index 1f1c2652b55776f67522ffc44982e7628bbab022..85671b30bc47f7e109cbd2ba89cd9e51b9b40607 100644 (file)
@@ -117,7 +117,7 @@ instrument_edges (struct edge_list *el)
   int num_edges = NUM_EDGES (el);
   basic_block bb;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -192,7 +192,8 @@ instrument_values (histogram_values values)
 
   case HIST_TYPE_TIME_PROFILE:
     {
-      basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
+      basic_block bb =
+     split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
       gimple_stmt_iterator gsi = gsi_start_bb (bb);
 
       gimple_gen_time_profiler (t, 0, gsi);
@@ -272,7 +273,7 @@ get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
   gcov_type *counts;
 
   /* Count the edges to be (possibly) instrumented.  */
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -332,7 +333,7 @@ correct_negative_edge_counts (void)
   edge e;
   edge_iterator ei;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       FOR_EACH_EDGE (e, ei, bb->succs)
         {
@@ -383,7 +384,8 @@ is_inconsistent (void)
          inconsistent = true;
        }
       if (bb->count != sum_edge_counts (bb->succs) &&
-          ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
+         ! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
+            && block_ends_with_call_p (bb)))
        {
          if (dump_file)
            {
@@ -408,7 +410,7 @@ static void
 set_bb_counts (void)
 {
   basic_block bb;
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       bb->count = sum_edge_counts (bb->succs);
       gcc_assert (bb->count >= 0);
@@ -427,7 +429,7 @@ read_profile_edge_counts (gcov_type *exec_counts)
   /* The first count in the .da file is the number of times that the function
      was entered.  This is the exec_count for block zero.  */
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -491,7 +493,7 @@ compute_frequency_overlap (void)
   int overlap = 0;
   basic_block bb;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       count_total += bb->count;
       freq_total += bb->frequency;
@@ -500,7 +502,7 @@ compute_frequency_overlap (void)
   if (count_total == 0 || freq_total == 0)
     return 0;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     overlap += MIN (bb->count * OVERLAP_BASE / count_total,
                    bb->frequency * OVERLAP_BASE / freq_total);
 
@@ -537,7 +539,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
 
   /* Attach extra info block to each bb.  */
   alloc_aux_for_blocks (sizeof (struct bb_info));
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -551,8 +553,8 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
     }
 
   /* Avoid predicting entry on exit nodes.  */
-  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
-  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
+  BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
+  BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
 
   num_edges = read_profile_edge_counts (exec_counts);
 
@@ -582,7 +584,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
     {
       passes++;
       changes = 0;
-      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
+      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
@@ -724,7 +726,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
     hist_br_prob[i] = 0;
   num_branches = 0;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     {
       edge e;
       edge_iterator ei;
@@ -743,9 +745,9 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
-              && e->dest == EXIT_BLOCK_PTR)
+              && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
              || (e->count > bb->count
-                 && e->dest != EXIT_BLOCK_PTR))
+                 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
@@ -1064,17 +1066,17 @@ branch_prob (void)
              ne->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
-              && e->dest != EXIT_BLOCK_PTR)
+              && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            need_exit_edge = 1;
-         if (e->dest == EXIT_BLOCK_PTR)
+         if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
            have_exit_edge = 1;
        }
       FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
-              && e->src != ENTRY_BLOCK_PTR)
+              && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            need_entry_edge = 1;
-         if (e->src == ENTRY_BLOCK_PTR)
+         if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            have_entry_edge = 1;
        }
 
@@ -1083,14 +1085,14 @@ branch_prob (void)
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
-         make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+         make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
        }
       if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
-         make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
+         make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
          /* Avoid bbs that have both fake entry edge and also some
             exit edge.  One of those edges wouldn't be added to the
             spanning tree, but we can't instrument any of them.  */
@@ -1146,7 +1148,8 @@ branch_prob (void)
 
       /* Mark edges we've replaced by fake edges above as ignored.  */
       if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
-         && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
+         && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+         && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
@@ -1213,7 +1216,8 @@ branch_prob (void)
       gcov_write_length (offset);
 
       /* Arcs */
-      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                     EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
        {
          edge e;
          edge_iterator ei;
@@ -1257,7 +1261,7 @@ branch_prob (void)
          gimple_stmt_iterator gsi;
          gcov_position_t offset = 0;
 
-         if (bb == ENTRY_BLOCK_PTR->next_bb)
+         if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
            {
              expanded_location curr_location =
                expand_location (DECL_SOURCE_LOCATION (current_function_decl));
@@ -1381,11 +1385,11 @@ find_spanning_tree (struct edge_list *el)
   basic_block bb;
 
   /* We use aux field for standard union-find algorithm.  */
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
     bb->aux = bb;
 
   /* Add fake edge exit to entry we can't instrument.  */
-  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
+  union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   /* First add all abnormal edges to the tree unless they form a cycle. Also
      add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
@@ -1394,7 +1398,7 @@ find_spanning_tree (struct edge_list *el)
     {
       edge e = INDEX_EDGE (el, i);
       if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
-          || e->dest == EXIT_BLOCK_PTR)
+          || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
index 756d3bd586f7ef57da373520ff4cdddfc72f290a..6aad46684d665a4b3120f5d1b55a809c5353a46c 100644 (file)
@@ -2649,7 +2649,7 @@ convert_regs_entry (void)
      Note that we are inserting converted code here.  This code is
      never seen by the convert_regs pass.  */
 
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     {
       basic_block block = e->dest;
       block_info bi = BLOCK_INFO (block);
@@ -2693,7 +2693,7 @@ convert_regs_exit (void)
       value_reg_high = END_HARD_REGNO (retvalue) - 1;
     }
 
-  output_stack = &BLOCK_INFO (EXIT_BLOCK_PTR)->stack_in;
+  output_stack = &BLOCK_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->stack_in;
   if (value_reg_low == -1)
     output_stack->top = -1;
   else
@@ -2847,7 +2847,7 @@ compensate_edges (void)
   starting_stack_p = false;
 
   FOR_EACH_BB (bb)
-    if (bb != ENTRY_BLOCK_PTR)
+    if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
       {
         edge e;
         edge_iterator ei;
@@ -3141,14 +3141,14 @@ convert_regs (void)
 
   /* Construct the desired stack for function exit.  */
   convert_regs_exit ();
-  BLOCK_INFO (EXIT_BLOCK_PTR)->done = 1;
+  BLOCK_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->done = 1;
 
   /* ??? Future: process inner loops first, and give them arbitrary
      initial stacks which emit_swap_insn can modify.  This ought to
      prevent double fxch that often appears at the head of a loop.  */
 
   /* Process all blocks reachable from all entry points.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     cfg_altered |= convert_regs_2 (e->dest);
 
   /* ??? Process all unreachable blocks.  Though there's no excuse
@@ -3221,7 +3221,7 @@ reg_to_stack (void)
 
       FOR_EACH_EDGE (e, ei, bb->preds)
        if (!(e->flags & EDGE_DFS_BACK)
-           && e->src != ENTRY_BLOCK_PTR)
+           && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
          bi->predecessors++;
 
       /* Set current register status at last instruction `uninitialized'.  */
index b5fa3f3995f3ff8dc8d288c33edfef3cabec7314..9bf426cd175da5dce38b25588924ede9a174807b 100644 (file)
@@ -137,7 +137,7 @@ extern size_t reg_info_p_size;
    frequency.  */
 #define REG_FREQ_FROM_BB(bb) (optimize_size                                  \
                              || (flag_branch_probabilities                   \
-                                 && !ENTRY_BLOCK_PTR->count)                 \
+                                 && !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)   \
                              ? REG_FREQ_MAX                                  \
                              : ((bb)->frequency * REG_FREQ_MAX / BB_FREQ_MAX)\
                              ? ((bb)->frequency * REG_FREQ_MAX / BB_FREQ_MAX)\
index b69660d16af8bd8231ad8801d30b1355460d2479..96619f6782096e476bfad5760e351c2288e28df7 100644 (file)
@@ -1615,7 +1615,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
            && reg_mentioned_p (XEXP (note, 0), in)
            /* Check that a former pseudo is valid; see find_dummy_reload.  */
            && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
-               || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
+               || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                                    ORIGINAL_REGNO (XEXP (note, 0)))
                    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
            && ! refers_to_regno_for_reload_p (regno,
@@ -1939,7 +1939,7 @@ combine_reloads (void)
        && !fixed_regs[regno]
        /* Check that a former pseudo is valid; see find_dummy_reload.  */
        && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
-           || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
+           || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                               ORIGINAL_REGNO (XEXP (note, 0)))
                && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
       {
@@ -2098,7 +2098,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
             can ignore the conflict).  We must never introduce writes
             to such hardregs, as they would clobber the other live
             pseudo.  See PR 20973.  */
-          || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
+         || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                             ORIGINAL_REGNO (in))
              /* Similarly, only do this if we can be sure that the death
                 note is still valid.  global can assign some hardreg to
index 66b5ff16b22cf3dc79cad7d9dce70c0a6788c578..6864ec1667fac8d1ad47689ec18c7e13a6ff27e6 100644 (file)
@@ -617,8 +617,8 @@ has_nonexceptional_receiver (void)
     bb->flags &= ~BB_REACHABLE;
 
   /* Place the exit block on our worklist.  */
-  EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
-  *tos++ = EXIT_BLOCK_PTR;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
+  *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate: find everything reachable from what we've already seen.  */
   while (tos != worklist)
index 367181289df59ce48ce0a21eb2fe3fd6e0e5cc1d..4609c3ad963742c9838568d316325102ab982fd0 100644 (file)
@@ -147,7 +147,7 @@ find_basic_block (rtx insn, int search_limit)
 
   /* The start of the function.  */
   else if (insn == 0)
-    return ENTRY_BLOCK_PTR->next_bb->index;
+    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;
 
   /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
      anything other than a CODE_LABEL or note, we can't find this code.  */
@@ -966,7 +966,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
 
       /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
-      start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
+      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
                    insns : BB_HEAD (BASIC_BLOCK (b)));
       stop_insn = target;
 
index 8d23e33f89e4a4323b0dade90c8d42bd6bb49e5e..955501a9547ba26ae5325c665bca17a9f855feb9 100644 (file)
@@ -648,7 +648,7 @@ schedule_ebbs (void)
        {
          edge e;
          tail = BB_END (bb);
-         if (bb->next_bb == EXIT_BLOCK_PTR
+         if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
              || LABEL_P (BB_HEAD (bb->next_bb)))
            break;
          e = find_fallthru_edge (bb->succs);
@@ -683,7 +683,7 @@ ebb_add_block (basic_block bb, basic_block after)
   /* Recovery blocks are always bounded by BARRIERS,
      therefore, they always form single block EBB,
      therefore, we can use rec->index to identify such EBBs.  */
-  if (after == EXIT_BLOCK_PTR)
+  if (after == EXIT_BLOCK_PTR_FOR_FN (cfun))
     bitmap_set_bit (&dont_calc_deps, bb->index);
   else if (after == last_bb)
     last_bb = bb;
index 33112eef0759ed57a3b68c0415e5f8433e9e68e0..070404c42450d2d8b91503a83be4794af1c9d58a 100644 (file)
@@ -945,14 +945,15 @@ extern vec<haifa_deps_insn_data_def> h_d_i_d;
 /* INSN is a speculation check that will simply reexecute the speculatively
    scheduled instruction if the speculation fails.  */
 #define IS_SPECULATION_SIMPLE_CHECK_P(INSN) \
-  (RECOVERY_BLOCK (INSN) == EXIT_BLOCK_PTR)
+  (RECOVERY_BLOCK (INSN) == EXIT_BLOCK_PTR_FOR_FN (cfun))
 
 /* INSN is a speculation check that will branch to RECOVERY_BLOCK if the
    speculation fails.  Insns in that block will reexecute the speculatively
    scheduled code and then will return immediately after INSN thus preserving
    semantics of the program.  */
 #define IS_SPECULATION_BRANCHY_CHECK_P(INSN) \
-  (RECOVERY_BLOCK (INSN) != NULL && RECOVERY_BLOCK (INSN) != EXIT_BLOCK_PTR)
+  (RECOVERY_BLOCK (INSN) != NULL             \
+   && RECOVERY_BLOCK (INSN) != EXIT_BLOCK_PTR_FOR_FN (cfun))
 
 \f
 /* Dep status (aka ds_t) of the link encapsulates all information for a given
index 87042dd7c1c25112c64ee50a12551c7d895af93f..1663e2fd95dae337d6861e2e13562a39760cd022 100644 (file)
@@ -495,7 +495,7 @@ find_single_block_region (bool ebbs_p)
             BLOCK_TO_BB (bb->index) = i - RGN_BLOCKS (nr_regions);
             i++;
 
-            if (bb->next_bb == EXIT_BLOCK_PTR
+           if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
                 || LABEL_P (BB_HEAD (bb->next_bb)))
               break;
 
@@ -665,7 +665,7 @@ haifa_find_rgns (void)
 
   /* DFS traversal to find inner loops in the cfg.  */
 
-  current_edge = ei_start (single_succ (ENTRY_BLOCK_PTR)->succs);
+  current_edge = ei_start (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->succs);
   sp = -1;
 
   while (1)
@@ -840,7 +840,7 @@ haifa_find_rgns (void)
              /* If we exited the loop early, then I is the header of
                 a non-reducible loop and we should quit processing it
                 now.  */
-             if (jbb != EXIT_BLOCK_PTR)
+             if (jbb != EXIT_BLOCK_PTR_FOR_FN (cfun))
                continue;
 
              /* I is a header of an inner loop, or block 0 in a subroutine
@@ -858,7 +858,7 @@ haifa_find_rgns (void)
              /* Decrease degree of all I's successors for topological
                 ordering.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
-               if (e->dest != EXIT_BLOCK_PTR)
+               if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                  --degree[e->dest->index];
 
              /* Estimate # insns, and count # blocks in the region.  */
@@ -875,7 +875,7 @@ haifa_find_rgns (void)
                    /* Leaf nodes have only a single successor which must
                       be EXIT_BLOCK.  */
                    if (single_succ_p (jbb)
-                       && single_succ (jbb) == EXIT_BLOCK_PTR)
+                       && single_succ (jbb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
                      {
                        queue[++tail] = jbb->index;
                        bitmap_set_bit (in_queue, jbb->index);
@@ -893,7 +893,7 @@ haifa_find_rgns (void)
 
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    {
-                     if (e->src == ENTRY_BLOCK_PTR)
+                     if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
                        continue;
 
                      node = e->src->index;
@@ -954,7 +954,7 @@ haifa_find_rgns (void)
 
                      /* See discussion above about nodes not marked as in
                         this loop during the initial DFS traversal.  */
-                     if (e->src == ENTRY_BLOCK_PTR
+                     if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
                          || max_hdr[node] != loop_head)
                        {
                          tail = -1;
@@ -1006,7 +1006,7 @@ haifa_find_rgns (void)
                          queue[head] = queue[tail--];
 
                          FOR_EACH_EDGE (e, ei, BASIC_BLOCK (child)->succs)
-                           if (e->dest != EXIT_BLOCK_PTR)
+                           if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                              --degree[e->dest->index];
                        }
                      else
@@ -1026,7 +1026,7 @@ haifa_find_rgns (void)
                     This may provide several smaller regions instead
                     of one too_large region.  */
                   FOR_EACH_EDGE (e, ei, bb->succs)
-                    if (e->dest != EXIT_BLOCK_PTR)
+                   if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                       bitmap_set_bit (extended_rgn_header, e->dest->index);
                 }
            }
@@ -1305,7 +1305,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
              BLOCK_TO_BB (bbn) = 0;
 
              FOR_EACH_EDGE (e, ei, BASIC_BLOCK (bbn)->succs)
-               if (e->dest != EXIT_BLOCK_PTR)
+               if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                  degree[e->dest->index]--;
 
              if (!large)
@@ -1362,7 +1362,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
                      idx++;
 
                      FOR_EACH_EDGE (e, ei, BASIC_BLOCK (succn)->succs)
-                       if (e->dest != EXIT_BLOCK_PTR)
+                       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                          degree[e->dest->index]--;
                    }
                }
@@ -1426,7 +1426,7 @@ compute_dom_prob_ps (int bb)
       edge out_edge;
       edge_iterator out_ei;
 
-      if (in_edge->src == ENTRY_BLOCK_PTR)
+      if (in_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
 
       pred_bb = BLOCK_TO_BB (in_edge->src->index);
@@ -2663,7 +2663,7 @@ propagate_deps (int bb, struct deps_desc *pred_deps)
   FOR_EACH_EDGE (e, ei, block->succs)
     {
       /* Only bbs "below" bb, in the same region, are interesting.  */
-      if (e->dest == EXIT_BLOCK_PTR
+      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
          || CONTAINING_RGN (block->index) != CONTAINING_RGN (e->dest->index)
          || BLOCK_TO_BB (e->dest->index) <= bb)
        continue;
@@ -3454,10 +3454,11 @@ rgn_add_block (basic_block bb, basic_block after)
   extend_regions ();
   bitmap_set_bit (&not_in_df, bb->index);
 
-  if (after == 0 || after == EXIT_BLOCK_PTR)
+  if (after == 0 || after == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       rgn_make_new_region_out_of_new_block (bb);
-      RGN_DONT_CALC_DEPS (nr_regions - 1) = (after == EXIT_BLOCK_PTR);
+      RGN_DONT_CALC_DEPS (nr_regions - 1) = (after
+                                            == EXIT_BLOCK_PTR_FOR_FN (cfun));
     }
   else
     {
index 579cf8d405609c94cdf3894d9f4cd53a63cc99cc..7dfc70327d7a53cef504c31f39b500752f258e48 100644 (file)
@@ -3682,7 +3682,7 @@ maybe_tidy_empty_bb (basic_block bb)
      successors.  Otherwise remove it.  */
   if (!sel_bb_empty_p (bb)
       || (single_succ_p (bb)
-          && single_succ (bb) == EXIT_BLOCK_PTR
+         && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
           && (!single_pred_p (bb)
               || !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
       || EDGE_COUNT (bb->preds) == 0
@@ -3853,7 +3853,7 @@ tidy_control_flow (basic_block xbb, bool full_tidying)
       && EDGE_COUNT (xbb->succs) == 1
       && (EDGE_SUCC (xbb, 0)->flags & EDGE_FALLTHRU)
       /* When successor is an EXIT block, it may not be the next block.  */
-      && single_succ (xbb) != EXIT_BLOCK_PTR
+      && single_succ (xbb) != EXIT_BLOCK_PTR_FOR_FN (cfun)
       /* And unconditional jump in previous basic block leads to
          next basic block of XBB and this jump can be safely removed.  */
       && in_current_region_p (xbb->prev_bb)
@@ -4325,7 +4325,7 @@ init_lv_sets (void)
     init_lv_set (bb);
 
   /* Don't forget EXIT_BLOCK.  */
-  init_lv_set (EXIT_BLOCK_PTR);
+  init_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
 }
 
 /* Release lv set of HEAD.  */
@@ -4346,7 +4346,7 @@ free_lv_sets (void)
   basic_block bb;
 
   /* Don't forget EXIT_BLOCK.  */
-  free_lv_set (EXIT_BLOCK_PTR);
+  free_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* Free LV sets.  */
   FOR_EACH_BB (bb)
@@ -4524,7 +4524,7 @@ sel_bb_head (basic_block bb)
 {
   insn_t head;
 
-  if (bb == EXIT_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       gcc_assert (exit_insn != NULL_RTX);
       head = exit_insn;
@@ -4557,7 +4557,7 @@ sel_bb_end (basic_block bb)
   if (sel_bb_empty_p (bb))
     return NULL_RTX;
 
-  gcc_assert (bb != EXIT_BLOCK_PTR);
+  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   return BB_END (bb);
 }
@@ -4852,7 +4852,7 @@ bb_ends_ebb_p (basic_block bb)
   basic_block next_bb = bb_next_bb (bb);
   edge e;
 
-  if (next_bb == EXIT_BLOCK_PTR
+  if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
       || bitmap_bit_p (forced_ebb_heads, next_bb->index)
       || (LABEL_P (BB_HEAD (next_bb))
          /* NB: LABEL_NUSES () is not maintained outside of jump.c.
@@ -5538,7 +5538,7 @@ sel_create_recovery_block (insn_t orig_insn)
 
   recovery_block = sched_create_recovery_block (&before_recovery);
   if (before_recovery)
-    copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR);
+    copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   gcc_assert (sel_bb_empty_p (recovery_block));
   sched_create_recovery_edges (first_bb, recovery_block, second_bb);
@@ -5821,7 +5821,7 @@ setup_nop_and_exit_insns (void)
   emit_insn (nop_pattern);
   exit_insn = get_insns ();
   end_sequence ();
-  set_block_for_insn (exit_insn, EXIT_BLOCK_PTR);
+  set_block_for_insn (exit_insn, EXIT_BLOCK_PTR_FOR_FN (cfun));
 }
 
 /* Free special insns used in the scheduler.  */
@@ -6396,7 +6396,7 @@ sel_remove_loop_preheader (void)
                  If it is so - delete this jump and clear data sets of its
                  basic block if it becomes empty.  */
              if (next_bb->prev_bb == prev_bb
-                  && prev_bb != ENTRY_BLOCK_PTR
+                 && prev_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                   && bb_has_removable_jump_to_p (prev_bb, next_bb))
                 {
                   redirect_edge_and_branch (EDGE_SUCC (prev_bb, 0), next_bb);
index 486159dd262f98213829afb479af699d6867e0e4..ff99e519cf9805d0be864d6f92be1304916c3be7 100644 (file)
@@ -1024,7 +1024,7 @@ inner_loop_header_p (basic_block bb)
   if (!current_loop_nest)
     return false;
 
-  if (bb == EXIT_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return false;
 
   inner_loop = bb->loop_father;
@@ -1050,7 +1050,7 @@ get_loop_exit_edges_unique_dests (const struct loop *loop)
   vec<edge> edges = vNULL;
   struct loop_exit *exit;
 
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun)
               && current_loops->state & LOOPS_HAVE_RECORDED_EXITS);
 
   for (exit = loop->exits->next; exit->e; exit = exit->next)
@@ -1083,7 +1083,7 @@ sel_bb_empty_or_nop_p (basic_block bb)
   if (!INSN_NOP_P (first))
     return false;
 
-  if (bb == EXIT_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return false;
 
   last = sel_bb_end (bb);
@@ -1204,7 +1204,7 @@ _succ_iter_start (insn_t *succp, insn_t insn, int flags)
   i.current_exit = -1;
   i.loop_exits.create (0);
 
-  if (bb != EXIT_BLOCK_PTR && BB_END (bb) != insn)
+  if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun) && BB_END (bb) != insn)
     {
       i.bb_end = false;
 
@@ -1308,7 +1308,7 @@ _succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
        {
          basic_block bb = ip->e2->dest;
 
-         if (bb == EXIT_BLOCK_PTR || bb == after_recovery)
+         if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == after_recovery)
            *succp = exit_insn;
          else
            {
index c2d4185d6eca6e6487181e29f86001ddad15ca5c..1e3fcf0da5a85f14dcd205792e089b62ee458919 100644 (file)
@@ -4551,7 +4551,8 @@ find_block_for_bookkeeping (edge e1, edge e2, bool lax)
   edge e;
 
   /* Loop over edges from E1 to E2, inclusive.  */
-  for (e = e1; !lax || e->dest != EXIT_BLOCK_PTR; e = EDGE_SUCC (e->dest, 0))
+  for (e = e1; !lax || e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun); e =
+       EDGE_SUCC (e->dest, 0))
     {
       if (EDGE_COUNT (e->dest->preds) == 2)
        {
@@ -4642,7 +4643,7 @@ create_block_for_bookkeeping (edge e1, edge e2)
       if (DEBUG_INSN_P (insn)
          && single_succ_p (new_bb)
          && (succ = single_succ (new_bb))
-         && succ != EXIT_BLOCK_PTR
+         && succ != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && DEBUG_INSN_P ((last = sel_bb_end (new_bb))))
        {
          while (insn != last && (DEBUG_INSN_P (insn) || NOTE_P (insn)))
index ffbeed2a07171eb01f67a21bbecabd52be97dad7..378d6c7e8ba7342878ac976befd235def0da1fc9 100644 (file)
@@ -805,7 +805,7 @@ insert_store (struct st_expr * expr, edge e)
 
   /* If tmp is NULL, we found an insertion on every edge, blank the
      insertion vector for these edges, and insert at the start of the BB.  */
-  if (!tmp && bb != EXIT_BLOCK_PTR)
+  if (!tmp && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       FOR_EACH_EDGE (tmp, ei, e->dest->preds)
        {
@@ -869,7 +869,7 @@ remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
        }
       bb = act->dest;
 
-      if (bb == EXIT_BLOCK_PTR
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
          || bitmap_bit_p (visited, bb->index))
        {
          if (!ei_end_p (ei))
index 24860054e6e7086c4a0794e59fa18532b3d98640..271f600323b14517abd1a75a4d4cec8a7098f4ce 100644 (file)
@@ -1950,7 +1950,7 @@ tm_region_init (struct tm_region *region)
   vec<tm_region_p> bb_regions = vNULL;
 
   all_tm_regions = region;
-  bb = single_succ (ENTRY_BLOCK_PTR);
+  bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   /* We could store this information in bb->aux, but we may get called
      through get_all_tm_blocks() from another pass that may be already
@@ -2016,7 +2016,7 @@ gate_tm_init (void)
       struct tm_region *region = (struct tm_region *)
        obstack_alloc (&tm_obstack.obstack, sizeof (struct tm_region));
       memset (region, 0, sizeof (*region));
-      region->entry_block = single_succ (ENTRY_BLOCK_PTR);
+      region->entry_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
       /* For a clone, the entire function is the region.  But even if
         we don't need to record any exit blocks, we may need to
         record irrevocable blocks.  */
@@ -3633,7 +3633,8 @@ tm_memopt_compute_available (struct tm_region *region,
        /* If the out state of this block changed, then we need to add
           its successors to the worklist if they are not already in.  */
        FOR_EACH_EDGE (e, ei, bb->succs)
-         if (!AVAIL_IN_WORKLIST_P (e->dest) && e->dest != EXIT_BLOCK_PTR)
+         if (!AVAIL_IN_WORKLIST_P (e->dest)
+             && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              *qin++ = e->dest;
              AVAIL_IN_WORKLIST_P (e->dest) = true;
@@ -4539,12 +4540,14 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
   if (for_clone)
     {
       old_irr = d->irrevocable_blocks_clone;
-      queue.quick_push (single_succ (ENTRY_BLOCK_PTR));
+      queue.quick_push (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
       if (ipa_tm_scan_irr_blocks (&queue, new_irr, old_irr, NULL))
        {
-         ipa_tm_propagate_irr (single_succ (ENTRY_BLOCK_PTR), new_irr,
+         ipa_tm_propagate_irr (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                               new_irr,
                                old_irr, NULL);
-         ret = bitmap_bit_p (new_irr, single_succ (ENTRY_BLOCK_PTR)->index);
+         ret = bitmap_bit_p (new_irr,
+                             single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->index);
        }
     }
   else
@@ -5294,7 +5297,8 @@ ipa_tm_transform_clone (struct cgraph_node *node)
   calculate_dominance_info (CDI_DOMINATORS);
 
   need_ssa_rename =
-    ipa_tm_transform_calls (d->clone, NULL, single_succ (ENTRY_BLOCK_PTR),
+    ipa_tm_transform_calls (d->clone, NULL,
+                           single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                            d->irrevocable_blocks_clone);
 
   if (need_ssa_rename)
index d2af39e2bbe64c213794b3212e99880d2545380c..b9fb7195bd2c795d8a81b9eda878684597724a5f 100644 (file)
@@ -190,14 +190,14 @@ init_empty_tree_cfg_for_function (struct function *fn)
                         initial_cfg_capacity);
 
   SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
-                               ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
+                               ENTRY_BLOCK_PTR_FOR_FN (fn));
   SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
-                  EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
+                  EXIT_BLOCK_PTR_FOR_FN (fn));
 
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
-    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
-  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
-    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
+  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
+    = EXIT_BLOCK_PTR_FOR_FN (fn);
+  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
+    = ENTRY_BLOCK_PTR_FOR_FN (fn);
 }
 
 void
@@ -236,7 +236,7 @@ build_gimple_cfg (gimple_seq seq)
 
   /* Make sure there is always at least one block, even if it's empty.  */
   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
-    create_empty_bb (ENTRY_BLOCK_PTR);
+    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   /* Adjust the size of the array.  */
   if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
@@ -518,7 +518,7 @@ make_blocks (gimple_seq seq)
   gimple stmt = NULL;
   bool start_new_block = true;
   bool first_stmt_of_seq = true;
-  basic_block bb = ENTRY_BLOCK_PTR;
+  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   while (!gsi_end_p (i))
     {
@@ -669,7 +669,8 @@ make_edges (void)
 
   /* Create an edge from entry to the first block with executable
      statements in it.  */
-  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
+  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
+            EDGE_FALLTHRU);
 
   /* Traverse the basic block array placing edges.  */
   FOR_EACH_BB (bb)
@@ -687,7 +688,7 @@ make_edges (void)
              fallthru = false;
              break;
            case GIMPLE_RETURN:
-             make_edge (bb, EXIT_BLOCK_PTR, 0);
+             make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
@@ -719,7 +720,8 @@ make_edges (void)
 
              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
-               make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
+               make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0), fallthru =
+            false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
@@ -1503,7 +1505,7 @@ gimple_can_merge_blocks_p (basic_block a, basic_block b)
   if (!single_pred_p (b))
     return false;
 
-  if (b == EXIT_BLOCK_PTR)
+  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return false;
 
   /* If A ends by a statement causing exceptions or something similar, we
@@ -4849,19 +4851,21 @@ gimple_verify_flow_info (void)
   edge e;
   edge_iterator ei;
 
-  if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
+  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
+      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
     {
       error ("ENTRY_BLOCK has IL associated with it");
       err = 1;
     }
 
-  if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
+  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
+      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
     {
       error ("EXIT_BLOCK has IL associated with it");
       err = 1;
     }
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     if (e->flags & EDGE_FALLTHRU)
       {
        error ("fallthru to exit from bb %d", e->src->index);
@@ -5041,7 +5045,7 @@ gimple_verify_flow_info (void)
              error ("wrong outgoing edge flags at end of bb %d", bb->index);
              err = 1;
            }
-         if (single_succ (bb) != EXIT_BLOCK_PTR)
+         if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              error ("return edge does not point to exit in bb %d",
                     bb->index);
@@ -5281,7 +5285,7 @@ gimple_redirect_edge_and_branch (edge e, basic_block dest)
   if (e->flags & EDGE_EH)
     return redirect_eh_edge (e, dest);
 
-  if (e->src != ENTRY_BLOCK_PTR)
+  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
     {
       ret = gimple_try_redirect_by_replacing_jump (e, dest);
       if (ret)
@@ -5564,7 +5568,7 @@ gimple_duplicate_bb (basic_block bb)
   gimple_seq phis = phi_nodes (bb);
   gimple phi, stmt, copy;
 
-  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
+  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
 
   /* Copy the PHI nodes.  We ignore PHI node arguments here because
      the incoming edges have not been setup yet.  */
@@ -6901,9 +6905,9 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
      FIXME, this is silly.  The CFG ought to become a parameter to
      these helpers.  */
   push_cfun (dest_cfun);
-  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
+  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
   if (exit_bb)
-    make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
+    make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
   pop_cfun ();
 
   /* Back in the original function, the SESE region has disappeared,
@@ -7247,7 +7251,7 @@ print_loops (FILE *file, int verbosity)
 {
   basic_block bb;
 
-  bb = ENTRY_BLOCK_PTR;
+  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   if (bb && bb->loop_father)
     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
 }
@@ -7416,7 +7420,8 @@ gimple_flow_call_edges_add (sbitmap blocks)
   if (! blocks)
     check_last_block = true;
   else
-    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
+    check_last_block = bitmap_bit_p (blocks,
+                                    EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
 
   /* In the last basic block, before epilogue generation, there will be
      a fallthru edge to EXIT.  Special care is required if the last insn
@@ -7432,7 +7437,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
      Handle this by adding a dummy instruction in a new last basic block.  */
   if (check_last_block)
     {
-      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
+      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
       gimple t = NULL;
 
@@ -7443,7 +7448,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
        {
          edge e;
 
-         e = find_edge (bb, EXIT_BLOCK_PTR);
+         e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
          if (e)
            {
              gsi_insert_on_edge (e, gimple_build_nop ());
@@ -7486,7 +7491,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
 #ifdef ENABLE_CHECKING
                  if (stmt == last_stmt)
                    {
-                     e = find_edge (bb, EXIT_BLOCK_PTR);
+                     e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
                      gcc_assert (e == NULL);
                    }
 #endif
@@ -7499,7 +7504,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
                      if (e)
                        blocks_split++;
                    }
-                 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+                 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
                }
              gsi_prev (&gsi);
            }
@@ -7537,7 +7542,7 @@ remove_edge_and_dominated_blocks (edge e)
     }
 
   /* No updating is needed for edges to exit.  */
-  if (e->dest == EXIT_BLOCK_PTR)
+  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
     {
       if (cfgcleanup_altered_bbs)
        bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
@@ -7577,7 +7582,7 @@ remove_edge_and_dominated_blocks (edge e)
        {
          FOR_EACH_EDGE (f, ei, bb->succs)
            {
-             if (f->dest != EXIT_BLOCK_PTR)
+             if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                bitmap_set_bit (df, f->dest->index);
            }
        }
@@ -7928,8 +7933,8 @@ split_critical_edges (void)
             gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
-                   || e->dest == EXIT_BLOCK_PTR)
-                  && e->src != ENTRY_BLOCK_PTR
+                   || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
+                  && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;
@@ -8053,10 +8058,10 @@ execute_warn_function_return (void)
 
   /* If we have a path to EXIT, then we do return.  */
   if (TREE_THIS_VOLATILE (cfun->decl)
-      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
+      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
     {
       location = UNKNOWN_LOCATION;
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
@@ -8073,10 +8078,10 @@ execute_warn_function_return (void)
      without returning a value.  */
   else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
-          && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
+          && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
     {
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
@@ -8293,13 +8298,15 @@ execute_fixup_cfg (void)
 
   count_scale
       = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
-                            ENTRY_BLOCK_PTR->count);
+                           ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
 
-  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
-  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
+                           cgraph_get_node (current_function_decl)->count;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
+                           apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
                                        count_scale);
 
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     e->count = apply_scale (e->count, count_scale);
 
   FOR_EACH_BB (bb)
index ec99ed0a1117c950805319df1198c5cf7231e931..4e5adc28a06aadb36c0b1e54941ae63e1597e46e 100644 (file)
@@ -251,14 +251,14 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
         Otherwise, BB must have PHI nodes.  */
       || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
       /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
-      || single_succ (bb) == EXIT_BLOCK_PTR
+      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
       /* Nor should this be an infinite loop.  */
       || single_succ (bb) == bb
       /* BB may not have an abnormal outgoing edge.  */
       || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
     return false;
 
-  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);
+  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   locus = single_succ_edge (bb)->goto_locus;
 
@@ -268,7 +268,7 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
     edge e;
 
     FOR_EACH_EDGE (e, ei, bb->preds)
-      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
+      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
        return false;
       /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
index 05f30e5afa28ea98a6b3d9d32a3c93fbcc16c67a..7bc3458165f4ce634b45501867c3b869159ea9ad 100644 (file)
@@ -690,7 +690,7 @@ update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
 static void
 update_parameter_components (void)
 {
-  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
+  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   tree parm;
 
   for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
index dd3925ad859cf7c6698695fd6dd38855489acfad..907b403e43c147d77bf61518cb3eeaba391cfcfa 100644 (file)
@@ -918,7 +918,7 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
   unsigned int visited_count = 0;
 
   gcc_assert (loop->num_nodes);
-  gcc_assert (loop->latch != EXIT_BLOCK_PTR);
+  gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   blocks = XCNEWVEC (basic_block, loop->num_nodes);
   visited = BITMAP_ALLOC (NULL);
index 6ef8bb456157feac7a0f196a0525f8d8b69b0f8a..25705a9d0fb78b0364ed846e2ca8e3d2814fdff9 100644 (file)
@@ -199,7 +199,7 @@ remap_ssa_name (tree name, copy_body_data *id)
       if (SSA_NAME_IS_DEFAULT_DEF (name)
          && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          && id->entry_bb == NULL
-         && single_succ_p (ENTRY_BLOCK_PTR))
+         && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
        {
          tree vexpr = make_node (DEBUG_EXPR_DECL);
          gimple def_temp;
@@ -218,7 +218,7 @@ remap_ssa_name (tree name, copy_body_data *id)
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
-         gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+         gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
          return vexpr;
        }
@@ -300,7 +300,8 @@ remap_ssa_name (tree name, copy_body_data *id)
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
              && (!SSA_NAME_VAR (name)
                  || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
-             && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
+             && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                                            0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
              gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
@@ -1978,7 +1979,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
 
        /* Return edges do get a FALLTHRU flag when the get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
-           && old_edge->dest->aux != EXIT_BLOCK_PTR)
+           && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
          flags |= EDGE_FALLTHRU;
        new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->count = apply_scale (old_edge->count, count_scale);
@@ -2163,10 +2164,10 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   if (!DECL_RESULT (new_fndecl))
     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
 
-  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
     count_scale
         = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
   else
     count_scale = REG_BR_PROB_BASE;
 
@@ -2202,16 +2203,16 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   init_empty_tree_cfg ();
 
   profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
-  ENTRY_BLOCK_PTR->count =
-    (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
+    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
      REG_BR_PROB_BASE);
-  ENTRY_BLOCK_PTR->frequency
-    = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
-  EXIT_BLOCK_PTR->count =
-    (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
+  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
+    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
+    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
      REG_BR_PROB_BASE);
-  EXIT_BLOCK_PTR->frequency =
-    EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
+  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
+    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
   if (src_cfun->eh)
     init_eh_for_function ();
 
@@ -2410,7 +2411,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
      before inlining, using the guessed edge frequencies, so that we don't
      end up with a 0-count inline body which can confuse downstream
      optimizations such as function splitting.  */
-  if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
+  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
     {
       /* Apply the larger of the call bb count and the total incoming
          call edge count to the callee.  */
@@ -2422,10 +2423,10 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
       freqs_to_counts (id->src_node, count > in_count ? count : in_count);
     }
 
-  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
+  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
     count_scale
         = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
+                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
   else
     count_scale = REG_BR_PROB_BASE;
 
@@ -2450,20 +2451,20 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
       incoming_count = apply_scale (incoming_count, count_scale);
       incoming_frequency
        = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
-      ENTRY_BLOCK_PTR->count = incoming_count;
-      ENTRY_BLOCK_PTR->frequency = incoming_frequency;
+      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
+      ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
     }
 
   /* Must have a CFG here at this point.  */
-  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
+  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
              (DECL_STRUCT_FUNCTION (callee_fndecl)));
 
   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
 
-  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
-  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
-  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
-  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
+  ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
+  EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
+  entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
+  exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
 
   /* Duplicate any exception-handling regions.  */
   if (cfun->eh)
@@ -2694,7 +2695,7 @@ copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
   tree body;
 
   /* If this body has a CFG, walk CFG and copy.  */
-  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
+  gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
   body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
                        new_entry);
   copy_debug_stmts (id);
@@ -5098,7 +5099,8 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
 
   /* Delete all unreachable basic blocks.  */
 
-  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
+  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
+       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
     {
       next_bb = b->next_bb;
 
@@ -5294,7 +5296,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
   id.transform_parameter = false;
   id.transform_lang_insert_block = NULL;
 
-  old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
+  old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
     (DECL_STRUCT_FUNCTION (old_decl));
   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
@@ -5413,7 +5415,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
 
   /* Copy the Function's body.  */
   copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
-            ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
+            ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
+            new_entry);
 
   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
   number_blocks (new_decl);
@@ -5421,7 +5424,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
   /* We want to create the BB unconditionally, so that the addition of
      debug stmts doesn't affect BB count, which may in the end cause
      codegen differences.  */
-  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
+  bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   while (init_stmts.length ())
     insert_init_stmt (&id, bb, init_stmts.pop ());
   update_clone_info (&id);
@@ -5458,7 +5461,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
       struct cgraph_edge *e;
       rebuild_frequencies ();
 
-      new_version_node->count = ENTRY_BLOCK_PTR->count;
+      new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
       for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
index b2b57992d55ae91632de41729dd27f277760d629..6cae27e34b7f40ffb13845a134c4cf8b0f60063f 100644 (file)
@@ -1221,10 +1221,12 @@ rewrite_debug_stmt_uses (gimple stmt)
       def = info->current_def;
       if (!def)
        {
-         if (TREE_CODE (var) == PARM_DECL && single_succ_p (ENTRY_BLOCK_PTR))
+         if (TREE_CODE (var) == PARM_DECL
+             && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
            {
              gimple_stmt_iterator gsi
-               = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+               =
+            gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
              int lim;
              /* Search a few source bind stmts at the start of first bb to
                 see if a DEBUG_EXPR_DECL can't be reused.  */
@@ -1253,7 +1255,8 @@ rewrite_debug_stmt_uses (gimple stmt)
                  DECL_ARTIFICIAL (def) = 1;
                  TREE_TYPE (def) = TREE_TYPE (var);
                  DECL_MODE (def) = DECL_MODE (var);
-                 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+                 gsi =
+                gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
                  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
                }
              update = true;
@@ -1868,7 +1871,7 @@ maybe_register_def (def_operand_p def_p, gimple stmt,
                     bind stmts, but there wouldn't be a PC to bind
                     them to either, so avoid diverging the CFG.  */
                  if (ef && single_pred_p (ef->dest)
-                     && ef->dest != EXIT_BLOCK_PTR)
+                     && ef->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
                    {
                      /* If there were PHI nodes in the node, we'd
                         have to make sure the value we're binding
@@ -2331,7 +2334,7 @@ rewrite_into_ssa (void)
   insert_phi_nodes (dfs);
 
   /* 4- Rename all the blocks.  */
-  rewrite_blocks (ENTRY_BLOCK_PTR, REWRITE_ALL);
+  rewrite_blocks (ENTRY_BLOCK_PTR_FOR_FN (cfun), REWRITE_ALL);
 
   /* Free allocated memory.  */
   FOR_EACH_BB (bb)
@@ -3017,7 +3020,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
             common dominator of all the definition blocks.  */
          entry = nearest_common_dominator_for_set (CDI_DOMINATORS,
                                                    db->def_blocks);
-         if (entry != ENTRY_BLOCK_PTR)
+         if (entry != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
              if (BASIC_BLOCK (i) != entry
                  && dominated_by_p (CDI_DOMINATORS, BASIC_BLOCK (i), entry))
@@ -3216,7 +3219,7 @@ update_ssa (unsigned update_flags)
         be possible to determine the nearest block that had a
         definition for each of the symbols that are marked for
         updating.  For now this seems more work than it's worth.  */
-      start_bb = ENTRY_BLOCK_PTR;
+      start_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
       /* Traverse the CFG looking for existing definitions and uses of
         symbols in SSA operands.  Mark interesting blocks and
@@ -3299,7 +3302,7 @@ update_ssa (unsigned update_flags)
       /* Insertion of PHI nodes may have added blocks to the region.
         We need to re-compute START_BB to include the newly added
         blocks.  */
-      if (start_bb != ENTRY_BLOCK_PTR)
+      if (start_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
        start_bb = nearest_common_dominator_for_set (CDI_DOMINATORS,
                                                     blocks_to_update);
     }
index 333ef767418176299defaac0556b86f406e1e772..9a7a73f407994d713f15c71b690350a970ba45b6 100644 (file)
@@ -931,7 +931,8 @@ expand_phi_nodes (struct ssaexpand *sa)
   elim_graph g = new_elim_graph (sa->map->num_partitions);
   g->map = sa->map;
 
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     if (!gimple_seq_empty_p (phi_nodes (bb)))
       {
        edge e;
index fb4df90b769945c9a20d6d60581c0f1646fd8713..0adc51a51aa960bd74b684a3f0255c943db02b42 100644 (file)
@@ -440,7 +440,8 @@ gimple_gen_ic_func_profiler (void)
     stmt1: __gcov_indirect_call_profiler_v2 (profile_id,
                                             &current_function_decl)
    */
-  gsi = gsi_after_labels (split_edge (single_succ_edge (ENTRY_BLOCK_PTR)));
+  gsi =
+                                            gsi_after_labels (split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))));
 
   cur_func = force_gimple_operand_gsi (&gsi,
                                       build_addr (current_function_decl,
index db7ac4c66f046ae4834d6fe8e2f8d3aa4fc04663..8846fbe50cca77d7903576a081cb957742d38b39 100644 (file)
@@ -47,7 +47,7 @@ static inline basic_block
 block_before_loop (loop_p loop)
 {
   edge preheader = loop_preheader_edge (loop);
-  return (preheader ? preheader->src : ENTRY_BLOCK_PTR);
+  return (preheader ? preheader->src : ENTRY_BLOCK_PTR_FOR_FN (cfun));
 }
 
 /* Analyze all the parameters of the chrec that were left under a
index ea1986cae7e387e07051bb5f6417601cbe098feb..5432048854eee4db1a054aaf14731475b15d80d3 100644 (file)
@@ -3409,7 +3409,7 @@ initialize_parameter_reductions (void)
 
   seq = gsi_seq (gsi);
   if (seq)
-    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
+    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
 }
 
 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
@@ -3788,7 +3788,7 @@ propagate_dereference_distances (void)
   basic_block bb;
 
   queue.create (last_basic_block_for_function (cfun));
-  queue.quick_push (ENTRY_BLOCK_PTR);
+  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   FOR_EACH_BB (bb)
     {
       queue.quick_push (bb);
@@ -3818,7 +3818,7 @@ propagate_dereference_distances (void)
          {
            int succ_idx = e->dest->index * func_param_count + i;
 
-           if (e->src == EXIT_BLOCK_PTR)
+           if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
              continue;
 
            if (first)
@@ -3859,10 +3859,11 @@ dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
   basic_block bb;
 
   fprintf (dump_file, str);
-  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
     {
       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
-      if (bb != EXIT_BLOCK_PTR)
+      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
        {
          int i;
          for (i = 0; i < func_param_count; i++)
@@ -3914,7 +3915,7 @@ analyze_caller_dereference_legality (vec<access_p> representatives)
   for (i = 0; i < func_param_count; i++)
     {
       struct access *repr = representatives[i];
-      int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
+      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
 
       if (!repr || no_accesses_p (repr))
        continue;
@@ -4728,9 +4729,9 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
   int i, len;
   gimple_stmt_iterator *gsip = NULL, gsi;
 
-  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
+  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
     {
-      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
       gsip = &gsi;
     }
   len = adjustments.length ();
index 6a542b8da79788ce3283fac254b93b0e75e4bb6d..3a9875de2e85b1328da79a993661e5ebe5919940 100644 (file)
@@ -1824,7 +1824,7 @@ gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
   while (gsi_end_p (*i))
     {
       dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
-      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
+      if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        return;
 
       *i = gsi_last_bb (dom);
@@ -2314,7 +2314,7 @@ optimize_stack_restore (gimple_stmt_iterator i)
     case 0:
       break;
     case 1:
-      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
+      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
        return NULL_TREE;
       break;
     default:
index cc46370d1f9f393d034e03b8f527cbd9e8d8d991..d6fbb1cc27f2f4b451315820f2ef1a987427a76f 100644 (file)
@@ -1078,7 +1078,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
                  v2 = SSA_NAME_VERSION (var);
                  bitmap_set_bit (used_in_copy, v1);
                  bitmap_set_bit (used_in_copy, v2);
-                 cost = coalesce_cost_bb (EXIT_BLOCK_PTR);
+                 cost = coalesce_cost_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
                  add_coalesce (cl, v1, v2, cost);
                }
            }
index e07bd42932ff7a24857980e32dcfbe4736576e83..0c8110fb71a4628175006b54ad7d926ddc642dc1 100644 (file)
@@ -328,9 +328,9 @@ mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
   unsigned edge_number;
   bool skipped = false;
 
-  gcc_assert (bb != EXIT_BLOCK_PTR);
+  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
-  if (bb == ENTRY_BLOCK_PTR)
+  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
     return;
 
   EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
@@ -636,7 +636,7 @@ propagate_necessity (bool aggressive)
             containing STMT is control dependent, but only if we haven't
             already done so.  */
          basic_block bb = gimple_bb (stmt);
-         if (bb != ENTRY_BLOCK_PTR
+         if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
              && !bitmap_bit_p (visited_control_parents, bb->index))
            mark_control_dependent_edges_necessary (bb, false);
        }
@@ -742,7 +742,7 @@ propagate_necessity (bool aggressive)
                      if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
                        mark_last_stmt_necessary (arg_bb);
                    }
-                 else if (arg_bb != ENTRY_BLOCK_PTR
+                 else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
                           && !bitmap_bit_p (visited_control_parents,
                                         arg_bb->index))
                    mark_control_dependent_edges_necessary (arg_bb, true);
@@ -1076,7 +1076,7 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
         fake edges in the dominator tree.  */
       if (e)
         ;
-      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
+      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
        e = EDGE_SUCC (bb, 0);
       else
         e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
@@ -1168,7 +1168,8 @@ eliminate_unnecessary_stmts (void)
 
      as desired.  */
   gcc_assert (dom_info_available_p (CDI_DOMINATORS));
-  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));
+  h = get_all_dominated_blocks (CDI_DOMINATORS,
+                               single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
 
   while (h.length ())
     {
@@ -1265,7 +1266,8 @@ eliminate_unnecessary_stmts (void)
       find_unreachable_blocks ();
 
       /* Delete all unreachable basic blocks in reverse dominator order.  */
-      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
+      for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
+          bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
        {
          prev_bb = bb->prev_bb;
 
index bfd865d09a7a4c0890b632c09f3a1b0e389ac5e3..a286c105615977e44c5da1d71316044487548db1 100644 (file)
@@ -902,7 +902,7 @@ tree_ssa_dominator_optimize (void)
          while (single_succ_p (bb)
                 && (single_succ_edge (bb)->flags & EDGE_EH) == 0)
            bb = single_succ (bb);
-         if (bb == EXIT_BLOCK_PTR)
+         if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
            continue;
          if ((unsigned) bb->index != i)
            bitmap_set_bit (need_eh_cleanup, bb->index);
@@ -3054,7 +3054,8 @@ eliminate_degenerate_phis (void)
      phase in dominator order.  Presumably this is because walking
      in dominator order leaves fewer PHIs for later examination
      by the worklist phase.  */
-  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
+  eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun),
+                              interesting_names);
 
   /* Second phase.  Eliminate second order degenerate PHIs as well
      as trivial copies or constant initializations identified by
index 5dc8d025f663f545adf22e7cd59419d3ec07dec8..51b41017c2f1a09d9329387cf02900f0f22ee2a9 100644 (file)
@@ -1009,7 +1009,7 @@ loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
   FOR_EACH_EDGE (e, ei, bb->preds)
     {
       pred_bb = e->src;
-      if (pred_bb == ENTRY_BLOCK_PTR)
+      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        continue;
       /* TMP is variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live on entry vars to pred.
@@ -1087,7 +1087,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
        bitmap_set_bit (&live->liveout[def_bb->index], p);
     }
   else
-    def_bb = ENTRY_BLOCK_PTR;
+    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
      add it to the list of live on entry blocks.  */
@@ -1103,7 +1103,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
             defined in that block, or whether its live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (use_stmt, index);
-         if (e->src != ENTRY_BLOCK_PTR)
+         if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              if (e->src != def_bb)
                add_block = e->src;
@@ -1169,14 +1169,14 @@ calculate_live_on_exit (tree_live_info_p liveinfo)
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
-             if (e->src != ENTRY_BLOCK_PTR)
+             if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }
 
       /* Add each successors live on entry to this bock live on exit.  */
       FOR_EACH_EDGE (e, ei, bb->succs)
-        if (e->dest != EXIT_BLOCK_PTR)
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
     }
@@ -1369,12 +1369,12 @@ verify_live_on_entry (tree_live_info_p live)
    /* Check for live on entry partitions and report those with a DEF in
       the program. This will typically mean an optimization has done
       something wrong.  */
-  bb = ENTRY_BLOCK_PTR;
+  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
   num = 0;
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
       int entry_block = e->dest->index;
-      if (e->dest == EXIT_BLOCK_PTR)
+      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
         continue;
       for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
index 0aa9f0c43318d9ffd97ef970adacaa862ab2829c..e8074bd425e2f13d5033b60d1e7540f6cca79b1b 100644 (file)
@@ -273,8 +273,8 @@ static inline bitmap
 live_on_entry (tree_live_info_p live, basic_block bb)
 {
   gcc_checking_assert (live->livein
-                      && bb != ENTRY_BLOCK_PTR
-                      && bb != EXIT_BLOCK_PTR);
+                      && bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+                      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   return &live->livein[bb->index];
 }
@@ -287,8 +287,8 @@ static inline bitmap
 live_on_exit (tree_live_info_p live, basic_block bb)
 {
   gcc_checking_assert (live->liveout
-                      && bb != ENTRY_BLOCK_PTR
-                      && bb != EXIT_BLOCK_PTR);
+                      && bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+                      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   return &live->liveout[bb->index];
 }
index c20ffe692a965c32c099cf2868c66c4097e13ed1..6d7d78ea6e458a34b2dceb44fda9571c4aa9fcb2 100644 (file)
@@ -2007,7 +2007,7 @@ find_interesting_uses (struct ivopts_data *data)
       bb = body[i];
 
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (e->dest != EXIT_BLOCK_PTR
+       if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
            && !flow_bb_inside_loop_p (data->current_loop, e->dest))
          find_interesting_uses_outside (data, e);
 
index 246b66702dc9efccc11f4d571d833512efeda390..67291670418b72517930999d76250b7114b28d9f 100644 (file)
@@ -231,7 +231,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
          bool pred_visited;
 
          /* We should have met DEF_BB along the way.  */
-         gcc_assert (pred != ENTRY_BLOCK_PTR);
+         gcc_assert (pred != ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
          if (pred_loop_depth >= def_loop_depth)
            {
index 1e0dcd61e3137244af60526464dd928fcffada37..9c61c3c97a4a743942b6f1de139fb2a89d93729c 100644 (file)
@@ -496,7 +496,7 @@ bound_difference (struct loop *loop, tree x, tree y, bounds *bnds)
   /* Now walk the dominators of the loop header and use the entry
      guards to refine the estimates.  */
   for (bb = loop->header;
-       bb != ENTRY_BLOCK_PTR && cnt < MAX_DOMINATORS_TO_WALK;
+       bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) && cnt < MAX_DOMINATORS_TO_WALK;
        bb = get_immediate_dominator (CDI_DOMINATORS, bb))
     {
       if (!single_pred_p (bb))
@@ -1781,7 +1781,7 @@ simplify_using_initial_conditions (struct loop *loop, tree expr)
      the number of BBs times the number of loops in degenerate
      cases.  */
   for (bb = loop->header;
-       bb != ENTRY_BLOCK_PTR && cnt < MAX_DOMINATORS_TO_WALK;
+       bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) && cnt < MAX_DOMINATORS_TO_WALK;
        bb = get_immediate_dominator (CDI_DOMINATORS, bb))
     {
       if (!single_pred_p (bb))
index 4e49d762442897eeecc213787f7ad8628b688068..f2b4e95f8738d6eeaee355b2f40c73302aa710b1 100644 (file)
@@ -1282,7 +1282,7 @@ may_use_storent_in_loop_p (struct loop *loop)
 
       FOR_EACH_VEC_ELT (exits, i, exit)
        if ((exit->flags & EDGE_ABNORMAL)
-           && exit->dest == EXIT_BLOCK_PTR)
+           && exit->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
          ret = false;
 
       exits.release ();
index 9f4d4926a559cd44ab0e25d9c16fbef95f537e79..27f52b28025f19f4b0934da4901132efb6423355 100644 (file)
@@ -194,7 +194,7 @@ simplify_using_entry_checks (struct loop *loop, tree cond)
        return cond;
 
       e = single_pred_edge (e->src);
-      if (e->src == ENTRY_BLOCK_PTR)
+      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        return cond;
     }
 }
index 67117bcc3f698eee431c50357691c0fa8d88dc6d..ce7116ee4fcd481879794d954b1d22654e7b0b68 100644 (file)
@@ -288,7 +288,7 @@ register_division_in (basic_block bb)
   if (!occ)
     {
       occ = occ_new (bb, NULL);
-      insert_bb (occ, ENTRY_BLOCK_PTR, &occ_head);
+      insert_bb (occ, ENTRY_BLOCK_PTR_FOR_FN (cfun), &occ_head);
     }
 
   occ->bb_has_division = true;
index e76404094252e092aa4c21ec0dfd2d1e9d4ed2a7..389423b04c45ac5336a92337188b1804a9b0dce9 100644 (file)
@@ -381,7 +381,7 @@ tree_ssa_phiprop (void)
 
   /* Walk the dominator tree in preorder.  */
   bbs = get_all_dominated_blocks (CDI_DOMINATORS,
-                                 single_succ (ENTRY_BLOCK_PTR));
+                                 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
   FOR_EACH_VEC_ELT (bbs, i, bb)
     for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);
index b16fd1704d1e073dc7312dc9162b332d6e196e2b..29d56b1c660330f7942722dd3e7ef0abdc69eaad 100644 (file)
@@ -2467,7 +2467,7 @@ compute_antic (void)
     }
 
   /* At the exit block we anticipate nothing.  */
-  BB_VISITED (EXIT_BLOCK_PTR) = 1;
+  BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
 
   changed_blocks = sbitmap_alloc (last_basic_block + 1);
   bitmap_ones (changed_blocks);
@@ -3668,7 +3668,7 @@ insert (void)
       num_iterations++;
       if (dump_file && dump_flags & TDF_DETAILS)
        fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
-      new_stuff = insert_aux (ENTRY_BLOCK_PTR);
+      new_stuff = insert_aux (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
       /* Clear the NEW sets before the next iteration.  We have already
          fully propagated its contents.  */
@@ -3713,15 +3713,16 @@ compute_avail (void)
 
       e = get_or_alloc_expr_for_name (name);
       add_to_value (get_expr_value_id (e), e);
-      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
-      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
+      bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
+      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
+                                   e);
     }
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
-      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR),
+      print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                        "tmp_gen", ENTRY_BLOCK);
-      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR),
+      print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                        "avail_out", ENTRY_BLOCK);
     }
 
@@ -3730,7 +3731,7 @@ compute_avail (void)
 
   /* Seed the algorithm by putting the dominator children of the entry
      block on the worklist.  */
-  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
+  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
        son;
        son = next_dom_son (CDI_DOMINATORS, son))
     worklist[sp++] = son;
index bd33071303b1ec67eb329c1e85baffce3332cdb1..b9db34c5057369ad11a37580399b9a273bfd3d89 100644 (file)
@@ -184,7 +184,8 @@ cfg_blocks_add (basic_block bb)
 {
   bool head = false;
 
-  gcc_assert (bb != ENTRY_BLOCK_PTR && bb != EXIT_BLOCK_PTR);
+  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
+             && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
   gcc_assert (!bitmap_bit_p (bb_in_list, bb->index));
 
   if (cfg_blocks_empty_p ())
@@ -279,7 +280,7 @@ static void
 add_control_edge (edge e)
 {
   basic_block bb = e->dest;
-  if (bb == EXIT_BLOCK_PTR)
+  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return;
 
   /* If the edge had already been executed, skip it.  */
@@ -408,7 +409,7 @@ simulate_block (basic_block block)
   gimple_stmt_iterator gsi;
 
   /* There is nothing to do for the exit block.  */
-  if (block == EXIT_BLOCK_PTR)
+  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -519,7 +520,7 @@ ssa_prop_init (void)
 
   /* Seed the algorithm by adding the successors of the entry block to the
      edge worklist.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
     add_control_edge (e);
 }
 
index eedccc6654b9e18bb886dca37b6e5acbee782ecb..4c4924c721c26c79a204b9c009b8adbb928ac400 100644 (file)
@@ -1270,11 +1270,11 @@ build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
   if ((!op1def || gimple_nop_p (op1def))
       && (!op2def || gimple_nop_p (op2def)))
     {
-      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
       if (gsi_end_p (gsi))
        {
          gimple_stmt_iterator gsi2
-           = gsi_last_bb (single_succ (ENTRY_BLOCK_PTR));
+           = gsi_last_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple_set_uid (sum,
                          gsi_end_p (gsi2) ? 1 : gimple_uid (gsi_stmt (gsi2)));
        }
@@ -4529,8 +4529,8 @@ debug_ops_vector (vec<operand_entry_t> ops)
 static void
 do_reassoc (void)
 {
-  break_up_subtract_bb (ENTRY_BLOCK_PTR);
-  reassociate_bb (EXIT_BLOCK_PTR);
+  break_up_subtract_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  reassociate_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
 }
 
 /* Initialize the reassociation pass.  */
index f0c831d113bbd9034fbdefdcbc2f55a820793cda..305882dd06fd473f50cdb59bae86025b80ce712d 100644 (file)
@@ -170,7 +170,7 @@ nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
            }
 
          /* Short circuit. Nothing dominates the entry block.  */
-         if (useblock == ENTRY_BLOCK_PTR)
+         if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              BITMAP_FREE (blocks);
              return NULL;
@@ -568,7 +568,7 @@ execute_sink_code (void)
   memset (&sink_stats, 0, sizeof (sink_stats));
   calculate_dominance_info (CDI_DOMINATORS);
   calculate_dominance_info (CDI_POST_DOMINATORS);
-  sink_code_in_bb (EXIT_BLOCK_PTR);
+  sink_code_in_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
   statistics_counter_event (cfun, "Sunk statements", sink_stats.sunk);
   free_dominance_info (CDI_POST_DOMINATORS);
   remove_fake_exit_edges ();
index a15e37c5151266e7aca91e646daabb8feee4bf57..3b8d1df9c8514daf6b8b78daaafd5f2405f38d61 100644 (file)
@@ -175,7 +175,7 @@ warn_uninitialized_vars (bool warn_possibly_uninitialized)
   FOR_EACH_BB (bb)
     {
       bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
-                                            single_succ (ENTRY_BLOCK_PTR), bb);
+                                            single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
@@ -315,14 +315,14 @@ compute_uninit_opnds_pos (gimple phi)
 static inline basic_block
 find_pdom (basic_block block)
 {
-   if (block == EXIT_BLOCK_PTR)
-     return EXIT_BLOCK_PTR;
+   if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
+     return EXIT_BLOCK_PTR_FOR_FN (cfun);
    else
      {
        basic_block bb
            = get_immediate_dominator (CDI_POST_DOMINATORS, block);
        if (! bb)
-         return EXIT_BLOCK_PTR;
+        return EXIT_BLOCK_PTR_FOR_FN (cfun);
        return bb;
      }
 }
@@ -333,13 +333,13 @@ find_pdom (basic_block block)
 static inline basic_block
 find_dom (basic_block block)
 {
-   if (block == ENTRY_BLOCK_PTR)
-     return ENTRY_BLOCK_PTR;
+   if (block == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+     return ENTRY_BLOCK_PTR_FOR_FN (cfun);
    else
      {
        basic_block bb = get_immediate_dominator (CDI_DOMINATORS, block);
        if (! bb)
-         return ENTRY_BLOCK_PTR;
+        return ENTRY_BLOCK_PTR_FOR_FN (cfun);
        return bb;
      }
 }
@@ -454,7 +454,8 @@ compute_control_dep_chain (basic_block bb, basic_block dep_bb,
 
           cd_bb = find_pdom (cd_bb);
           post_dom_check++;
-          if (cd_bb == EXIT_BLOCK_PTR || post_dom_check > MAX_POSTDOM_CHECK)
+         if (cd_bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || post_dom_check >
+             MAX_POSTDOM_CHECK)
             break;
         }
       cur_cd_chain->pop ();
index 221e7d705e03d69467516b0955c08f3a03d8c9e6..982937462db376425775196335d521e71da42cbe 100644 (file)
@@ -97,7 +97,7 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
          break;
        }
 
-      gcc_assert (src != ENTRY_BLOCK_PTR);
+      gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
       if (! bitmap_bit_p (visited, src->index))
        {
index 33677ce1277114e26a2f4faac50561301ba4ec46..9a30400c0d942c679b753362044cdc7146369cdb 100644 (file)
@@ -821,7 +821,7 @@ eliminate_tail_call (struct tailcall *t)
 
   gcc_assert (is_gimple_call (stmt));
 
-  first = single_succ (ENTRY_BLOCK_PTR);
+  first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
   /* Remove the code after call_gsi that will become unreachable.  The
      possibly unreachable code in other blocks is removed later in
@@ -842,9 +842,10 @@ eliminate_tail_call (struct tailcall *t)
 
   /* Number of executions of function has reduced by the tailcall.  */
   e = single_succ_edge (gsi_bb (t->call_gsi));
-  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
-  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
-  if (e->dest != EXIT_BLOCK_PTR)
+  decrease_profile (EXIT_BLOCK_PTR_FOR_FN (cfun), e->count, EDGE_FREQUENCY (e));
+  decrease_profile (ENTRY_BLOCK_PTR_FOR_FN (cfun), e->count,
+                   EDGE_FREQUENCY (e));
+  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
     decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
 
   /* Replace the call by a jump to the start of function.  */
@@ -948,7 +949,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
   bool phis_constructed = false;
   struct tailcall *tailcalls = NULL, *act, *next;
   bool changed = false;
-  basic_block first = single_succ (ENTRY_BLOCK_PTR);
+  basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   tree param;
   gimple stmt;
   edge_iterator ei;
@@ -958,7 +959,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
   if (opt_tailcalls)
     opt_tailcalls = suitable_for_tail_call_opt_p ();
 
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       /* Only traverse the normal exits, i.e. those that end with return
         statement.  */
@@ -982,7 +983,8 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
             or if there are existing degenerate PHI nodes.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
-           first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
+           first =
+             split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
 
          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
@@ -1029,7 +1031,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
   if (a_acc || m_acc)
     {
       /* Modify the remaining return statements.  */
-      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          stmt = last_stmt (e->src);
 
index 42730f07f84e90dc29915ff8a4d8f156b1cb537c..9330074ce3069bd26d4cb33784a38c722b7b0773 100644 (file)
@@ -652,7 +652,7 @@ instrument_func_entry (void)
   tree ret_addr, builtin_decl;
   gimple g;
 
-  succ_bb = single_succ (ENTRY_BLOCK_PTR);
+  succ_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   gsi = gsi_after_labels (succ_bb);
 
   builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
@@ -682,7 +682,7 @@ instrument_func_exit (void)
   edge_iterator ei;
 
   /* Find all function exits.  */
-  exit_bb = EXIT_BLOCK_PTR;
+  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
   FOR_EACH_EDGE (e, ei, exit_bb->preds)
     {
       gsi = gsi_last_bb (e->src);
index cfda63a0682d4df204edddef4a30545a231ccb3d..591747be516772d2e169054c532cd242fb9bf9e6 100644 (file)
@@ -836,16 +836,18 @@ vt_stack_adjustments (void)
   int sp;
 
   /* Initialize entry block.  */
-  VTI (ENTRY_BLOCK_PTR)->visited = true;
-  VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET;
-  VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET;
+  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
+  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust =
+ INCOMING_FRAME_SP_OFFSET;
+  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust =
+ INCOMING_FRAME_SP_OFFSET;
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
+  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
 
   while (sp)
     {
@@ -866,7 +868,7 @@ vt_stack_adjustments (void)
          VTI (dest)->visited = true;
          VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;
 
-         if (dest != EXIT_BLOCK_PTR)
+         if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            for (insn = BB_HEAD (dest);
                 insn != NEXT_INSN (BB_END (dest));
                 insn = NEXT_INSN (insn))
@@ -7035,7 +7037,7 @@ vt_find_locations (void)
                {
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    {
-                     if (e->dest == EXIT_BLOCK_PTR)
+                     if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
                        continue;
 
                      if (bitmap_bit_p (visited, e->dest->index))
@@ -9584,7 +9586,7 @@ vt_add_function_parameter (tree parm)
   if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
     return;
 
-  out = &VTI (ENTRY_BLOCK_PTR)->out;
+  out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
 
   dv = dv_from_decl (parm);
 
@@ -9931,7 +9933,7 @@ vt_initialize (void)
       for (;;)
        {
          edge e;
-         if (bb->next_bb == EXIT_BLOCK_PTR
+         if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
              || ! single_pred_p (bb->next_bb))
            break;
          e = find_edge (bb, bb->next_bb);
@@ -10034,7 +10036,7 @@ vt_initialize (void)
     }
 
   hard_frame_pointer_adjustment = -1;
-  VTI (ENTRY_BLOCK_PTR)->flooded = true;
+  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
   cfa_base_rtx = NULL_RTX;
   return true;
 }
index 0f944652e358c9f96219baabb80b322b208ee718..3ca4700b71b204e3ee20b09076c0e2dc642a493d 100644 (file)
@@ -1639,7 +1639,7 @@ assemble_start_function (tree decl, const char *fnname)
         align the hot section and write out the hot section label.
         But if the current function is a thunk, we do not have a CFG.  */
       if (!cfun->is_thunk
-         && BB_PARTITION (ENTRY_BLOCK_PTR->next_bb) == BB_COLD_PARTITION)
+         && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
        {
          switch_to_section (text_section);
          assemble_align (DECL_ALIGN (decl));