gcc/
2013-11-20 David Malcolm <dmalcolm@redhat.com>

* cfg.c (dump_edge_info): Remove redundant comment.
* cfgcleanup.c (outgoing_edges_match): Reword reference to
EXIT_BLOCK_PTR in comment.
(try_optimize_cfg): Likewise.
* cfgrtl.c (last_bb_in_partition): Likewise.
* cgraph.c (cgraph_node_cannot_return): Likewise.
* function.c (thread_prologue_and_epilogue_insns): Likewise.
* graphite-scop-detection.c (scopdet_basic_block_info): Likewise.
* ipa-split.c (consider_split): Likewise.
* profile.c (find_spanning_tree): Likewise.
* sched-int.h (common_sched_info_def.add_block): Likewise.
* dominance.c (calc_dfs_tree_nonrec): Reword references in
comments to now removed ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros.
* tree-cfgcleanup.c (cleanup_control_flow_bb): Reword references
in comments to now removed ENTRY_BLOCK_PTR macro.
(tree_forwarder_block_p): Reword reference in comment to
EXIT_BLOCK_PTR.
* tree-inline.c (copy_cfg_body): Reword references in comments to
now removed ENTRY_BLOCK_PTR macro.
* tree-ssa-propagate.c (ssa_prop_init): Likewise.
* tree-scalar-evolution.h (block_before_loop): Likewise. Add
a comma to the comment to clarify the meaning.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205182 138bc75d-0d04-0410-961f-82ee72b054a4
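
For context: the ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros that these comments
stop mentioning were removed earlier in this series because they implicitly
dereferenced the global cfun; the replacements take the function as an explicit
argument. A rough sketch of the two spellings, approximating the basic-block.h
definitions of the time (check the actual tree before relying on them):

    /* Old spelling, removed earlier in this series: implicitly reads
       the global cfun.  */
    #define ENTRY_BLOCK_PTR  (cfun->cfg->x_entry_block_ptr)
    #define EXIT_BLOCK_PTR   (cfun->cfg->x_exit_block_ptr)

    /* New spelling: the function whose CFG is meant is explicit.  */
    #define ENTRY_BLOCK_PTR_FOR_FN(FN)  ((FN)->cfg->x_entry_block_ptr)
    #define EXIT_BLOCK_PTR_FOR_FN(FN)   ((FN)->cfg->x_exit_block_ptr)

    /* Typical conversion:
         old:  if (single_succ (bb) == EXIT_BLOCK_PTR)
         new:  if (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))  */

The hunks below therefore only touch comments: they either spell out
ENTRY_BLOCK_PTR_FOR_FN (cfun) or simply say "the entry block" / "the exit
block".
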
+2013-11-20 David Malcolm <dmalcolm@redhat.com>
+
+ * cfg.c (dump_edge_info): Remove redundant comment.
+ * cfgcleanup.c (outgoing_edges_match): Reword reference to
+ EXIT_BLOCK_PTR in comment.
+ (try_optimize_cfg): Likewise.
+ * cfgrtl.c (last_bb_in_partition): Likewise.
+ * cgraph.c (cgraph_node_cannot_return): Likewise.
+ * function.c (thread_prologue_and_epilogue_insns): Likewise.
+ * graphite-scop-detection.c (scopdet_basic_block_info): Likewise.
+ * ipa-split.c (consider_split): Likewise.
+ * profile.c (find_spanning_tree): Likewise.
+ * sched-int.h (common_sched_info_def.add_block): Likewise.
+ * dominance.c (calc_dfs_tree_nonrec): Reword references in
+ comments to now removed ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros.
+ * tree-cfgcleanup.c (cleanup_control_flow_bb): Reword references
+ in comments to now removed ENTRY_BLOCK_PTR macro.
+ (tree_forwarder_block_p): Reword reference in comment to
+ EXIT_BLOCK_PTR.
+ * tree-inline.c (copy_cfg_body): Reword references in comments to
+ now removed ENTRY_BLOCK_PTR macro.
+ * tree-ssa-propagate.c (ssa_prop_init): Likewise.
+ * tree-scalar-evolution.h (block_before_loop): Likewise. Add
+ a comma to the comment to clarify the meaning.
+
2013-11-20 Andrew MacLeod <amacleod@redhat.com>

* gimplify.h (gimplify_hasher : typed_free_remove, struct gimplify_ctx):
&& (flags & TDF_SLIM) == 0)
do_details = true;
- /* ENTRY_BLOCK_PTR/EXIT_BLOCK_PTR depend on cfun.
- Compare against ENTRY_BLOCK/EXIT_BLOCK to avoid that dependency. */
if (side->index == ENTRY_BLOCK)
fputs (" ENTRY", file);
else if (side->index == EXIT_BLOCK)
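
The comment removed just above explained why dump_edge_info compares block
indices rather than block pointers; with the cfun-implicit macros gone, the
remark became redundant, so it is deleted rather than reworded. For
illustration only (made-up helper names, assuming the usual GCC internal
declarations from basic-block.h), the two equivalent checks look like:

    /* Index-based test: ENTRY_BLOCK and EXIT_BLOCK are the fixed
       indices 0 and 1, so no struct function is needed.  */
    static bool
    bb_is_entry_or_exit_by_index (const_basic_block bb)
    {
      return bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK;
    }

    /* Pointer-based test: the _FOR_FN accessors make the dependency
       on a particular function explicit.  */
    static bool
    bb_is_entry_or_exit_by_pointer (struct function *fn, basic_block bb)
    {
      return bb == ENTRY_BLOCK_PTR_FOR_FN (fn)
             || bb == EXIT_BLOCK_PTR_FOR_FN (fn);
    }
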
edge e1, e2;
edge_iterator ei;
- /* If we performed shrink-wrapping, edges to the EXIT_BLOCK_PTR can
+ /* If we performed shrink-wrapping, edges to the exit block can
only be distinguished for JUMP_INSNs. The two paths may differ in
whether they went through the prologue. Sibcalls are fine, we know
that we either didn't need or inserted an epilogue before them. */
}
delete_basic_block (b);
changed = true;
- /* Avoid trying to remove ENTRY_BLOCK_PTR. */
+ /* Avoid trying to remove the entry block. */
b = (c == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? c->next_bb : c);
continue;
}
if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
return bb;
}
- /* Return bb before EXIT_BLOCK_PTR. */
+ /* Return bb before the exit block. */
return bb->prev_bb;
}
and thus it is safe to ignore its side effects for IPA analysis
when computing side effects of the caller.
FIXME: We could actually mark all edges that have no reaching
- patch to EXIT_BLOCK_PTR or throw to get better results. */
+ path to the exit block or throw to get better results. */
bool
cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
{
edge_iterator *stack;
edge_iterator ei, einext;
int sp;
- /* Start block (ENTRY_BLOCK_PTR for forward problem, EXIT_BLOCK for backward
+ /* Start block (the entry block for forward problem, exit block for backward
problem). */
basic_block en_block;
/* Ending block. */
{
unsigned i, last;
- /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
+ /* convert_jumps_to_returns may add to preds of the exit block
(but won't remove). Stop at end of current preds. */
last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
for (i = 0; i < last; i++)
result.next = exit_e->dest;
/* If we do not dominate result.next, remove it. It's either
- the EXIT_BLOCK_PTR, or another bb dominates it and will
+ the exit block, or another bb dominates it and will
call the scop detection for this bb. */
if (!dominated_by_p (CDI_DOMINATORS, result.next, bb))
result.next = NULL;
<retval> = tmp_var;
return <retval>
but return_bb can not be more complex than this.
- If nothing is found, return EXIT_BLOCK_PTR.
+ If nothing is found, return the exit block.
When there are multiple RETURN statement, chose one with return value,
since that one is more likely shared by multiple code paths.
union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
/* First add all abnormal edges to the tree unless they form a cycle. Also
- add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
+ add all edges to the exit block to avoid inserting profiling code behind
setting return value from function. */
for (i = 0; i < num_edges; i++)
{
/* Called to notify frontend, that new basic block is being added.
The first parameter - new basic block.
The second parameter - block, after which new basic block is being added,
- or EXIT_BLOCK_PTR, if recovery block is being added,
+ or the exit block, if recovery block is being added,
or NULL, if standalone block is being added. */
void (*add_block) (basic_block, basic_block);
the start of the successor block.
As a precondition, we require that BB be not equal to
- ENTRY_BLOCK_PTR. */
+ the entry block. */
static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
/* If PHI_WANTED is false, BB must not have any PHI nodes.
Otherwise, BB must have PHI nodes. */
|| gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
- /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
+ /* BB may not be a predecessor of the exit block. */
|| single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
/* Nor should this be an infinite loop. */
|| single_succ (bb) == bb
/* Register specific tree functions. */
gimple_register_cfg_hooks ();
- /* If we are inlining just region of the function, make sure to connect new entry
- to ENTRY_BLOCK_PTR. Since new entry can be part of loop, we must compute
- frequency and probability of ENTRY_BLOCK_PTR based on the frequencies and
+ /* If we are inlining just region of the function, make sure to connect
+ new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
+ part of loop, we must compute frequency and probability of
+ ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
probabilities of edges incoming from nonduplicated region. */
if (new_entry)
{
bool);
extern tree compute_overall_effect_of_inner_loop (struct loop *, tree);
-/* Returns the basic block preceding LOOP or ENTRY_BLOCK_PTR when the
- loop is function's body. */
+/* Returns the basic block preceding LOOP, or the CFG entry block when
+ the loop is function's body. */
static inline basic_block
block_before_loop (loop_p loop)
cfg_blocks.safe_grow_cleared (20);
/* Initially assume that every edge in the CFG is not executable.
- (including the edges coming out of ENTRY_BLOCK_PTR). */
+ (including the edges coming out of the entry block). */
FOR_ALL_BB (bb)
{
gimple_stmt_iterator si;