to consecutive statements in it. Use the ids to determine which
precedes which. This avoids the linear traversal on subsequent
visits to the same block. */
- for (auto si = gsi_start_bb (inval_bb); !gsi_end_p (si);
- gsi_next_nondebug (&si))
- {
- gimple *stmt = gsi_stmt (si);
- unsigned uid = inc_gimple_stmt_max_uid (m_func);
- gimple_set_uid (stmt, uid);
- }
+ renumber_gimple_stmt_uids_in_block (m_func, inval_bb);
return gimple_uid (inval_stmt) < gimple_uid (use_stmt);
}
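The comment above describes a small caching scheme: the first ordering query in a block pays for one walk that hands out increasing uids, and later queries in the same block reduce to an integer comparison.  The standalone sketch below (hypothetical types and names, not GCC code) shows the idea in isolation.

  #include <stdio.h>

  struct stmt { unsigned uid; };   /* assigned lazily; 0 until the block is numbered */
  struct block { struct stmt *stmts[8]; int n; int numbered; };

  static unsigned max_uid;

  /* Number every statement of BB on the first query only.  */
  static void
  number_block (struct block *bb)
  {
    if (bb->numbered)
      return;
    for (int i = 0; i < bb->n; i++)
      bb->stmts[i]->uid = ++max_uid;
    bb->numbered = 1;
  }

  /* True if A appears before B in BB; constant time after the one-off pass.  */
  static int
  precedes_p (struct block *bb, struct stmt *a, struct stmt *b)
  {
    number_block (bb);
    return a->uid < b->uid;
  }

  int
  main (void)
  {
    struct stmt s1 = { 0 }, s2 = { 0 }, s3 = { 0 };
    struct block bb = { { &s1, &s2, &s3 }, 3, 0 };
    printf ("%d %d\n", precedes_p (&bb, &s1, &s3), precedes_p (&bb, &s3, &s2));
    return 0;
  }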
tree_code code = gimple_cond_code (cond);
equality = code == EQ_EXPR || code == NE_EXPR;
}
+ else if (gimple_code (use_stmt) == GIMPLE_PHI)
+ {
+ /* Only add a PHI result to POINTERS if all its
+ operands are related to PTR, otherwise continue. */
+ tree lhs = gimple_phi_result (use_stmt);
+ if (!pointers_related_p (stmt, lhs, ptr, m_ptr_qry))
+ continue;
+
+ if (TREE_CODE (lhs) == SSA_NAME)
+ {
+ pointers.safe_push (lhs);
+ continue;
+ }
+ }
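As a source-level illustration of what the new PHI handling is aimed at (a hypothetical test case, not part of the patch), consider a pointer that reaches the use only through a PHI whose operands all derive from the pointer passed to free; the intent is that the use is still diagnosed because the PHI result gets pushed onto POINTERS.

  #include <stdlib.h>

  void
  f (char *p, int cond)
  {
    char *q;
    if (cond)
      q = p;
    else
      q = p + 8;      /* q becomes (roughly) PHI <p, p + 8> in the GIMPLE IL */
    free (p);
    q[0] = 0;         /* use after free reached through the PHI result */
  }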
/* Warn if USE_STMT is dominated by the deallocation STMT.
Otherwise, add the pointer to POINTERS so that the uses
of any other pointers derived from it can be checked. */
if (use_after_inval_p (stmt, use_stmt, check_dangling))
{
- if (gimple_code (use_stmt) == GIMPLE_PHI)
- {
- /* Only add a PHI result to POINTERS if all its
- operands are related to PTR, otherwise continue. */
- tree lhs = gimple_phi_result (use_stmt);
- if (!pointers_related_p (stmt, lhs, ptr, m_ptr_qry))
- continue;
-
- if (TREE_CODE (lhs) == SSA_NAME)
- {
- pointers.safe_push (lhs);
- continue;
- }
- }
-
basic_block use_bb = gimple_bb (use_stmt);
bool this_maybe
= (maybe
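The dominance test described in the comment above corresponds to the simplest source pattern -Wuse-after-free is meant to diagnose (again a hypothetical example): a use reached only after the deallocation is flagged, a use before it is not, and when both statements sit in the same basic block the uid comparison added in the first hunk is what decides their order.

  #include <stdlib.h>

  void
  g (char *p)
  {
    p[0] = 1;         /* not diagnosed: precedes the call to free */
    free (p);
    p[0] = 2;         /* diagnosed: the use is dominated by the deallocation */
  }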
Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/
+/* Renumber the gimple stmt uids in one block.  The caller is responsible
+   for calling set_gimple_stmt_max_uid (fun, 0) beforehand if a fresh
+   numbering is wanted.  */
+
+void
+renumber_gimple_stmt_uids_in_block (struct function *fun, basic_block bb)
+{
+ gimple_stmt_iterator bsi;
+ for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ {
+ gimple *stmt = gsi_stmt (bsi);
+ gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
+ }
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ {
+ gimple *stmt = gsi_stmt (bsi);
+ gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
+ }
+}
+
/* Renumber all of the gimple stmt uids. */
void
set_gimple_stmt_max_uid (fun, 0);
FOR_ALL_BB_FN (bb, fun)
- {
- gimple_stmt_iterator bsi;
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- {
- gimple *stmt = gsi_stmt (bsi);
- gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
- }
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- {
- gimple *stmt = gsi_stmt (bsi);
- gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
- }
- }
+ renumber_gimple_stmt_uids_in_block (fun, bb);
}
/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
set_gimple_stmt_max_uid (cfun, 0);
for (i = 0; i < n_blocks; i++)
- {
- basic_block bb = blocks[i];
- gimple_stmt_iterator bsi;
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- {
- gimple *stmt = gsi_stmt (bsi);
- gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
- }
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- {
- gimple *stmt = gsi_stmt (bsi);
- gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
- }
- }
+ renumber_gimple_stmt_uids_in_block (cfun, blocks[i]);
}
#ifndef GCC_TREE_DFA_H
#define GCC_TREE_DFA_H
+extern void renumber_gimple_stmt_uids_in_block (struct function *, basic_block);
extern void renumber_gimple_stmt_uids (struct function *);
extern void renumber_gimple_stmt_uids_in_blocks (basic_block *, int);
extern void dump_variable (FILE *, tree);