+2013-09-13  Jakub Jelinek  <jakub@redhat.com>
+
+ PR tree-optimization/58392
+ * tree-cfg.c (move_sese_region_to_fn): Rename loop variable
+ to avoid shadowing of outer loop variable. If
+ saved_cfun->has_simduid_loops or saved_cfun->has_force_vect_loops,
+ replace_by_duplicate_decl simduid of loops that have it set, and
+ set dest_cfun->has_simduid_loops and/or
+ dest_cfun->has_force_vect_loops.
+ * omp-low.c (build_outer_var_ref): Call maybe_lookup_decl_in_outer_ctx
+ instead of maybe_lookup_decl.
+ * tree-inline.c (copy_loops): Change blocks_to_copy argument to id.
+ Use id->blocks_to_copy instead of blocks_to_copy. Adjust recursive
+ call. Copy over force_vect, and copy and remap simduid. Set
+ cfun->has_simduid_loops and/or cfun->has_force_vect_loops.
+ (copy_cfg_body): Remove blocks_to_copy argument. Use
+ id->blocks_to_copy instead of blocks_to_copy. Adjust copy_loops
+ caller. Don't set cfun->has_simduid_loops and/or
+ cfun->has_force_vect_loops here.
+ (copy_body): Remove blocks_to_copy argument. Adjust copy_cfg_body
+ caller.
+ (expand_call_inline, tree_function_versioning): Adjust copy_body
+ callers.
+
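For readers unfamiliar with these loop flags, here is a minimal, self-contained sketch of the per-loop copying pattern the tree-inline.c change describes. The sim_loop/sim_fn types and copy_children below are hypothetical stand-ins for GCC's struct loop, struct function and copy_loops; they only illustrate duplicating simduid/force_vect per copied loop and deriving the per-function flags from the copies, not the real decl remapping or the blocks_to_copy filtering.

/* Hypothetical, simplified illustration; not the GCC types.  */
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct sim_loop {
  const char *simduid;      /* stand-in for the simduid decl, NULL if unset */
  bool force_vect;
  struct sim_loop *inner;   /* first child loop */
  struct sim_loop *next;    /* next sibling loop */
};

struct sim_fn {
  bool has_simduid_loops;
  bool has_force_vect_loops;
};

/* Copy the children of SRC_PARENT, duplicating per-loop simduid and
   force_vect and setting the per-function flags only for loops that
   actually end up in the copy.  */
static struct sim_loop *
copy_children (struct sim_fn *dest_fn, struct sim_loop *src_parent)
{
  struct sim_loop *first = NULL, **tail = &first;
  for (struct sim_loop *src = src_parent->inner; src; src = src->next)
    {
      struct sim_loop *dest = calloc (1, sizeof *dest);
      if (src->simduid)
        {
          /* Stand-in for remap_decl: give the copy its own simduid.  */
          size_t len = strlen (src->simduid) + 1;
          dest->simduid = memcpy (malloc (len), src->simduid, len);
          dest_fn->has_simduid_loops = true;
        }
      if (src->force_vect)
        {
          dest->force_vect = true;
          dest_fn->has_force_vect_loops = true;
        }
      dest->inner = copy_children (dest_fn, src);  /* recurse into subloops */
      *tail = dest;
      tail = &dest->next;
    }
  return first;
}

int
main (void)
{
  struct sim_loop inner = { "D.1234", true, NULL, NULL };
  struct sim_loop outer = { NULL, false, &inner, NULL };
  struct sim_loop root  = { NULL, false, &outer, NULL };
  struct sim_fn dest_fn = { false, false };

  struct sim_loop *copy = copy_children (&dest_fn, &root);
  printf ("has_simduid_loops=%d has_force_vect_loops=%d\n",
          dest_fn.has_simduid_loops, dest_fn.has_force_vect_loops);
  printf ("copied inner loop simduid=%s\n", copy->inner->simduid);
  return 0;
}

Running the sketch prints has_simduid_loops=1 has_force_vect_loops=1, mirroring how cfun->has_simduid_loops and cfun->has_force_vect_loops are now derived from the loops actually copied rather than OR-ed in wholesale from src_cfun.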
2013-09-13  Martin Jambor  <mjambor@suse.cz>

PR bootstrap/58388
if (bb->loop_father->header == bb
&& loop_outer (bb->loop_father) == loop)
{
- struct loop *loop = bb->loop_father;
+ struct loop *this_loop = bb->loop_father;
flow_loop_tree_node_remove (bb->loop_father);
- flow_loop_tree_node_add (get_loop (dest_cfun, 0), loop);
- fixup_loop_arrays_after_move (saved_cfun, cfun, loop);
+ flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
+ fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
}
/* Remove loop exits from the outlined region. */
outer; outer = loop_outer (outer))
outer->num_nodes -= bbs.length ();
+ if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
+ {
+ struct loop *aloop;
+ for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
+ if (aloop != NULL)
+ {
+ if (aloop->simduid)
+ {
+ replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
+ d.to_context);
+ dest_cfun->has_simduid_loops = true;
+ }
+ if (aloop->force_vect)
+ dest_cfun->has_force_vect_loops = true;
+ }
+ }
+
/* Rewire BLOCK_SUBBLOCKS of orig_block. */
if (orig_block)
{
as siblings of DEST_PARENT. */
static void
-copy_loops (bitmap blocks_to_copy,
+copy_loops (copy_body_data *id,
struct loop *dest_parent, struct loop *src_parent)
{
struct loop *src_loop = src_parent->inner;
while (src_loop)
{
- if (!blocks_to_copy
- || bitmap_bit_p (blocks_to_copy, src_loop->header->index))
+ if (!id->blocks_to_copy
+ || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
{
struct loop *dest_loop = alloc_loop ();
place_new_loop (cfun, dest_loop);
flow_loop_tree_node_add (dest_parent, dest_loop);
+ if (src_loop->simduid)
+ {
+ dest_loop->simduid = remap_decl (src_loop->simduid, id);
+ cfun->has_simduid_loops = true;
+ }
+ if (src_loop->force_vect)
+ {
+ dest_loop->force_vect = true;
+ cfun->has_force_vect_loops = true;
+ }
+
/* Recurse. */
- copy_loops (blocks_to_copy, dest_loop, src_loop);
+ copy_loops (id, dest_loop, src_loop);
}
src_loop = src_loop->next;
}
static tree
copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
basic_block entry_block_map, basic_block exit_block_map,
- bitmap blocks_to_copy, basic_block new_entry)
+ basic_block new_entry)
{
tree callee_fndecl = id->src_fn;
/* Original cfun for the callee, doesn't change. */
/* Use aux pointers to map the original blocks to copy. */
FOR_EACH_BB_FN (bb, cfun_to_copy)
- if (!blocks_to_copy || bitmap_bit_p (blocks_to_copy, bb->index))
+ if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
{
basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
bb->aux = new_bb;
bool can_make_abormal_goto
= id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
FOR_ALL_BB_FN (bb, cfun_to_copy)
- if (!blocks_to_copy
- || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
+ if (!id->blocks_to_copy
+ || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
can_make_abormal_goto);
if (loops_for_fn (src_cfun) != NULL
&& current_loops != NULL)
{
- copy_loops (blocks_to_copy, entry_block_map->loop_father,
+ copy_loops (id, entry_block_map->loop_father,
get_loop (src_cfun, 0));
/* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
loops_state_set (LOOPS_NEED_FIXUP);
- cfun->has_force_vect_loops |= src_cfun->has_force_vect_loops;
- cfun->has_simduid_loops |= src_cfun->has_simduid_loops;
}
/* If the loop tree in the source function needed fixup, mark the
if (gimple_in_ssa_p (cfun))
FOR_ALL_BB_FN (bb, cfun_to_copy)
- if (!blocks_to_copy
- || (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
+ if (!id->blocks_to_copy
+ || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
copy_phis_for_bb (bb, id);
FOR_ALL_BB_FN (bb, cfun_to_copy)
static tree
copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
basic_block entry_block_map, basic_block exit_block_map,
- bitmap blocks_to_copy, basic_block new_entry)
+ basic_block new_entry)
{
tree fndecl = id->src_fn;
tree body;
/* If this body has a CFG, walk CFG and copy. */
gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
- blocks_to_copy, new_entry);
+ new_entry);
copy_debug_stmts (id);
return body;
duplicate our body before altering anything. */
copy_body (id, bb->count,
GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
- bb, return_block, NULL, NULL);
+ bb, return_block, NULL);
/* Reset the escaped solution. */
if (cfun->gimple_df)
/* Copy the Function's body. */
copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
- ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
+ ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (new_decl);