/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2008-2020 Free Software Foundation, Inc.
+ Copyright (C) 2008-2021 Free Software Foundation, Inc.
Contributed by Martin Jambor <mjambor@suse.cz>
This file is part of GCC.
/* Number of components created when splitting aggregate parameters. */
int param_reductions_created;
+
+ /* Number of deferred_init calls that are modified. */
+ int deferred_init;
+
+ /* Number of deferred_init calls that are created by
+ generate_subtree_deferred_init. */
+ int subtree_deferred_init;
} sra_stats;
static void
if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
return NULL;
+ if (write && TREE_READONLY (base))
+ {
+ disqualify_candidate (base, "Encountered a store to a read-only decl.");
+ return NULL;
+ }
+
HOST_WIDE_INT offset, size, max_size;
if (!poffset.is_constant (&offset)
|| !psize.is_constant (&size)
disqualify_candidate (base, "Encountered an unconstrained access.");
return NULL;
}
+ if (offset + size > tree_to_shwi (DECL_SIZE (base)))
+ {
+ disqualify_candidate (base, "Encountered an access beyond the base.");
+ return NULL;
+ }
access = create_access_1 (base, offset, size);
access->expr = expr;
t = gimple_call_lhs (stmt);
if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
- ret |= build_access_from_expr (t, stmt, true);
+ {
+ /* If the STMT is a call to DEFERRED_INIT, avoid setting
+ cannot_scalarize_away_bitmap. */
+ if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
+ ret |= !!build_access_from_expr_1 (t, stmt, true);
+ else
+ ret |= build_access_from_expr (t, stmt, true);
+ }
break;
case GIMPLE_ASM:
reject (var, "has incomplete type");
return false;
}
- if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
+ if (!tree_fits_shwi_p (TYPE_SIZE (type)))
{
reject (var, "type size not fixed");
return false;
}
- if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
+ if (tree_to_shwi (TYPE_SIZE (type)) == 0)
{
reject (var, "type size is zero");
return false;
DECL_HAS_DEBUG_EXPR_P (repl) = 1;
}
if (access->grp_no_warning)
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
else
- TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
+ copy_warning (repl, access->base);
}
else
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
if (dump_file)
{
return true;
}
+/* Return true if ACC or any of its subaccesses has the grp_write flag set. */
+
+static bool
+access_or_its_child_written (struct access *acc)
+{
+ if (acc->grp_write)
+ return true;
+ for (struct access *sub = acc->first_child; sub; sub = sub->next_sibling)
+ if (access_or_its_child_written (sub))
+ return true;
+ return false;
+}
+
/* Propagate subaccesses and grp_write flags of RACC across an assignment link
to LACC. Enqueue sub-accesses as necessary so that the write flag is
propagated transitively. Return true if anything changed. Additionally, if
{
/* We are about to change the access type from aggregate to scalar,
so we need to put the reverse flag onto the access, if any. */
- const bool reverse = TYPE_REVERSE_STORAGE_ORDER (lacc->type);
+ const bool reverse
+ = TYPE_REVERSE_STORAGE_ORDER (lacc->type)
+ && !POINTER_TYPE_P (racc->type)
+ && !VECTOR_TYPE_P (racc->type);
tree t = lacc->base;
lacc->type = racc->type;
if (rchild->grp_unscalarizable_region
|| !budget_for_propagation_access (lacc->base))
{
- if (rchild->grp_write && !lacc->grp_write)
+ if (!lacc->grp_write && access_or_its_child_written (rchild))
{
ret = true;
subtree_mark_written_and_rhs_enqueue (lacc);
}
else
{
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
if (access->grp_partial_lhs)
repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
!insert_after,
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
}
- if (access->first_child)
+ if (access->first_child && !TREE_READONLY (access->base))
{
HOST_WIDE_INT start_offset, chunk_size;
if (bfr
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
tree src;
+ /* If the RHS is a load from a constant, we do not need to (and must not)
+ flush replacements to it and can use it directly as if we did. */
+ if (TREE_READONLY (sad->top_racc->base))
+ {
+ sad->refreshed = SRA_UDH_RIGHT;
+ return;
+ }
if (sad->top_racc->grp_unscalarized_data)
{
src = sad->assignment_rhs;
return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
+
+/* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
+   of accesses within a subtree ACCESS; all its children, siblings and their
+   children are to be processed.
+   GSI is a statement iterator used to place the new statements.  */
+
+static void
+generate_subtree_deferred_init (struct access *access,
+				tree init_type,
+				tree is_vla,
+				gimple_stmt_iterator *gsi,
+				location_t loc)
+{
+  do
+    {
+      if (access->grp_to_be_replaced)
+	{
+	  /* Build REPL = .DEFERRED_INIT (size, init_type, is_vla) for the
+	     scalar replacement of this access.  */
+	  tree repl = get_access_replacement (access);
+	  gimple *call
+	    = gimple_build_call_internal (IFN_DEFERRED_INIT, 3,
+					  TYPE_SIZE_UNIT (TREE_TYPE (repl)),
+					  init_type, is_vla);
+	  gimple_call_set_lhs (call, repl);
+	  gsi_insert_before (gsi, call, GSI_SAME_STMT);
+	  update_stmt (call);
+	  gimple_set_location (call, loc);
+	  sra_stats.subtree_deferred_init++;
+	}
+      if (access->first_child)
+	generate_subtree_deferred_init (access->first_child, init_type,
+					is_vla, gsi, loc);
+
+      access = access->next_sibling;
+    }
+  while (access);
+}
+
+/* For a call to .DEFERRED_INIT:
+ var = .DEFERRED_INIT (size_of_var, init_type, is_vla);
+ examine the LHS variable VAR and replace it with a scalar replacement if
+ there is one, also replace the RHS call to a call to .DEFERRED_INIT of
+ the corresponding scalar replacement variable. Examine the subtree and
+ do the scalar replacements in the subtree too. STMT is the call, GSI is
+ the statement iterator to place newly created statement. */
+
+static enum assignment_mod_result
+sra_modify_deferred_init (gimple *stmt, gimple_stmt_iterator *gsi)
+{
+ tree lhs = gimple_call_lhs (stmt);
+ tree init_type = gimple_call_arg (stmt, 1);
+ tree is_vla = gimple_call_arg (stmt, 2);
+
+ struct access *lhs_access = get_access_for_expr (lhs);
+ if (!lhs_access)
+ return SRA_AM_NONE;
+
+ location_t loc = gimple_location (stmt);
+
+ if (lhs_access->grp_to_be_replaced)
+ {
+ /* The LHS is scalarized in full: rewrite the call in place to
+ initialize the replacement, updating the size argument to match. */
+ tree lhs_repl = get_access_replacement (lhs_access);
+ gimple_call_set_lhs (stmt, lhs_repl);
+ tree arg0_repl = TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl));
+ gimple_call_set_arg (stmt, 0, arg0_repl);
+ sra_stats.deferred_init++;
+ gcc_assert (!lhs_access->first_child);
+ return SRA_AM_MODIFIED;
+ }
+
+ if (lhs_access->first_child)
+ generate_subtree_deferred_init (lhs_access->first_child,
+ init_type, is_vla, gsi, loc);
+ if (lhs_access->grp_covered)
+ {
+ /* All parts of the aggregate are covered by replacements, so the
+ original call is now redundant and can be removed. */
+ unlink_stmt_vdef (stmt);
+ gsi_remove (gsi, true);
+ release_defs (stmt);
+ return SRA_AM_REMOVED;
+ }
+
+ return SRA_AM_MODIFIED;
+}
+
/* Examine both sides of the assignment statement pointed to by STMT, replace
them with a scalar replacement if there is one and generate copying of
replacements if scalarized aggregates have been used in the assignment. GSI
|| contains_vce_or_bfcref_p (lhs)
|| stmt_ends_bb_p (stmt))
{
- /* No need to copy into a constant-pool, it comes pre-initialized. */
- if (access_has_children_p (racc) && !constant_decl_p (racc->base))
+ /* No need to copy into a constant, it comes pre-initialized. */
+ if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
gsi, false, false, loc);
if (access_has_children_p (lacc))
}
/* Restore the aggregate RHS from its components so the
prevailing aggregate copy does the right thing. */
- if (access_has_children_p (racc))
+ if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
gsi, false, false, loc);
/* Re-load the components of the aggregate copy destination.
break;
case GIMPLE_CALL:
- /* Operands must be processed before the lhs. */
- for (i = 0; i < gimple_call_num_args (stmt); i++)
+ /* Handle calls to .DEFERRED_INIT specially. */
+ if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
{
- t = gimple_call_arg_ptr (stmt, i);
- modified |= sra_modify_expr (t, &gsi, false);
+ assign_result = sra_modify_deferred_init (stmt, &gsi);
+ modified |= assign_result == SRA_AM_MODIFIED;
+ deleted = assign_result == SRA_AM_REMOVED;
}
-
- if (gimple_call_lhs (stmt))
+ else
{
- t = gimple_call_lhs_ptr (stmt);
- modified |= sra_modify_expr (t, &gsi, true);
+ /* Operands must be processed before the lhs. */
+ for (i = 0; i < gimple_call_num_args (stmt); i++)
+ {
+ t = gimple_call_arg_ptr (stmt, i);
+ modified |= sra_modify_expr (t, &gsi, false);
+ }
+
+ if (gimple_call_lhs (stmt))
+ {
+ t = gimple_call_lhs_ptr (stmt);
+ modified |= sra_modify_expr (t, &gsi, true);
+ }
}
break;