/* Scalar Replacement of Aggregates (SRA) converts some structure
references into scalar references, exposing them to the scalar
optimizers.
- Copyright (C) 2008-2020 Free Software Foundation, Inc.
+ Copyright (C) 2008-2021 Free Software Foundation, Inc.
Contributed by Martin Jambor <mjambor@suse.cz>
This file is part of GCC.
/* Base (tree) -> Vector (vec<access_p> *) map. */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
+/* Hash to limit creation of artificial accesses.  */
+static hash_map<tree, unsigned> *propagation_budget;
+
/* Candidate hash table helpers. */
struct uid_decl_hasher : nofree_ptr_hash <tree_node>
/* Number of components created when splitting aggregate parameters. */
int param_reductions_created;
+
+ /* Number of deferred_init calls that are modified. */
+ int deferred_init;
+
+ /* Number of deferred_init calls that are created by
+ generate_subtree_deferred_init. */
+ int subtree_deferred_init;
} sra_stats;
static void
if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
return NULL;
+ if (write && TREE_READONLY (base))
+ {
+ disqualify_candidate (base, "Encountered a store to a read-only decl.");
+ return NULL;
+ }
+
HOST_WIDE_INT offset, size, max_size;
if (!poffset.is_constant (&offset)
|| !psize.is_constant (&size)
}
if (size == 0)
return NULL;
+ if (offset < 0)
+ {
+ disqualify_candidate (base, "Encountered a negative offset access.");
+ return NULL;
+ }
if (size < 0)
{
disqualify_candidate (base, "Encountered an unconstrained access.");
return NULL;
}
+ if (offset + size > tree_to_shwi (DECL_SIZE (base)))
+ {
+ disqualify_candidate (base, "Encountered an access beyond the base.");
+ return NULL;
+ }
access = create_access_1 (base, offset, size);
access->expr = expr;
t = gimple_call_lhs (stmt);
if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
- ret |= build_access_from_expr (t, stmt, true);
+ {
+ /* If the STMT is a call to DEFERRED_INIT, avoid setting
+ cannot_scalarize_away_bitmap. */
+ if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
+ ret |= !!build_access_from_expr_1 (t, stmt, true);
+ else
+ ret |= build_access_from_expr (t, stmt, true);
+ }
break;
case GIMPLE_ASM:
struct access *model, gimple_stmt_iterator *gsi,
bool insert_after)
{
+ gcc_assert (offset >= 0);
if (TREE_CODE (model->expr) == COMPONENT_REF
&& DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
{
reject (var, "has incomplete type");
return false;
}
- if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
+ if (!tree_fits_shwi_p (TYPE_SIZE (type)))
{
reject (var, "type size not fixed");
return false;
}
- if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
+ if (tree_to_shwi (TYPE_SIZE (type)) == 0)
{
reject (var, "type size is zero");
return false;
/* Create a variable for the given ACCESS which determines the type, name and a
few other properties. Return the variable declaration and store it also to
ACCESS->replacement. REG_TREE is used when creating a declaration to base a
- default-definition SSA name on on in order to facilitate an uninitialized
+ default-definition SSA name on in order to facilitate an uninitialized
warning. It is used instead of the actual ACCESS type if that is not of a
gimple register type. */
variant. This avoids issues with weirdo ABIs like AAPCS. */
repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
TYPE_QUALS (type)), "SR");
- if (TREE_CODE (type) == COMPLEX_TYPE
- || TREE_CODE (type) == VECTOR_TYPE)
- {
- if (!access->grp_partial_lhs)
- DECL_GIMPLE_REG_P (repl) = 1;
- }
- else if (access->grp_partial_lhs
- && is_gimple_reg_type (type))
- TREE_ADDRESSABLE (repl) = 1;
+ if (access->grp_partial_lhs
+ && is_gimple_reg_type (type))
+ DECL_NOT_GIMPLE_REG_P (repl) = 1;
DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
DECL_ARTIFICIAL (repl) = 1;
DECL_HAS_DEBUG_EXPR_P (repl) = 1;
}
if (access->grp_no_warning)
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
else
- TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
+ copy_warning (repl, access->base);
}
else
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
if (dump_file)
{
print_generic_expr (dump_file, access->base);
fprintf (dump_file, " offset: %u, size: %u: ",
(unsigned) access->offset, (unsigned) access->size);
- print_generic_expr (dump_file, repl);
+ print_generic_expr (dump_file, repl, TDF_UID);
fprintf (dump_file, "\n");
}
}
gcc_assert (base == first_base);
gcc_assert (offset == access->offset);
gcc_assert (access->grp_unscalarizable_region
+ || access->grp_total_scalarization
|| size == max_size);
- gcc_assert (max_size == access->size);
+ gcc_assert (access->grp_unscalarizable_region
+ || !is_gimple_reg_type (access->type)
+ || size == access->size);
gcc_assert (reverse == access->reverse);
if (access->first_child)
subtree_mark_written_and_rhs_enqueue (child);
}
+/* If there is still budget to create a propagation access for DECL, return
+ true and decrement the budget. Otherwise return false. */
+
+static bool
+budget_for_propagation_access (tree decl)
+{
+ /* B is the remaining budget for DECL; a DECL not yet present in the
+ propagation_budget map starts from the param_sra_max_propagations cap. */
+ unsigned b, *p = propagation_budget->get (decl);
+ if (p)
+ b = *p;
+ else
+ b = param_sra_max_propagations;
+
+ if (b == 0)
+ return false;
+ b--;
+
+ /* Note the exhaustion in the dump exactly once, when the last unit of
+ budget for DECL is consumed. */
+ if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "The propagation budget of ");
+ print_generic_expr (dump_file, decl);
+ fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
+ }
+ propagation_budget->put (decl, b);
+ return true;
+}
+
+/* Return true if ACC or any of its subaccesses has grp_write set. */
+
+static bool
+access_or_its_child_written (struct access *acc)
+{
+ if (acc->grp_write)
+ return true;
+ /* Recurse so that the entire subtree rooted at ACC is examined. */
+ for (struct access *sub = acc->first_child; sub; sub = sub->next_sibling)
+ if (access_or_its_child_written (sub))
+ return true;
+ return false;
+}
+
/* Propagate subaccesses and grp_write flags of RACC across an assignment link
to LACC. Enqueue sub-accesses as necessary so that the write flag is
propagated transitively. Return true if anything changed. Additionally, if
}
if (!lacc->first_child && !racc->first_child)
{
+ /* We are about to change the access type from aggregate to scalar,
+ so we need to put the reverse flag onto the access, if any. */
+ const bool reverse
+ = TYPE_REVERSE_STORAGE_ORDER (lacc->type)
+ && !POINTER_TYPE_P (racc->type)
+ && !VECTOR_TYPE_P (racc->type);
tree t = lacc->base;
lacc->type = racc->type;
lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
lacc->base, lacc->offset,
racc, NULL, false);
+ if (TREE_CODE (lacc->expr) == MEM_REF)
+ REF_REVERSE_STORAGE_ORDER (lacc->expr) = reverse;
lacc->grp_no_warning = true;
lacc->grp_same_access_path = false;
}
+ lacc->reverse = reverse;
}
return ret;
}
continue;
}
- if (rchild->grp_unscalarizable_region)
+ if (rchild->grp_unscalarizable_region
+ || !budget_for_propagation_access (lacc->base))
{
- if (rchild->grp_write && !lacc->grp_write)
+ if (!lacc->grp_write && access_or_its_child_written (rchild))
{
ret = true;
subtree_mark_written_and_rhs_enqueue (lacc);
if (lchild->grp_unscalarizable_region
|| child_would_conflict_in_acc (racc, norm_offset, lchild->size,
- &matching_acc))
+ &matching_acc)
+ || !budget_for_propagation_access (racc->base))
{
if (matching_acc
&& propagate_subaccesses_from_lhs (lchild, matching_acc))
static void
propagate_all_subaccesses (void)
{
+ propagation_budget = new hash_map<tree, unsigned>;
while (rhs_work_queue_head)
{
struct access *racc = pop_access_from_rhs_work_queue ();
add_access_to_lhs_work_queue (racc);
}
}
+ delete propagation_budget;
}
/* Return true if the forest beginning with ROOT does not contain
}
else
{
- TREE_NO_WARNING (repl) = 1;
+ suppress_warning (repl /* Be more selective! */);
if (access->grp_partial_lhs)
repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
!insert_after,
location_t loc;
struct access *access;
tree type, bfr, orig_expr;
+ bool partial_cplx_access = false;
if (TREE_CODE (*expr) == BIT_FIELD_REF)
{
bfr = NULL_TREE;
if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
- expr = &TREE_OPERAND (*expr, 0);
+ {
+ expr = &TREE_OPERAND (*expr, 0);
+ partial_cplx_access = true;
+ }
access = get_access_for_expr (*expr);
if (!access)
return false;
be accessed as a different type too, potentially creating a need for
type conversion (see PR42196) and when scalarized unions are involved
in assembler statements (see PR42398). */
- if (!useless_type_conversion_p (type, access->type))
+ if (!bfr && !useless_type_conversion_p (type, access->type))
{
tree ref;
ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
- if (write)
+ if (partial_cplx_access)
+ {
+ /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
+ the case of a write because in such case the replacement cannot
+ be a gimple register. In the case of a load, we have to
+ differentiate between a register and a non-register
+ replacement. */
+ tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
+ gcc_checking_assert (!write || access->grp_partial_lhs);
+ if (!access->grp_partial_lhs)
+ {
+ tree tmp = make_ssa_name (type);
+ gassign *stmt = gimple_build_assign (tmp, t);
+ /* This is always a read. */
+ gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ t = tmp;
+ }
+ *expr = t;
+ }
+ else if (write)
{
gassign *stmt;
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
}
- if (access->first_child)
+ if (access->first_child && !TREE_READONLY (access->base))
{
HOST_WIDE_INT start_offset, chunk_size;
if (bfr
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
tree src;
+ /* If the RHS is a load from a constant, we do not need to (and must not)
+ flush replacements to it and can use it directly as if we did. */
+ if (TREE_READONLY (sad->top_racc->base))
+ {
+ sad->refreshed = SRA_UDH_RIGHT;
+ return;
+ }
if (sad->top_racc->grp_unscalarized_data)
{
src = sad->assignment_rhs;
return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
+
+/* Generate statements to call .DEFERRED_INIT to initialize scalar replacements
+ of accesses within a subtree ACCESS; all its children, siblings and their
+ children are to be processed.
+ GSI is a statement iterator used to place the new statements. */
+static void
+generate_subtree_deferred_init (struct access *access,
+ tree init_type,
+ tree is_vla,
+ gimple_stmt_iterator *gsi,
+ location_t loc)
+{
+ do
+ {
+ if (access->grp_to_be_replaced)
+ {
+ /* Emit repl = .DEFERRED_INIT (size_of_repl, INIT_TYPE, IS_VLA)
+ before the statement GSI points at. */
+ tree repl = get_access_replacement (access);
+ gimple *call
+ = gimple_build_call_internal (IFN_DEFERRED_INIT, 3,
+ TYPE_SIZE_UNIT (TREE_TYPE (repl)),
+ init_type, is_vla);
+ gimple_call_set_lhs (call, repl);
+ gsi_insert_before (gsi, call, GSI_SAME_STMT);
+ update_stmt (call);
+ gimple_set_location (call, loc);
+ sra_stats.subtree_deferred_init++;
+ }
+ /* Process children first, then continue with the next sibling. */
+ if (access->first_child)
+ generate_subtree_deferred_init (access->first_child, init_type,
+ is_vla, gsi, loc);
+
+ access = access ->next_sibling;
+ }
+ while (access);
+}
+
+/* For a call to .DEFERRED_INIT:
+ var = .DEFERRED_INIT (size_of_var, init_type, is_vla);
+ examine the LHS variable VAR and replace it with a scalar replacement if
+ there is one, also replace the RHS call to a call to .DEFERRED_INIT of
+ the corresponding scalar replacement variable. Examine the subtree and
+ do the scalar replacements in the subtree too. STMT is the call, GSI is
+ the statement iterator to place newly created statement. */
+
+static enum assignment_mod_result
+sra_modify_deferred_init (gimple *stmt, gimple_stmt_iterator *gsi)
+{
+ tree lhs = gimple_call_lhs (stmt);
+ tree init_type = gimple_call_arg (stmt, 1);
+ tree is_vla = gimple_call_arg (stmt, 2);
+
+ struct access *lhs_access = get_access_for_expr (lhs);
+ if (!lhs_access)
+ return SRA_AM_NONE;
+
+ location_t loc = gimple_location (stmt);
+
+ /* If the whole LHS is replaced by a single scalar, redirect the call's
+ LHS to the replacement and make the size argument match its type. */
+ if (lhs_access->grp_to_be_replaced)
+ {
+ tree lhs_repl = get_access_replacement (lhs_access);
+ gimple_call_set_lhs (stmt, lhs_repl);
+ tree arg0_repl = TYPE_SIZE_UNIT (TREE_TYPE (lhs_repl));
+ gimple_call_set_arg (stmt, 0, arg0_repl);
+ sra_stats.deferred_init++;
+ gcc_assert (!lhs_access->first_child);
+ return SRA_AM_MODIFIED;
+ }
+
+ /* Otherwise emit .DEFERRED_INIT calls for the individual scalar
+ replacements in the subtree; if the access is fully covered by them,
+ the original aggregate call can be removed entirely. */
+ if (lhs_access->first_child)
+ generate_subtree_deferred_init (lhs_access->first_child,
+ init_type, is_vla, gsi, loc);
+ if (lhs_access->grp_covered)
+ {
+ unlink_stmt_vdef (stmt);
+ gsi_remove (gsi, true);
+ release_defs (stmt);
+ return SRA_AM_REMOVED;
+ }
+
+ return SRA_AM_MODIFIED;
+}
+
/* Examine both sides of the assignment statement pointed to by STMT, replace
them with a scalar replacement if there is one and generate copying of
replacements if scalarized aggregates have been used in the assignment. GSI
|| contains_vce_or_bfcref_p (lhs)
|| stmt_ends_bb_p (stmt))
{
- /* No need to copy into a constant-pool, it comes pre-initialized. */
- if (access_has_children_p (racc) && !constant_decl_p (racc->base))
+ /* No need to copy into a constant, it comes pre-initialized. */
+ if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
gsi, false, false, loc);
if (access_has_children_p (lacc))
}
/* Restore the aggregate RHS from its components so the
prevailing aggregate copy does the right thing. */
- if (access_has_children_p (racc))
+ if (access_has_children_p (racc) && !TREE_READONLY (racc->base))
generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
gsi, false, false, loc);
/* Re-load the components of the aggregate copy destination.
break;
case GIMPLE_CALL:
- /* Operands must be processed before the lhs. */
- for (i = 0; i < gimple_call_num_args (stmt); i++)
+ /* Handle calls to .DEFERRED_INIT specially. */
+ if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
{
- t = gimple_call_arg_ptr (stmt, i);
- modified |= sra_modify_expr (t, &gsi, false);
+ assign_result = sra_modify_deferred_init (stmt, &gsi);
+ modified |= assign_result == SRA_AM_MODIFIED;
+ deleted = assign_result == SRA_AM_REMOVED;
}
-
- if (gimple_call_lhs (stmt))
+ else
{
- t = gimple_call_lhs_ptr (stmt);
- modified |= sra_modify_expr (t, &gsi, true);
+ /* Operands must be processed before the lhs. */
+ for (i = 0; i < gimple_call_num_args (stmt); i++)
+ {
+ t = gimple_call_arg_ptr (stmt, i);
+ modified |= sra_modify_expr (t, &gsi, false);
+ }
+
+ if (gimple_call_lhs (stmt))
+ {
+ t = gimple_call_lhs_ptr (stmt);
+ modified |= sra_modify_expr (t, &gsi, true);
+ }
}
break;