for the inner loop if collapse_bb is non-NULL.
(expand_omp_simd): Use cont_bb rather than e->dest as latch.
-2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de>
-
- Remove parameter keep_aligning from get_inner_reference.
- * tree.h (get_inner_reference): Adjust header.
- * expr.c (get_inner_reference): Remove parameter keep_aligning.
- (get_bit_range, expand_assignment,
- expand_expr_addr_expr_1, expand_expr_real_1): Adjust.
- * asan.c (instrument_derefs): Adjust.
- * builtins.c (get_object_alignment_2): Adjust. Remove handling of
- VIEW_CONVERT_EXPR.
- * cfgexpand.c (expand_debug_expr): Adjust.
- * dbxout.c (dbxout_expand_expr): Adjust.
- * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
- loc_list_from_tree, fortran_common): Adjust.
- * fold-const.c (optimize_bit_field_compare,
- decode_field_reference, fold_unary_loc, fold_comparison,
- split_address_to_core_and_offset): Adjust.
- * gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust.
- * simplify-rtx.c (delegitimize_mem_from_attrs): Adjust.
- * tree-affine.c (tree_to_aff_combination,
- get_inner_reference_aff): Adjust.
- * tree-data-ref.c (split_constant_offset_1,
- dr_analyze_innermost): Adjust.
- * tree-vect-data-refs.c (vect_check_gather,
- vect_analyze_data_refs): Adjust.
- * tree-scalar-evolution.c (interpret_rhs_expr): Adjust.
- * tree-ssa-loop-ivopts.c (may_be_unaligned_p,
- split_address_cost): Adjust.
- * tsan.c (instrument_expr): Adjust.
- * config/mips/mips.c (r10k_safe_mem_expr_p): Adjust.
-
2013-11-26 Yufeng Zhang <yufeng.zhang@arm.com>
* config/arm/arm.c (arm_legitimize_address): Check xop1 is not
-2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de>
-
- Remove parameter keep_aligning from get_inner_reference.
- * gcc-interface/decl.c (elaborate_expression_1): Adjust.
- * gcc-interface/trans.c (Attribute_to_gnu): Adjust.
- * gcc-interface/utils2.c (build_unary_op): Adjust.
-
2013-11-23 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/trans.c (Loop_Statement_to_gnu): Set TREE_SIDE_EFFECTS
int unsignedp, volatilep;
inner = get_inner_reference (inner, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
/* If the offset is variable, err on the side of caution. */
if (offset)
inner = NULL_TREE;
&& TREE_CODE (gnu_prefix) == FIELD_DECL));
get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
{
int unsignedp, volatilep;
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep,
+ false);
/* If INNER is a padding type whose field has a self-referential
size, convert to that inner type. We know the offset is zero
enum machine_mode mode;
int volatilep = 0, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
if (bitpos % (size_in_bytes * BITS_PER_UNIT)
|| bitsize != size_in_bytes * BITS_PER_UNIT)
{
/* Get the innermost object and the constant (bitpos) and possibly
variable (offset) offset of the access. */
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, true);
/* Extract alignment information from the innermost object and
possibly adjust bitpos and offset. */
align = DECL_ALIGN (exp);
known_alignment = true;
}
+ else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+ {
+ align = TYPE_ALIGN (TREE_TYPE (exp));
+ }
else if (TREE_CODE (exp) == INDIRECT_REF
|| TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
tree offset;
int volatilep = 0;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
+ &mode1, &unsignedp, &volatilep, false);
rtx orig_op0;
if (bitsize == 0)
int unsigned_p, volatile_p;
inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
- &unsigned_p, &volatile_p);
+ &unsigned_p, &volatile_p, false);
if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
return false;
rtx x;
tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, true);
x = dbxout_expand_expr (tem);
if (x == NULL || !MEM_P (x))
obj = get_inner_reference (TREE_OPERAND (loc, 0),
&bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
STRIP_NOPS (obj);
if (bitpos % BITS_PER_UNIT)
{
int unsignedp, volatilep = 0;
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
gcc_assert (obj != loc);
return NULL_TREE;
cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, true);
if (cvar == NULL_TREE
|| TREE_CODE (cvar) != VAR_DECL
int unsignedp;
int volatilep = 0;
get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
- &roffset, &rmode, &unsignedp, &volatilep);
+ &roffset, &rmode, &unsignedp, &volatilep, false);
if ((rbitpos % BITS_PER_UNIT) != 0)
{
*bitstart = *bitend = 0;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, true);
/* Make sure bitpos is not negative, it can wreak havoc later. */
if (bitpos < 0)
If the field describes a variable-sized object, *PMODE is set to
BLKmode and *PBITSIZE is set to -1. An access cannot be made in
- this case, but the address of the object can be found. */
+ this case, but the address of the object can be found.
+
+ If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
+ look through nodes that serve as markers of a greater alignment than
+ the one that can be deduced from the expression. These nodes make it
+ possible for front-ends to prevent temporaries from being created by
+ the middle-end on alignment considerations. For that purpose, the
+ normal operating mode at high-level is to always pass FALSE so that
+ the ultimate containing object is really returned; moreover, the
+ associated predicate handled_component_p will always return TRUE
+ on these nodes, thus indicating that they are essentially handled
+ by get_inner_reference. TRUE should only be passed when the caller
+ is scanning the expression in order to build another representation
+ and specifically knows how to handle these nodes; as such, this is
+ the normal operating mode in the RTL expanders. */
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
HOST_WIDE_INT *pbitpos, tree *poffset,
enum machine_mode *pmode, int *punsignedp,
- int *pvolatilep)
+ int *pvolatilep, bool keep_aligning)
{
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
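For reference, a minimal caller sketch following the restored comment above (illustrative only, not part of the patch; the helper name is made up): analysis-style callers pass false so that alignment-marker nodes are looked through and the ultimate containing object is returned.

/* Illustrative sketch, not part of the patch: a hypothetical analysis-pass
   helper that passes false for KEEP_ALIGNING, so alignment markers are
   looked through and the ultimate containing object is returned.  */
static bool
example_is_simple_decl_access (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode, &unsignedp, &volatilep,
                                   /*keep_aligning=*/false);

  /* If the offset is variable or the position is not byte-aligned,
     err on the side of caution.  */
  return (DECL_P (base)
          && offset == NULL_TREE
          && bitpos % BITS_PER_UNIT == 0);
}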
break;
case VIEW_CONVERT_EXPR:
+ if (keep_aligning && STRICT_ALIGNMENT
+ && (TYPE_ALIGN (TREE_TYPE (exp))
+ > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ < BIGGEST_ALIGNMENT)
+ && (TYPE_ALIGN_OK (TREE_TYPE (exp))
+ || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ goto done;
break;
case MEM_REF:
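Conversely, a sketch of an expander-style caller (again illustrative, with a made-up name) that passes true: on a STRICT_ALIGNMENT target the walk stops at a VIEW_CONVERT_EXPR acting as an alignment marker, per the case restored just above, so the caller sees the marker and can honor its alignment instead of creating an under-aligned temporary.

/* Illustrative sketch, not part of the patch: a hypothetical expander-style
   helper that passes true for KEEP_ALIGNING and honors an alignment-marking
   VIEW_CONVERT_EXPR, mirroring what get_object_alignment_2 does earlier in
   this patch.  */
static unsigned int
example_expand_time_alignment (tree exp)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                   &mode, &unsignedp, &volatilep,
                                   /*keep_aligning=*/true);

  /* If the walk stopped at an alignment marker, use the marker's type
     alignment; otherwise fall back to the declaration or type.  */
  if (TREE_CODE (base) == VIEW_CONVERT_EXPR)
    return TYPE_ALIGN (TREE_TYPE (base));
  return DECL_P (base) ? DECL_ALIGN (base) : TYPE_ALIGN (TREE_TYPE (base));
}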
they won't change the final object whose address will be returned
(they actually exist only for that purpose). */
inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
+ &mode1, &unsignedp, &volatilep, false);
break;
}
tree offset;
int volatilep = 0, must_force_mem;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
+ &mode1, &unsignedp, &volatilep, true);
rtx orig_op0, memloc;
bool mem_attrs_from_type = false;
int volatilep = 0;
tree tem
= get_inner_reference (treeop0, &bitsize, &bitpos,
- &offset, &mode1, &unsignedp, &volatilep);
+ &offset, &mode1, &unsignedp, &volatilep,
+ true);
rtx orig_op0;
/* ??? We should work harder and deal with non-zero offsets. */
do anything if the inner expression is a PLACEHOLDER_EXPR since we
then will no longer be able to replace it. */
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
- &lunsignedp, &lvolatilep);
+ &lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
/* If this is not a constant, we can only do something if bit positions,
sizes, and signedness are the same. */
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
- &runsignedp, &rvolatilep);
+ &runsignedp, &rvolatilep, false);
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
}
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
- punsignedp, pvolatilep);
+ punsignedp, pvolatilep, false);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
/* If the reference was to a (constant) zero offset, we can use
the address of the base if it has the same base type
as the result type and the pointer type is unqualified. */
{
base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
&bitsize, &bitpos0, &offset0, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
if (TREE_CODE (base0) == INDIRECT_REF)
base0 = TREE_OPERAND (base0, 0);
else
{
base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
&bitsize, &bitpos1, &offset1, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
if (TREE_CODE (base1) == INDIRECT_REF)
base1 = TREE_OPERAND (base1, 0);
else
if (TREE_CODE (exp) == ADDR_EXPR)
{
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
- poffset, &mode, &unsignedp, &volatilep);
+ poffset, &mode, &unsignedp, &volatilep,
+ false);
core = build_fold_addr_expr_loc (loc, core);
}
else
return;
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
index = double_int::from_uhwi (bitpos);
if (!restructure_reference (&base, &offset, &index, &type))
int unsignedp, volatilep = 0;
decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
if (bitsize != GET_MODE_BITSIZE (mode)
|| (bitpos % BITS_PER_UNIT)
|| (toffset && !tree_fits_shwi_p (toffset)))
return;
}
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &volatilep);
+ &toffset, &mode, &unsignedp, &volatilep,
+ false);
if (bitpos % BITS_PER_UNIT != 0)
break;
aff_combination_const (comb, type,
int uns, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
- &uns, &vol);
+ &uns, &vol, false);
tree base_addr = build_fold_addr_expr (base);
/* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
op0 = TREE_OPERAND (op0, 0);
base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep);
+ &pmode, &punsignedp, &pvolatilep, false);
if (pbitpos % BITS_PER_UNIT != 0)
return false;
fprintf (dump_file, "analyze_innermost: ");
base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset,
- &pmode, &punsignedp, &pvolatilep);
+ &pmode, &punsignedp, &pvolatilep, false);
gcc_assert (base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
base = get_inner_reference (TREE_OPERAND (rhs1, 0),
&bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
if (TREE_CODE (base) == MEM_REF)
{
does to check whether the object must be loaded by parts when
STRICT_ALIGNMENT is true. */
base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, true);
base_type = TREE_TYPE (base);
base_align = get_object_alignment (base);
base_align = MAX (base_align, TYPE_ALIGN (base_type));
int unsignedp, volatilep;
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep);
+ &unsignedp, &volatilep, false);
if (toffset != 0
|| bitpos % BITS_PER_UNIT != 0
SSA_NAME OFF and put the loop invariants into a tree BASE
that can be gimplified before the loop. */
base = get_inner_reference (DR_REF (dr), &pbitsize, &pbitpos, &off,
- &pmode, &punsignedp, &pvolatilep);
+ &pmode, &punsignedp, &pvolatilep, false);
gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0);
if (TREE_CODE (base) == MEM_REF)
}
outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
- &poffset, &pmode, &punsignedp, &pvolatilep);
+ &poffset, &pmode, &punsignedp, &pvolatilep, false);
gcc_assert (outer_base != NULL_TREE);
if (pbitpos % BITS_PER_UNIT != 0)
look for the ultimate containing object, which is returned and specify
the access position and size. */
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
- tree *, enum machine_mode *, int *, int *);
+ tree *, enum machine_mode *, int *, int *,
+ bool);
/* Return a tree representing the lower bound of the array mentioned in
EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
enum machine_mode mode;
int volatilep = 0, unsignedp = 0;
base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep, false);
/* No need to instrument accesses to decls that don't escape,
they can't escape to other threads then. */