/* Memory address lowering and addressing mode selection.
- Copyright (C) 2004-2017 Free Software Foundation, Inc.
+ Copyright (C) 2004-2020 Free Software Foundation, Inc.
This file is part of GCC.
? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
: NULL_RTX);
+ /* addr->base could be an SSA_NAME that was set to a constant value. The
+ call to expand_expr may expose that constant. If so, fold the value
+ into OFF and clear BSE. Otherwise we may later try to pull a mode from
+ BSE to generate a REG, which won't work with constants because they
+ are modeless. */
+ if (bse && GET_CODE (bse) == CONST_INT)
+ {
+ if (off)
+ off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
+ else
+ off = bse;
+ gcc_assert (GET_CODE (off) == CONST_INT);
+ bse = NULL_RTX;
+ }
gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
if (pointer_mode != address_mode)
address = convert_memory_address (address_mode, address);
true, GSI_SAME_STMT);
}
+/* Return true if the OFFSET in PARTS is the only thing that is making
+ it an invalid address for type TYPE. */
+
+static bool
+mem_ref_valid_without_offset_p (tree type, mem_address parts)
+{
+ /* PARTS is taken by value, so the adjustments below do not affect
+ the caller's copy. If there is no base, promote the offset to be
+ the base so the remaining parts still describe an address. */
+ if (!parts.base)
+ parts.base = parts.offset;
+ /* Drop the offset and ask the target whether what is left forms a
+ valid address for TYPE's mode and address space. */
+ parts.offset = NULL_TREE;
+ return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
+}
+
+/* Fold PARTS->offset into PARTS->base, so that there is no longer
+ a separate offset. Emit any new instructions before GSI. */
+
+static void
+add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
+{
+ tree tmp = parts->offset;
+ if (parts->base)
+ {
+ /* Build base + offset as a POINTER_PLUS expression and force it
+ into a form usable as a MEM_REF address, inserting any new
+ statements before GSI. */
+ tmp = fold_build_pointer_plus (parts->base, tmp);
+ tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
+ NULL_TREE, true, GSI_SAME_STMT);
+ }
+ /* The combined value (or the bare offset when there was no base)
+ becomes the new base; the offset is now folded in. */
+ parts->base = tmp;
+ parts->offset = NULL_TREE;
+}
+
/* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
computations are emitted in front of GSI. TYPE is the mode
of created memory reference. IV_CAND is the selected iv candidate in ADDR,
if (parts.step && !integer_onep (parts.step))
{
gcc_assert (parts.index);
+ if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
+ {
+ add_offset_to_base (gsi, &parts);
+ mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
+ gcc_assert (mem_ref);
+ return mem_ref;
+ }
+
parts.index = force_gimple_operand_gsi (gsi,
fold_build2 (MULT_EXPR, sizetype,
parts.index, parts.step),
[base']. */
if (parts.offset && !integer_zerop (parts.offset))
{
- tmp = parts.offset;
- parts.offset = NULL_TREE;
- /* Add offset to base. */
- if (parts.base)
- {
- tmp = fold_build_pointer_plus (parts.base, tmp);
- tmp = force_gimple_operand_gsi_1 (gsi, tmp,
- is_gimple_mem_ref_addr,
- NULL_TREE, true, GSI_SAME_STMT);
- }
- parts.base = tmp;
-
+ add_offset_to_base (gsi, &parts);
mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
if (mem_ref)
return mem_ref;
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref))
< align)))))
{
- unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
- - mem_ref_offset (new_ref).to_short_addr ());
+ poly_uint64 inc = (mem_ref_offset (old_ref)
+ - mem_ref_offset (new_ref)).force_uhwi ();
adjust_ptr_info_misalignment (new_pi, inc);
}
else
return new_ref;
}
+/* Return the preferred index scale factor for accessing memory of mode
+ MEM_MODE in the address space of pointer BASE. Assume that we're
+ optimizing for speed if SPEED is true and for size otherwise.
+ Returns GET_MODE_UNIT_SIZE (MEM_MODE) when a scaled index is cheaper
+ than an unscaled one on this target, and 1 otherwise. */
+unsigned int
+preferred_mem_scale_factor (tree base, machine_mode mem_mode,
+ bool speed)
+{
+ /* For BLKmode, we can't do anything so return 1. */
+ if (mem_mode == BLKmode)
+ return 1;
+
+ struct mem_address parts = {};
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
+ /* Candidate scale factor: the size of one unit of MEM_MODE. */
+ unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);
+
+ /* Addressing mode "base + index". integer_one_node serves only as a
+ placeholder operand so a representative address RTX can be built
+ for cost comparison; its value is irrelevant. */
+ parts.index = integer_one_node;
+ parts.base = integer_one_node;
+ rtx addr = addr_for_mem_ref (&parts, as, false);
+ unsigned cost = address_cost (addr, mem_mode, as, speed);
+
+ /* Addressing mode "base + index << scale". */
+ parts.step = wide_int_to_tree (sizetype, fact);
+ addr = addr_for_mem_ref (&parts, as, false);
+ unsigned new_cost = address_cost (addr, mem_mode, as, speed);
+
+ /* Compare the cost of an address with an unscaled index with
+ a scaled index and return factor if useful. */
+ if (new_cost < cost)
+ return GET_MODE_UNIT_SIZE (mem_mode);
+ return 1;
+}
+
/* Dump PARTS to FILE. */
extern void dump_mem_address (FILE *, struct mem_address *);