/* Variable tracking routines for the GNU compiler.
- Copyright (C) 2002-2017 Free Software Foundation, Inc.
+ Copyright (C) 2002-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
-#include "params.h"
#include "tree-pretty-print.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"
+#include "print-rtl.h"
+#include "function-abi.h"
typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;
static inline HOST_WIDE_INT
int_mem_offset (const_rtx mem)
{
- if (MEM_OFFSET_KNOWN_P (mem))
- return MEM_OFFSET (mem);
+ /* MEM_OFFSET is now a poly_int64: return it only when it is a
+ compile-time constant, since this function's callers expect a
+ plain HOST_WIDE_INT offset.  */
+ HOST_WIDE_INT offset;
+ if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (&offset))
+ return offset;
return 0;
}
static void dataflow_set_destroy (dataflow_set *);
static bool track_expr_p (tree, bool);
-static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);
-static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);
or hard_frame_pointer_rtx. */
static inline rtx
-compute_cfa_pointer (HOST_WIDE_INT adjustment)
+compute_cfa_pointer (poly_int64 adjustment)
{
+ /* ADJUSTMENT is a poly_int64 so runtime-variable frame offsets can be
+ expressed; the result is cfa_base_rtx plus ADJUSTMENT + cfa_base_offset.  */
return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
or -1 if the replacement shouldn't be done. */
-static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;
+static poly_int64 hard_frame_pointer_adjustment = -1;
/* Data for adjust_mems callback. */
-struct adjust_mem_data
+class adjust_mem_data
{
+public:
bool store;
machine_mode mem_mode;
HOST_WIDE_INT stack_adjust;
case MULT:
break;
case ASHIFT:
+ if (GET_MODE (XEXP (x, 1)) != VOIDmode)
+ {
+ enum machine_mode mode = GET_MODE (subreg);
+ rtx op1 = XEXP (x, 1);
+ enum machine_mode op1_mode = GET_MODE (op1);
+ if (GET_MODE_PRECISION (as_a <scalar_int_mode> (mode))
+ < GET_MODE_PRECISION (as_a <scalar_int_mode> (op1_mode)))
+ {
+ poly_uint64 byte = subreg_lowpart_offset (mode, op1_mode);
+ if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
+ {
+ if (!simplify_subreg (mode, op1, op1_mode, byte))
+ return false;
+ }
+ else if (!validate_subreg (mode, op1_mode, op1, byte))
+ return false;
+ }
+ }
iter.substitute (XEXP (x, 0));
break;
default:
static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
- struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
+ class adjust_mem_data *amd = (class adjust_mem_data *) data;
rtx mem, addr = loc, tem;
machine_mode mem_mode_save;
bool store_save;
scalar_int_mode tem_mode, tem_subreg_mode;
+ poly_int64 size;
switch (GET_CODE (loc))
{
case REG:
return compute_cfa_pointer (amd->stack_adjust);
else if (loc == hard_frame_pointer_rtx
&& frame_pointer_needed
- && hard_frame_pointer_adjustment != -1
+ && maybe_ne (hard_frame_pointer_adjustment, -1)
&& cfa_base_rtx)
return compute_cfa_pointer (hard_frame_pointer_adjustment);
gcc_checking_assert (loc != virtual_incoming_args_rtx);
return mem;
case PRE_INC:
case PRE_DEC:
- addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
- gen_int_mode (GET_CODE (loc) == PRE_INC
- ? GET_MODE_SIZE (amd->mem_mode)
- : -GET_MODE_SIZE (amd->mem_mode),
- GET_MODE (loc)));
+ size = GET_MODE_SIZE (amd->mem_mode);
+ addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
+ GET_CODE (loc) == PRE_INC ? size : -size);
/* FALLTHRU */
case POST_INC:
case POST_DEC:
addr = XEXP (loc, 0);
gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
- tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
- gen_int_mode ((GET_CODE (loc) == PRE_INC
- || GET_CODE (loc) == POST_INC)
- ? GET_MODE_SIZE (amd->mem_mode)
- : -GET_MODE_SIZE (amd->mem_mode),
- GET_MODE (loc)));
+ size = GET_MODE_SIZE (amd->mem_mode);
+ tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
+ (GET_CODE (loc) == PRE_INC
+ || GET_CODE (loc) == POST_INC) ? size : -size);
store_save = amd->store;
amd->store = false;
tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
amd.stack_adjust = -VTI (bb)->out.stack_adjust;
amd.store = true;
- note_stores (PATTERN (insn), adjust_mem_stores, &amd);
+ note_stores (insn, adjust_mem_stores, &amd);
amd.store = false;
if (GET_CODE (PATTERN (insn)) == PARALLEL
set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
}
+/* Return true if we should track a location that is OFFSET bytes from
+ a variable. Store the constant offset in *OFFSET_OUT if so. */
+
+static bool
+track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out)
+{
+ HOST_WIDE_INT const_offset;
+ /* Reject polynomial (runtime-variable) offsets, and constant offsets
+ outside [0, MAX_VAR_PARTS - 1] — the range variable parts can use.  */
+ if (!offset.is_constant (&const_offset)
+ || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1))
+ return false;
+ *offset_out = const_offset;
+ return true;
+}
+
+/* Return the offset of a register that track_offset_p says we
+ should track. */
+
+static HOST_WIDE_INT
+get_tracked_reg_offset (rtx loc)
+{
+ HOST_WIDE_INT offset;
+ /* The caller must have already established that LOC's offset is
+ trackable, so failure here indicates a bug in the caller.  */
+ if (!track_offset_p (REG_OFFSET (loc), &offset))
+ gcc_unreachable ();
+ return offset;
+}
+
/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
static void
rtx set_src)
{
tree decl = REG_EXPR (loc);
- HOST_WIDE_INT offset = REG_OFFSET (loc);
+ HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
var_reg_decl_set (set, loc, initialized,
dv_from_decl (decl), offset, set_src, INSERT);
enum var_init_status initialized, rtx set_src)
{
tree decl = REG_EXPR (loc);
- HOST_WIDE_INT offset = REG_OFFSET (loc);
+ HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
attrs *node, *next;
attrs **nextp;
attrs **nextp = &set->regs[REGNO (loc)];
attrs *node, *next;
- if (clobber)
+ HOST_WIDE_INT offset;
+ if (clobber && track_offset_p (REG_OFFSET (loc), &offset))
{
tree decl = REG_EXPR (loc);
- HOST_WIDE_INT offset = REG_OFFSET (loc);
decl = var_debug_decl (decl);
static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
- HOST_WIDE_INT ofst = 0;
+ poly_int64 ofst = 0, term;
machine_mode mode = GET_MODE (oloc);
rtx loc = oloc;
rtx x;
while (retry)
{
while (GET_CODE (loc) == PLUS
- && GET_CODE (XEXP (loc, 1)) == CONST_INT)
+ && poly_int_rtx_p (XEXP (loc, 1), &term))
{
- ofst += INTVAL (XEXP (loc, 1));
+ ofst += term;
loc = XEXP (loc, 0);
}
loc = get_addr_from_global_cache (loc);
/* Consolidate plus_constants. */
- while (ofst && GET_CODE (loc) == PLUS
- && GET_CODE (XEXP (loc, 1)) == CONST_INT)
+ while (maybe_ne (ofst, 0)
+ && GET_CODE (loc) == PLUS
+ && poly_int_rtx_p (XEXP (loc, 1), &term))
{
- ofst += INTVAL (XEXP (loc, 1));
+ ofst += term;
loc = XEXP (loc, 0);
}
}
/* Add OFST back in. */
- if (ofst)
+ if (maybe_ne (ofst, 0))
{
/* Don't build new RTL if we can help it. */
- if (GET_CODE (oloc) == PLUS
- && XEXP (oloc, 0) == loc
- && INTVAL (XEXP (oloc, 1)) == ofst)
+ if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
return oloc;
loc = plus_constant (mode, loc, ofst);
else
return 1;
+ case 'p':
+ r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y));
+ if (r != 0)
+ return r;
+ break;
+
case 'V':
case 'E':
/* Compare the vector length first. */
{
unsigned int r;
hard_reg_set_iterator hrsi;
- HARD_REG_SET invalidated_regs;
- get_call_reg_set_usage (call_insn, &invalidated_regs,
- regs_invalidated_by_call);
+ HARD_REG_SET callee_clobbers
+ = insn_callee_abi (call_insn).full_reg_clobbers ();
- EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs, 0, r, hrsi)
+ EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
var_regno_delete (set, r);
if (MAY_HAVE_DEBUG_BIND_INSNS)
|| (TREE_CODE (realdecl) == MEM_REF
&& TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
{
- HOST_WIDE_INT bitsize, bitpos, maxsize;
+ HOST_WIDE_INT bitsize, bitpos;
bool reverse;
tree innerdecl
- = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
- &maxsize, &reverse);
- if (!DECL_P (innerdecl)
+ = get_ref_base_and_extent_hwi (realdecl, &bitpos,
+ &bitsize, &reverse);
+ if (!innerdecl
+ || !DECL_P (innerdecl)
|| DECL_IGNORED_P (innerdecl)
/* Do not track declarations for parts of tracked record
parameters since we want to track them as a whole. */
|| tracked_record_parameter_p (innerdecl)
|| TREE_STATIC (innerdecl)
- || bitsize <= 0
- || bitpos + bitsize > 256
- || bitsize != maxsize)
+ || bitsize == 0
+ || bitpos + bitsize > 256)
return 0;
else
realdecl = expr;
&& !tracked_record_parameter_p (realdecl))
return 0;
if (MEM_SIZE_KNOWN_P (decl_rtl)
- && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
+ && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
return 0;
}
EXPR+OFFSET. */
static bool
-same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
+same_variable_part_p (rtx loc, tree expr, poly_int64 offset)
{
tree expr2;
- HOST_WIDE_INT offset2;
+ poly_int64 offset2;
if (! DECL_P (expr))
return false;
expr = var_debug_decl (expr);
expr2 = var_debug_decl (expr2);
- return (expr == expr2 && offset == offset2);
+ return (expr == expr2 && known_eq (offset, offset2));
}
/* LOC is a REG or MEM that we would like to track if possible.
from EXPR in *OFFSET_OUT (if nonnull). */
static bool
-track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
+track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
machine_mode mode;
|| (store_reg_p
&& !COMPLEX_MODE_P (DECL_MODE (expr))
&& hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
- && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
+ && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
{
mode = DECL_MODE (expr);
offset = 0;
}
- if (offset < 0 || offset >= MAX_VAR_PARTS)
+ HOST_WIDE_INT const_offset;
+ if (!track_offset_p (offset, &const_offset))
return false;
if (mode_out)
*mode_out = mode;
if (offset_out)
- *offset_out = offset;
+ *offset_out = const_offset;
return true;
}
static rtx
var_lowpart (machine_mode mode, rtx loc)
{
- unsigned int offset, reg_offset, regno;
+ unsigned int regno;
if (GET_MODE (loc) == mode)
return loc;
if (!REG_P (loc) && !MEM_P (loc))
return NULL;
- offset = byte_lowpart_offset (mode, GET_MODE (loc));
+ poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc));
if (MEM_P (loc))
return adjust_address_nv (loc, mode, offset);
- reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
+ poly_uint64 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
reg_offset, mode);
return gen_rtx_REG_offset (loc, mode, regno, offset);
compile time for ridiculously complex expressions, although they're
seldom useful, and they may often have to be discarded as not
representable anyway. */
-#define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
+#define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)
/* Attempt to reverse the EXPR operation in the debug info and record
it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
&& (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
return;
/* Avoid creating too large locs lists. */
- else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
+ else if (count == param_max_vartrack_reverse_op_size)
return;
switch (GET_CODE (src))
mo.type = MO_CLOBBER;
mo.u.loc = loc;
if (GET_CODE (expr) == SET
- && SET_DEST (expr) == loc
+ && (SET_DEST (expr) == loc
+ || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
+ && XEXP (SET_DEST (expr), 0) == loc))
&& !unsuitable_loc (SET_SRC (expr))
&& find_use_val (loc, mode, cui))
{
}
if (loc == stack_pointer_rtx
- && hard_frame_pointer_adjustment != -1
+ && (maybe_ne (hard_frame_pointer_adjustment, -1)
+ || (!frame_pointer_needed && !ACCUMULATE_OUTGOING_ARGS))
&& preserve)
cselib_set_value_sp_based (v);
+ /* Don't record MO_VAL_SET for VALUEs that can be described using
+ cfa_base_rtx or cfa_base_rtx + CONST_INT, cselib already knows
+ all the needed equivalences and they shouldn't change depending
+ on which register holds that VALUE in some instruction. */
+ if (!frame_pointer_needed
+ && cfa_base_rtx
+ && cselib_sp_derived_value_p (v))
+ {
+ if (preserve)
+ preserve_value (v);
+ return;
+ }
+
nloc = replace_expr_with_values (oloc);
if (nloc)
oloc = nloc;
&& targetm.calls.struct_value_rtx (type, 0) == 0)
{
tree struct_addr = build_pointer_type (TREE_TYPE (type));
- machine_mode mode = TYPE_MODE (struct_addr);
+ function_arg_info arg (struct_addr, /*named=*/true);
rtx reg;
INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
nargs + 1);
- reg = targetm.calls.function_arg (args_so_far, mode,
- struct_addr, true);
- targetm.calls.function_arg_advance (args_so_far, mode,
- struct_addr, true);
+ reg = targetm.calls.function_arg (args_so_far, arg);
+ targetm.calls.function_arg_advance (args_so_far, arg);
if (reg == NULL_RTX)
{
for (; link; link = XEXP (link, 1))
nargs);
if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
{
- machine_mode mode;
t = TYPE_ARG_TYPES (type);
- mode = TYPE_MODE (TREE_VALUE (t));
- this_arg = targetm.calls.function_arg (args_so_far, mode,
- TREE_VALUE (t), true);
+ function_arg_info arg (TREE_VALUE (t), /*named=*/true);
+ this_arg = targetm.calls.function_arg (args_so_far, arg);
if (this_arg && !REG_P (this_arg))
this_arg = NULL_RTX;
else if (this_arg == NULL_RTX)
if (!frame_pointer_needed)
{
- struct adjust_mem_data amd;
+ class adjust_mem_data amd;
amd.mem_mode = VOIDmode;
amd.stack_adjust = -VTI (bb)->out.stack_adjust;
amd.store = true;
}
if (t && t != void_list_node)
{
- tree argtype = TREE_VALUE (t);
- machine_mode mode = TYPE_MODE (argtype);
rtx reg;
- if (pass_by_reference (&args_so_far_v, mode, argtype, true))
- {
- argtype = build_pointer_type (argtype);
- mode = TYPE_MODE (argtype);
- }
- reg = targetm.calls.function_arg (args_so_far, mode,
- argtype, true);
- if (TREE_CODE (argtype) == REFERENCE_TYPE
- && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
+ function_arg_info arg (TREE_VALUE (t), /*named=*/true);
+ apply_pass_by_reference_rules (&args_so_far_v, arg);
+ reg = targetm.calls.function_arg (args_so_far, arg);
+ if (TREE_CODE (arg.type) == REFERENCE_TYPE
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
&& reg
&& REG_P (reg)
- && GET_MODE (reg) == mode
- && (GET_MODE_CLASS (mode) == MODE_INT
- || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
+ && GET_MODE (reg) == arg.mode
+ && (GET_MODE_CLASS (arg.mode) == MODE_INT
+ || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
&& REG_P (x)
&& REGNO (x) == REGNO (reg)
- && GET_MODE (x) == mode
+ && GET_MODE (x) == arg.mode
&& item)
{
machine_mode indmode
- = TYPE_MODE (TREE_TYPE (argtype));
+ = TYPE_MODE (TREE_TYPE (arg.type));
rtx mem = gen_rtx_MEM (indmode, x);
cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
if (val && cselib_preserved_value_p (val))
}
}
}
- targetm.calls.function_arg_advance (args_so_far, mode,
- argtype, true);
+ targetm.calls.function_arg_advance (args_so_far, arg);
t = TREE_CHAIN (t);
}
}
insert notes before it without worrying about any
notes that MO_USEs might emit after the insn. */
cui.store_p = true;
- note_stores (PATTERN (insn), add_stores, &cui);
+ note_stores (insn, add_stores, &cui);
n2 = VTI (bb)->mos.length () - 1;
mos = VTI (bb)->mos.address ();
int *rc_order;
int i;
int htabsz = 0;
- int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
+ int htabmax = param_max_vartrack_size;
bool success = true;
timevar_push (TV_VAR_TRACKING_DATAFLOW);
if (MAY_HAVE_DEBUG_BIND_INSNS)
inform (DECL_SOURCE_LOCATION (cfun->decl),
"variable tracking size limit exceeded with "
- "-fvar-tracking-assignments, retrying without");
+ "%<-fvar-tracking-assignments%>, retrying without");
else
inform (DECL_SOURCE_LOCATION (cfun->decl),
"variable tracking size limit exceeded");
static void
dump_vars (variable_table_type *vars)
{
- if (vars->elements () > 0)
+ if (!vars->is_empty ())
{
fprintf (dump_file, "Variables:\n");
vars->traverse <void *, dump_var_tracking_slot> (NULL);
/* Structure for passing some other parameters to function
vt_expand_loc_callback. */
-struct expand_loc_callback_data
+class expand_loc_callback_data
{
+public:
/* The variables and values active at this point. */
variable_table_type *vars;
vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
bool *pendrecp)
{
- struct expand_loc_callback_data *elcd
- = (struct expand_loc_callback_data *) data;
+ class expand_loc_callback_data *elcd
+ = (class expand_loc_callback_data *) data;
location_chain *loc, *next;
rtx result = NULL;
int first_child, result_first_child, last_child;
int max_depth ATTRIBUTE_UNUSED,
void *data)
{
- struct expand_loc_callback_data *elcd
- = (struct expand_loc_callback_data *) data;
+ class expand_loc_callback_data *elcd
+ = (class expand_loc_callback_data *) data;
decl_or_value dv;
variable *var;
rtx result, subreg;
/* Invalid SUBREGs are ok in debug info. ??? We could try
alternate expansions for the VALUE as well. */
- if (!result)
+ if (!result && GET_MODE (subreg) != VOIDmode)
result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
return result;
static rtx
vt_expand_loc (rtx loc, variable_table_type *vars)
{
- struct expand_loc_callback_data data;
+ class expand_loc_callback_data data;
rtx result;
if (!MAY_HAVE_DEBUG_BIND_INSNS)
static rtx
vt_expand_1pvar (variable *var, variable_table_type *vars)
{
- struct expand_loc_callback_data data;
+ class expand_loc_callback_data data;
rtx loc;
gcc_checking_assert (var->onepart && var->n_var_parts == 1);
bool complete;
enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
HOST_WIDE_INT last_limit;
- tree type_size_unit;
HOST_WIDE_INT offsets[MAX_VAR_PARTS];
rtx loc[MAX_VAR_PARTS];
tree decl;
{
machine_mode mode, wider_mode;
rtx loc2;
- HOST_WIDE_INT offset;
+ HOST_WIDE_INT offset, size, wider_size;
if (i == 0 && var->onepart)
{
mode = GET_MODE (var->var_part[i].cur_loc);
if (mode == VOIDmode && var->onepart)
mode = DECL_MODE (decl);
- last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
+ /* We ony track subparts of constant-sized objects, since at present
+ there's no representation for polynomial pieces. */
+ if (!GET_MODE_SIZE (mode).is_constant (&size))
+ {
+ complete = false;
+ continue;
+ }
+ last_limit = offsets[n_var_parts] + size;
/* Attempt to merge adjacent registers or memory. */
for (j = i + 1; j < var->n_var_parts; j++)
break;
if (j < var->n_var_parts
&& GET_MODE_WIDER_MODE (mode).exists (&wider_mode)
+ && GET_MODE_SIZE (wider_mode).is_constant (&wider_size)
&& var->var_part[j].cur_loc
&& mode == GET_MODE (var->var_part[j].cur_loc)
&& (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
&& GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
{
rtx new_loc = NULL;
+ poly_int64 offset2;
if (REG_P (loc[n_var_parts])
&& hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
else if (MEM_P (loc[n_var_parts])
&& GET_CODE (XEXP (loc2, 0)) == PLUS
&& REG_P (XEXP (XEXP (loc2, 0), 0))
- && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
+ && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), &offset2))
{
- if ((REG_P (XEXP (loc[n_var_parts], 0))
- && rtx_equal_p (XEXP (loc[n_var_parts], 0),
- XEXP (XEXP (loc2, 0), 0))
- && INTVAL (XEXP (XEXP (loc2, 0), 1))
- == GET_MODE_SIZE (mode))
- || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
- && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
- && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
- XEXP (XEXP (loc2, 0), 0))
- && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
- + GET_MODE_SIZE (mode)
- == INTVAL (XEXP (XEXP (loc2, 0), 1))))
+ poly_int64 end1 = size;
+ rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
+ &end1);
+ if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
+ && known_eq (end1, offset2))
new_loc = adjust_address_nv (loc[n_var_parts],
wider_mode, 0);
}
{
loc[n_var_parts] = new_loc;
mode = wider_mode;
- last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
+ last_limit = offsets[n_var_parts] + wider_size;
i = j;
}
}
++n_var_parts;
}
- type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
- if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
+ poly_uint64 type_size_unit
+ = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
+ if (maybe_lt (poly_uint64 (last_limit), type_size_unit))
complete = false;
if (! flag_var_tracking_uninit)
/* Make sure that the call related notes come first. */
while (NEXT_INSN (insn)
&& NOTE_P (insn)
- && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
- && NOTE_DURING_CALL_P (insn))
- || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
+ && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
+ && NOTE_DURING_CALL_P (insn))
insn = NEXT_INSN (insn);
if (NOTE_P (insn)
- && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
- && NOTE_DURING_CALL_P (insn))
- || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
+ && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
+ && NOTE_DURING_CALL_P (insn))
note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
else
note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
emit_note_data data;
variable_table_type *htab = shared_hash_htab (vars);
- if (!changed_variables->elements ())
+ if (changed_variables->is_empty ())
return;
if (MAY_HAVE_DEBUG_BIND_INSNS)
emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
{
rtx arguments = mo->u.loc, *p = &arguments;
- rtx_note *note;
while (*p)
{
XEXP (XEXP (*p, 0), 1)
shared_hash_htab (set->vars));
/* If expansion is successful, keep it in the list. */
if (XEXP (XEXP (*p, 0), 1))
- p = &XEXP (*p, 1);
+ {
+ XEXP (XEXP (*p, 0), 1)
+ = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
+ p = &XEXP (*p, 1);
+ }
/* Otherwise, if the following item is data_value for it,
drop it too. */
else if (XEXP (*p, 1)
else
*p = XEXP (*p, 1);
}
- note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
- NOTE_VAR_LOCATION (note) = arguments;
+ add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
}
break;
}
}
-/* Return BB's head, unless BB is the block that succeeds ENTRY_BLOCK,
- in which case it searches back from BB's head for the very first
- insn. Use [get_first_insn (bb), BB_HEAD (bb->next_bb)[ as a range
- to iterate over all insns of a function while iterating over its
- BBs. */
-
-static rtx_insn *
-get_first_insn (basic_block bb)
-{
- rtx_insn *insn = BB_HEAD (bb);
-
- if (bb->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
- while (rtx_insn *prev = PREV_INSN (insn))
- insn = prev;
-
- return insn;
-}
-
/* Emit notes for the whole function. */
static void
basic_block bb;
dataflow_set cur;
- gcc_assert (!changed_variables->elements ());
+ gcc_assert (changed_variables->is_empty ());
/* Free memory occupied by the out hash tables, as they aren't used
anymore. */
{
/* Emit the notes for changes of variable locations between two
subsequent basic blocks. */
- emit_notes_for_differences (get_first_insn (bb),
- &cur, &VTI (bb)->in);
+ emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
if (MAY_HAVE_DEBUG_BIND_INSNS)
local_get_addr_cache = new hash_map<rtx, rtx>;
assign declaration to *DECLP and offset to *OFFSETP, and return true. */
static bool
-vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
+vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
{
if (REG_P (rtl))
{
decl = REG_EXPR (reg);
if (REG_EXPR (reg) != decl)
break;
- if (REG_OFFSET (reg) < offset)
- offset = REG_OFFSET (reg);
+ HOST_WIDE_INT this_offset;
+ if (!track_offset_p (REG_OFFSET (reg), &this_offset))
+ break;
+ offset = MIN (offset, this_offset);
}
if (i == len)
rtx incoming = DECL_INCOMING_RTL (parm);
tree decl;
machine_mode mode;
- HOST_WIDE_INT offset;
+ poly_int64 offset;
dataflow_set *out;
decl_or_value dv;
+ bool incoming_ok = true;
if (TREE_CODE (parm) != PARM_DECL)
return;
rewrite the incoming location of parameters passed on the stack
into MEMs based on the argument pointer, so that incoming doesn't
depend on a pseudo. */
+ poly_int64 incoming_offset = 0;
if (MEM_P (incoming)
- && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
- || (GET_CODE (XEXP (incoming, 0)) == PLUS
- && XEXP (XEXP (incoming, 0), 0)
- == crtl->args.internal_arg_pointer
- && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
+ && (strip_offset (XEXP (incoming, 0), &incoming_offset)
+ == crtl->args.internal_arg_pointer))
{
HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
- if (GET_CODE (XEXP (incoming, 0)) == PLUS)
- off += INTVAL (XEXP (XEXP (incoming, 0), 1));
incoming
= replace_equiv_address_nv (incoming,
plus_constant (Pmode,
- arg_pointer_rtx, off));
+ arg_pointer_rtx,
+ off + incoming_offset));
}
#ifdef HAVE_window_save
if (!vt_get_decl_and_offset (incoming, &decl, &offset))
{
+ incoming_ok = false;
if (MEM_P (incoming))
{
/* This means argument is passed by invisible reference. */
offset = 0;
}
- if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
+ HOST_WIDE_INT const_offset;
+ if (!track_loc_p (incoming, parm, offset, false, &mode, &const_offset))
return;
out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
arguments passed by invisible reference aren't dealt with
above: incoming-rtl will have Pmode rather than the
expected mode for the type. */
- if (offset)
+ if (const_offset)
return;
lowpart = var_lowpart (mode, incoming);
if (val)
{
preserve_value (val);
- set_variable_part (out, val->val_rtx, dv, offset,
+ set_variable_part (out, val->val_rtx, dv, const_offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
dv = dv_from_value (val->val_rtx);
}
{
incoming = var_lowpart (mode, incoming);
gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
- attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
+ attrs_list_insert (&out->regs[REGNO (incoming)], dv, const_offset,
incoming);
- set_variable_part (out, incoming, dv, offset,
+ set_variable_part (out, incoming, dv, const_offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
if (dv_is_value_p (dv))
{
{
int i;
+ /* The following code relies on vt_get_decl_and_offset returning true for
+ incoming, which might not be always the case. */
+ if (!incoming_ok)
+ return;
for (i = 0; i < XVECLEN (incoming, 0); i++)
{
rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
- offset = REG_OFFSET (reg);
+ /* vt_get_decl_and_offset has already checked that the offset
+ is a valid variable part. */
+ const_offset = get_tracked_reg_offset (reg);
gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
- attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
- set_variable_part (out, reg, dv, offset,
+ attrs_list_insert (&out->regs[REGNO (reg)], dv, const_offset, reg);
+ set_variable_part (out, reg, dv, const_offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
}
}
else if (MEM_P (incoming))
{
incoming = var_lowpart (mode, incoming);
- set_variable_part (out, incoming, dv, offset,
+ set_variable_part (out, incoming, dv, const_offset,
VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
}
}
for (parm = DECL_ARGUMENTS (current_function_decl);
parm; parm = DECL_CHAIN (parm))
- if (!POINTER_BOUNDS_P (parm))
- vt_add_function_parameter (parm);
+ vt_add_function_parameter (parm);
if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
{
/* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */
static rtx_insn *
-reemit_marker_as_note (rtx_insn *insn, basic_block *bb)
+reemit_marker_as_note (rtx_insn *insn)
{
gcc_checking_assert (DEBUG_MARKER_INSN_P (insn));
switch (kind)
{
case NOTE_INSN_BEGIN_STMT:
+ case NOTE_INSN_INLINE_ENTRY:
{
rtx_insn *note = NULL;
if (cfun->debug_nonbind_markers)
{
note = emit_note_before (kind, insn);
NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn);
- if (bb)
- BLOCK_FOR_INSN (note) = *bb;
}
delete_insn (insn);
return note;
vt_initialize (void)
{
basic_block bb;
- HOST_WIDE_INT fp_cfa_offset = -1;
+ poly_int64 fp_cfa_offset = -1;
alloc_aux_for_blocks (sizeof (variable_tracking_info));
preserve_value (val);
if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
cselib_preserve_cfa_base_value (val, REGNO (reg));
- expr = plus_constant (GET_MODE (stack_pointer_rtx),
- stack_pointer_rtx, -ofst);
- cselib_add_permanent_equiv (val, expr, get_insns ());
-
if (ofst)
{
- val = cselib_lookup_from_insn (stack_pointer_rtx,
- GET_MODE (stack_pointer_rtx), 1,
- VOIDmode, get_insns ());
- preserve_value (val);
+ cselib_val *valsp
+ = cselib_lookup_from_insn (stack_pointer_rtx,
+ GET_MODE (stack_pointer_rtx), 1,
+ VOIDmode, get_insns ());
+ preserve_value (valsp);
expr = plus_constant (GET_MODE (reg), reg, ofst);
- cselib_add_permanent_equiv (val, expr, get_insns ());
+ /* This cselib_add_permanent_equiv call needs to be done before
+ the other cselib_add_permanent_equiv a few lines later,
+ because after that one is done, cselib_lookup on this expr
+ will, due to the cselib SP_DERIVED_VALUE_P optimizations,
+ return valsp and so no permanent equivalency will be added. */
+ cselib_add_permanent_equiv (valsp, expr, get_insns ());
}
+
+ expr = plus_constant (GET_MODE (stack_pointer_rtx),
+ stack_pointer_rtx, -ofst);
+ cselib_add_permanent_equiv (val, expr, get_insns ());
}
/* In order to factor out the adjustments made to the stack pointer or to
{
if (GET_CODE (elim) == PLUS)
{
- fp_cfa_offset -= INTVAL (XEXP (elim, 1));
+ fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
elim = XEXP (elim, 0);
}
if (elim != hard_frame_pointer_rtx)
vt_add_function_parameters ();
+ bool record_sp_value = false;
FOR_EACH_BB_FN (bb, cfun)
{
rtx_insn *insn;
- HOST_WIDE_INT pre, post = 0;
basic_block first_bb, last_bb;
if (MAY_HAVE_DEBUG_BIND_INSNS)
cselib_get_next_uid ());
}
+ if (MAY_HAVE_DEBUG_BIND_INSNS
+ && cfa_base_rtx
+ && !frame_pointer_needed
+ && record_sp_value)
+ cselib_record_sp_cfa_base_equiv (-cfa_base_offset
+ - VTI (bb)->in.stack_adjust,
+ BB_HEAD (bb));
+ record_sp_value = true;
+
first_bb = bb;
for (;;)
{
HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
- /* If we are walking the first basic block, walk any HEADER
- insns that might be before it too. Unfortunately,
- BB_HEADER and BB_FOOTER are not set while we run this
- pass. */
rtx_insn *next;
- bool outside_bb = true;
- for (insn = get_first_insn (bb); insn != BB_HEAD (bb->next_bb);
- insn = next)
+ FOR_BB_INSNS_SAFE (bb, insn, next)
{
- if (insn == BB_HEAD (bb))
- outside_bb = false;
- else if (insn == NEXT_INSN (BB_END (bb)))
- outside_bb = true;
- next = NEXT_INSN (insn);
if (INSN_P (insn))
{
- if (outside_bb)
- {
- /* Ignore non-debug insns outside of basic blocks. */
- if (!DEBUG_INSN_P (insn))
- continue;
- /* Debug binds shouldn't appear outside of bbs. */
- gcc_assert (!DEBUG_BIND_INSN_P (insn));
- }
- basic_block save_bb = BLOCK_FOR_INSN (insn);
- if (!BLOCK_FOR_INSN (insn))
- {
- gcc_assert (outside_bb);
- BLOCK_FOR_INSN (insn) = bb;
- }
- else
- gcc_assert (BLOCK_FOR_INSN (insn) == bb);
+ HOST_WIDE_INT pre = 0, post = 0;
if (!frame_pointer_needed)
{
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
VTI (bb)->mos.safe_push (mo);
- VTI (bb)->out.stack_adjust += pre;
}
}
cselib_hook_called = false;
adjust_insn (bb, insn);
+
+ if (pre)
+ VTI (bb)->out.stack_adjust += pre;
+
if (DEBUG_MARKER_INSN_P (insn))
{
- insn = reemit_marker_as_note (insn, &save_bb);
+ reemit_marker_as_note (insn);
continue;
}
cselib_process_insn (insn);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- print_rtl_single (dump_file, insn);
+ if (dump_flags & TDF_SLIM)
+ dump_insn_slim (dump_file, insn);
+ else
+ print_rtl_single (dump_file, insn);
dump_cselib_table (dump_file);
}
}
add_with_sets (insn, 0, 0);
cancel_changes (0);
- if (!frame_pointer_needed && post)
+ if (post)
{
micro_operation mo;
mo.type = MO_ADJUST;
VTI (bb)->out.stack_adjust += post;
}
- if (fp_cfa_offset != -1
- && hard_frame_pointer_adjustment == -1
+ if (maybe_ne (fp_cfa_offset, -1)
+ && known_eq (hard_frame_pointer_adjustment, -1)
&& fp_setter_insn (insn))
{
vt_init_cfa_base ();
}
}
}
- BLOCK_FOR_INSN (insn) = save_bb;
}
}
gcc_assert (offset == VTI (bb)->out.stack_adjust);
static int debug_label_num = 1;
+/* Remove from the insn stream a single debug insn used for
+ variable tracking at assignments. */
+
+static inline void
+delete_vta_debug_insn (rtx_insn *insn)
+{
+ /* Debug marker insns are turned back into notes rather than
+ deleted outright.  */
+ if (DEBUG_MARKER_INSN_P (insn))
+ {
+ reemit_marker_as_note (insn);
+ return;
+ }
+
+ tree decl = INSN_VAR_LOCATION_DECL (insn);
+ /* A named label decl without RTL is converted in place into a
+ NOTE_INSN_DELETED_DEBUG_LABEL so its name survives deletion.  */
+ if (TREE_CODE (decl) == LABEL_DECL
+ && DECL_NAME (decl)
+ && !DECL_RTL_SET_P (decl))
+ {
+ PUT_CODE (insn, NOTE);
+ NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
+ NOTE_DELETED_LABEL_NAME (insn)
+ = IDENTIFIER_POINTER (DECL_NAME (decl));
+ SET_DECL_RTL (decl, insn);
+ CODE_LABEL_NUMBER (insn) = debug_label_num++;
+ }
+ else
+ delete_insn (insn);
+}
+
/* Remove from the insn stream all debug insns used for variable
- tracking at assignments. */
+ tracking at assignments. USE_CFG should be false if the cfg is no
+ longer usable. */
-static void
-delete_vta_debug_insns (void)
+void
+delete_vta_debug_insns (bool use_cfg)
{
basic_block bb;
rtx_insn *insn, *next;
if (!MAY_HAVE_DEBUG_INSNS)
return;
- FOR_EACH_BB_FN (bb, cfun)
- {
- for (insn = get_first_insn (bb);
- insn != BB_HEAD (bb->next_bb)
- ? next = NEXT_INSN (insn), true : false;
- insn = next)
+ /* With a usable CFG, visit the insns of each basic block.  */
+ if (use_cfg)
+ FOR_EACH_BB_FN (bb, cfun)
+ {
+ FOR_BB_INSNS_SAFE (bb, insn, next)
+ if (DEBUG_INSN_P (insn))
+ delete_vta_debug_insn (insn);
+ }
+ /* Otherwise walk the plain insn chain from the start.  */
+ else
+ for (insn = get_insns (); insn; insn = next)
+ {
+ next = NEXT_INSN (insn);
if (DEBUG_INSN_P (insn))
- {
- if (DEBUG_MARKER_INSN_P (insn))
- {
- insn = reemit_marker_as_note (insn, NULL);
- continue;
- }
-
- tree decl = INSN_VAR_LOCATION_DECL (insn);
- if (TREE_CODE (decl) == LABEL_DECL
- && DECL_NAME (decl)
- && !DECL_RTL_SET_P (decl))
- {
- PUT_CODE (insn, NOTE);
- NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
- NOTE_DELETED_LABEL_NAME (insn)
- = IDENTIFIER_POINTER (DECL_NAME (decl));
- SET_DECL_RTL (decl, insn);
- CODE_LABEL_NUMBER (insn) = debug_label_num++;
- }
- else
- delete_insn (insn);
- }
- }
+ delete_vta_debug_insn (insn);
+ }
}
/* Run a fast, BB-local only version of var tracking, to take care of
handled as well. */
static void
-vt_debug_insns_local (bool skipped)
+vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
{
- /* ??? Just skip it all for now. If we skipped the global pass,
- arrange for stmt markers to be dropped as well. */
- if (skipped)
- cfun->debug_nonbind_markers = 0;
- delete_vta_debug_insns ();
+ /* ??? Just skip it all for now. */
+ /* The CFG is still intact at this point, hence use_cfg == true.  */
+ delete_vta_debug_insns (true);
}
/* Free the data structures needed for variable tracking. */
any pseudos at this point. */
|| targetm.no_register_allocation)
{
- delete_vta_debug_insns ();
+ delete_vta_debug_insns (true);
return 0;
}
{
vt_finalize ();
- cfun->debug_nonbind_markers = 0;
-
- delete_vta_debug_insns ();
+ delete_vta_debug_insns (true);
/* This is later restored by our caller. */
flag_var_tracking_assignments = 0;