int, bool);
static void output_reload_in_const (rtx *, rtx, int *, bool);
static struct machine_function *avr_init_machine_status (void);
-static int _reg_unused_after (rtx_insn *insn, rtx reg, bool look_at_insn);
+static bool _reg_unused_after (rtx_insn *insn, rtx reg, bool look_at_insn);
/* Prototypes for hook implementors if needed before their implementation. */
start_sequence();
- rtx_insn *seq1, *seq2, *last1, *last2;
-
rtx reg = copy_to_mode_reg (mode, xop[10]);
rtx (*gen_add)(rtx,rtx,rtx) = QImode == mode ? gen_addqi3 : gen_addhi3;
JUMP_LABEL (cbranch) = xop[4];
++LABEL_NUSES (xop[4]);
- seq1 = get_insns();
- last1 = get_last_insn();
+ rtx_insn *seq1 = get_insns();
+ rtx_insn *last1 = get_last_insn();
end_sequence();
emit_insn_after (seq1, insns[2]);
emit_insn (pat_4);
- seq2 = get_insns();
- last2 = get_last_insn();
+ rtx_insn *seq2 = get_insns();
+ rtx_insn *last2 = get_last_insn();
end_sequence();
emit_insn_after (seq2, insns[3]);
/* A helper for the subsequent function attribute used to dig for
attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
-static inline int
+static inline bool
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
if (FUNCTION_DECL == TREE_CODE (func))
/* Return nonzero if FUNC is a naked function. */
-static int
+static bool
avr_naked_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "naked");
/* Return nonzero if FUNC is an interrupt function as specified
by the "interrupt" attribute. */
-static int
+static bool
avr_interrupt_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "interrupt");
/* Return nonzero if FUNC is a signal function as specified
by the "signal" attribute. */
-static int
+static bool
avr_signal_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "signal");
/* Return nonzero if FUNC is an OS_task function. */
-static int
+static bool
avr_OS_task_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "OS_task");
/* Return nonzero if FUNC is an OS_main function. */
-static int
+static bool
avr_OS_main_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "OS_main");
/* Return nonzero if FUNC is a no_gccisr function as specified
by the "no_gccisr" attribute. */
-static int
+static bool
avr_no_gccisr_function_p (tree func)
{
return avr_lookup_function_attribute1 (func, "no_gccisr");
static int
avr_regs_to_save (HARD_REG_SET *set)
{
- int count;
+ int count = 0;
int int_or_sig_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
if (set)
CLEAR_HARD_REG_SET (*set);
- count = 0;
/* No need to save any registers if the function never returns or
has the "OS_task" or "OS_main" attribute. */
/* Return true if register FROM can be eliminated via register TO. */
static bool
-avr_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
+avr_can_eliminate (const int /*from*/, const int to)
{
return ((frame_pointer_needed && to == FRAME_POINTER_REGNUM)
|| !frame_pointer_needed);
static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p, int treg)
{
- rtx_insn *insn;
-
gcc_assert (MEM_P (sfr));
/* IN treg, IO(SFR) */
- insn = emit_move_insn (all_regs_rtx[treg], sfr);
+ rtx_insn *insn = emit_move_insn (all_regs_rtx[treg], sfr);
if (frame_related_p)
RTX_FRAME_RELATED_P (insn) = 1;
|| live_seq > 7))
{
rtx pattern;
- int first_reg, reg, offset;
+ int reg, offset;
emit_move_insn (gen_rtx_REG (HImode, REG_X),
gen_int_mode (size, HImode));
/* Note that live_seq always contains r28+r29, but the other
registers to be saved are all below 18. */
- first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);
+ int first_reg = (LAST_CALLEE_SAVED_REG + 1) - (live_seq - 2);
for (reg = REG_29, offset = -live_seq + 1;
reg >= first_reg;
int irq_state = -1;
HOST_WIDE_INT size_cfa = size, neg_size;
rtx_insn *fp_plus_insns;
- rtx fp, my_fp;
gcc_assert (frame_pointer_needed
|| !isr_p
|| !crtl->is_leaf);
- fp = my_fp = (frame_pointer_needed
- ? frame_pointer_rtx
- : gen_rtx_REG (Pmode, REG_X));
+ rtx my_fp = (frame_pointer_needed
+ ? frame_pointer_rtx
+ : gen_rtx_REG (Pmode, REG_X));
+ rtx fp = my_fp;
if (AVR_HAVE_8BIT_SP)
{
avr_expand_prologue (void)
{
HARD_REG_SET set;
- HOST_WIDE_INT size;
-
- size = get_frame_size() + avr_outgoing_args_size();
+ HOST_WIDE_INT size = get_frame_size() + avr_outgoing_args_size();
cfun->machine->stack_usage = 0;
/* Worker function for `EPILOGUE_USES'. */
int
-avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
+avr_epilogue_uses (int /*regno*/)
{
if (reload_completed
&& cfun->machine
static void
emit_pop_byte (unsigned regno)
{
- rtx mem, reg;
-
- mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
+ rtx mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
mem = gen_frame_mem (QImode, mem);
- reg = gen_rtx_REG (QImode, regno);
+ rtx reg = gen_rtx_REG (QImode, regno);
emit_insn (gen_rtx_SET (reg, mem));
}
void
avr_expand_epilogue (bool sibcall_p)
{
- int live_seq;
HARD_REG_SET set;
- int minimize;
- HOST_WIDE_INT size;
bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
- size = get_frame_size() + avr_outgoing_args_size();
+ HOST_WIDE_INT size = get_frame_size() + avr_outgoing_args_size();
/* epilogue: naked */
if (cfun->machine->is_naked)
}
avr_regs_to_save (&set);
- live_seq = sequent_regs_live ();
+ int live_seq = sequent_regs_live ();
- minimize = (TARGET_CALL_PROLOGUES
- && live_seq
- && !isr_p
- && !cfun->machine->is_OS_task
- && !cfun->machine->is_OS_main
- && !AVR_TINY);
+ bool minimize = (TARGET_CALL_PROLOGUES
+ && live_seq
+ && !isr_p
+ && !cfun->machine->is_OS_task
+ && !cfun->machine->is_OS_main
+ && !AVR_TINY);
if (minimize
&& (live_seq > 4
/* Try two methods to adjust stack and select shortest. */
int irq_state = -1;
- rtx fp, my_fp;
- rtx_insn *fp_plus_insns;
- HOST_WIDE_INT size_max;
gcc_assert (frame_pointer_needed
|| !isr_p
|| !crtl->is_leaf);
- fp = my_fp = (frame_pointer_needed
- ? frame_pointer_rtx
- : gen_rtx_REG (Pmode, REG_X));
+ rtx my_fp = (frame_pointer_needed
+ ? frame_pointer_rtx
+ : gen_rtx_REG (Pmode, REG_X));
+ rtx fp = my_fp;
if (AVR_HAVE_8BIT_SP)
{
/* For rationale see comment in prologue generation. */
- size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
+ HOST_WIDE_INT size_max = (HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (my_fp));
if (size > size_max)
size = size_max;
size = trunc_int_for_mode (size, GET_MODE (my_fp));
emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
GEN_INT (irq_state)));
- fp_plus_insns = get_insns ();
+ rtx_insn *fp_plus_insns = get_insns ();
end_sequence ();
/********** Method 2: Adjust Stack pointer **********/
if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
{
- rtx_insn *sp_plus_insns;
-
start_sequence ();
emit_move_insn (stack_pointer_rtx,
plus_constant (Pmode, stack_pointer_rtx, size));
- sp_plus_insns = get_insns ();
+ rtx_insn *sp_plus_insns = get_insns ();
end_sequence ();
/************ Use shortest method ************/
CLEAR_HARD_REG_BIT (set, treg);
}
- for (int reg = 31; reg >= 0; --reg)
+ for (int reg = REG_31; reg >= REG_0; --reg)
if (TEST_HARD_REG_BIT (set, reg))
emit_pop_byte (reg);
/* Naked Functions must not have any instructions after
their epilogue, see PR42240 */
- if (reload_completed
- && cfun->machine
- && cfun->machine->is_naked)
- {
- return true;
- }
-
- return false;
+ return (reload_completed
+ && cfun->machine
+ && cfun->machine->is_naked);
}
/* Implement `TARGET_MODE_DEPENDENT_ADDRESS_P'. */
static bool
-avr_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED, addr_space_t as)
+avr_mode_dependent_address_p (const_rtx /*addr*/, addr_space_t as)
{
/* FIXME: Non-generic addresses are not mode-dependent in themselves.
This hook just serves to hack around PR rtl-optimization/52543 by
than 63 bytes or for R++ or --R addressing. */
rtx
-avr_legitimize_reload_address (rtx *px, machine_mode mode,
- int opnum, int type, int addr_type,
- int ind_levels ATTRIBUTE_UNUSED,
+avr_legitimize_reload_address (rtx *px, machine_mode mode, int opnum,
+ int type, int addr_type, int /*ind_levels*/,
rtx (*mk_memloc)(rtx,int))
{
rtx x = *px;
static bool
avr_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
- unsigned int align ATTRIBUTE_UNUSED,
- enum by_pieces_operation op,
- bool speed_p)
+ unsigned int align,
+ enum by_pieces_operation op, bool speed_p)
{
if (op != MOVE_BY_PIECES
|| (speed_p && size > MOVE_MAX_PIECES))
/* Output insn cost for next insn. */
void
-avr_final_prescan_insn (rtx_insn *insn, rtx *operand ATTRIBUTE_UNUSED,
- int num_operands ATTRIBUTE_UNUSED)
+avr_final_prescan_insn (rtx_insn *insn, rtx * /*operands*/,
+ int /*num_operands*/)
{
if (avr_log.rtx_costs)
{
void
avr_init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
- tree fndecl ATTRIBUTE_UNUSED)
+ tree /*fndecl*/)
{
cum->nregs = AVR_TINY ? 1 + REG_25 - REG_20 : 1 + REG_25 - REG_8;
cum->regno = FIRST_CUM_REG;
static int
avr_num_arg_regs (machine_mode mode, const_tree type)
{
- int size;
-
- if (mode == BLKmode)
- size = int_size_in_bytes (type);
- else
- size = GET_MODE_SIZE (mode);
+ int size = (mode == BLKmode
+ ? int_size_in_bytes (type)
+ : GET_MODE_SIZE (mode));
/* Align all function arguments to start in even-numbered registers.
Odd-sized arguments leave holes above them. */
in the argument list. */
static void
-avr_function_arg_advance (cumulative_args_t cum_v,
- const function_arg_info &arg)
+avr_function_arg_advance (cumulative_args_t cum_v, const function_arg_info &arg)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes = avr_num_arg_regs (arg.mode, arg.type);
static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
- tree fntype_callee;
-
/* Tail-calling must fail if callee-saved regs are used to pass
function args. We must not tail-call when `epilogue_restores'
is used. Unfortunately, we cannot tell at this point if that
return false;
}
- fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
+ tree fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
if (decl_callee)
{
rtx xop[7];
rtx dest = op[0];
rtx src = SET_SRC (single_set (insn));
- rtx addr;
int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
- int segment;
- RTX_CODE code;
addr_space_t as = MEM_ADDR_SPACE (src);
if (plen)
return "";
}
- addr = XEXP (src, 0);
- code = GET_CODE (addr);
+ rtx addr = XEXP (src, 0);
+ RTX_CODE code = GET_CODE (addr);
gcc_assert (REG_P (dest));
gcc_assert (REG == code || POST_INC == code);
xop[5] = tmp_reg_rtx;
xop[6] = XEXP (rampz_rtx, 0);
- segment = avr_addrspace[as].segment;
+ int segment = avr_addrspace[as].segment;
/* Set RAMPZ as needed. */
/* Worker function for xload_8 insn. */
const char *
-avr_out_xload (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
+avr_out_xload (rtx_insn * /*insn*/, rtx *op, int *plen)
{
rtx xop[4];
static const char *
avr_out_movhi_r_mr_pre_dec_tiny (rtx_insn *insn, rtx op[], int *plen)
{
- int mem_volatile_p = 0;
rtx dest = op[0];
rtx src = op[1];
rtx base = XEXP (src, 0);
/* "volatile" forces reading low byte first, even if less efficient,
for correct operation with 16-bit I/O registers. */
- mem_volatile_p = MEM_VOLATILE_P (src);
+ bool mem_volatile_p = MEM_VOLATILE_P (src);
if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
fatal_insn ("incorrect insn:", insn);
int reg_base = true_regnum (base);
/* "volatile" forces reading low byte first, even if less efficient,
for correct operation with 16-bit I/O registers. */
- int mem_volatile_p = MEM_VOLATILE_P (src);
+ bool mem_volatile_p = MEM_VOLATILE_P (src);
if (reg_base > 0)
{
/* "volatile" forces writing low byte first, even if less efficient,
for correct operation with 16-bit I/O registers like SP. */
- int mem_volatile_p = MEM_VOLATILE_P (dest);
+ bool mem_volatile_p = MEM_VOLATILE_P (dest);
if (CONSTANT_ADDRESS_P (base))
{
rtx base = XEXP (dest, 0);
int reg_base = true_regnum (base);
int reg_src = true_regnum (src);
- int mem_volatile_p = MEM_VOLATILE_P (dest);
+ bool mem_volatile_p = MEM_VOLATILE_P (dest);
if (reg_base == reg_src)
{
rtx base = XEXP (dest, 0);
int reg_base = true_regnum (base);
int reg_src = true_regnum (src);
- int mem_volatile_p;
/* "volatile" forces writing high-byte first (no-xmega) resp.
low-byte first (xmega) even if less efficient, for correct
if (AVR_XMEGA)
return avr_out_movhi_mr_r_xmega (insn, op, plen);
- mem_volatile_p = MEM_VOLATILE_P (dest);
+ bool mem_volatile_p = MEM_VOLATILE_P (dest);
if (CONSTANT_ADDRESS_P (base))
{
rtx xreg = xop[0];
rtx xval = xop[1];
- /* MODE of the comparison. */
- machine_mode mode;
-
/* Number of bytes to operate on. */
int n_bytes = GET_MODE_SIZE (GET_MODE (xreg));
xval = avr_to_int_mode (xop[1]);
}
- mode = GET_MODE (xreg);
+ /* MODE of the comparison. */
+ machine_mode mode = GET_MODE (xreg);
gcc_assert (REG_P (xreg));
gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
const char *
avr_out_compare64 (rtx_insn *insn, rtx *op, int *plen)
{
- rtx xop[3];
-
- xop[0] = gen_rtx_REG (DImode, ACC_A);
- xop[1] = op[0];
- xop[2] = op[1];
+ rtx xop[3] = { gen_rtx_REG (DImode, ACC_A), op[0], op[1] };
return avr_out_compare (insn, xop, plen);
}
preparing operands for calls to `avr_out_plus' and `avr_out_bitop'. */
const char *
-avr_out_round (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *xop, int *plen)
+avr_out_round (rtx_insn * /*insn*/, rtx *xop, int *plen)
{
scalar_mode mode = as_a <scalar_mode> (GET_MODE (xop[0]));
scalar_int_mode imode = int_mode_for_mode (mode).require ();
/* Work out if byte or word move is needed. Odd byte rotates need QImode.
Word move if no scratch is needed, otherwise use size of scratch. */
machine_mode move_mode = QImode;
- int move_size, offset, size;
if (num & 0xf)
move_mode = QImode;
&& QImode == move_mode)
scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
- move_size = GET_MODE_SIZE (move_mode);
+ int move_size = GET_MODE_SIZE (move_mode);
/* Number of bytes/words to rotate. */
- offset = (num >> 3) / move_size;
+ int offset = (num >> 3) / move_size;
/* Number of moves needed. */
- size = GET_MODE_SIZE (mode) / move_size;
+ int size = GET_MODE_SIZE (mode) / move_size;
/* Himode byte swap is special case to avoid a scratch register. */
if (mode == HImode && same_reg)
{
avr_adjust_insn_length (rtx_insn *insn, int len)
{
rtx *op = recog_data.operand;
- enum attr_adjust_len adjust_len;
/* As we pretend jump tables in .text, fix branch offsets crossing jump
tables now. */
/* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
- adjust_len = get_attr_adjust_len (insn);
+ enum attr_adjust_len adjust_len = get_attr_adjust_len (insn);
if (adjust_len == ADJUST_LEN_NO)
{
We assume REG is a reload reg, and therefore does
not live past labels. It may live past calls or jumps though. */
-int
+bool
_reg_unused_after (rtx_insn *insn, rtx reg, bool look_at_insn)
{
if (look_at_insn)
return default_assemble_integer (x, size, aligned_p);
}
-/* Implement TARGET_CLASS_MAX_NREGS. Reasons described in comments for
+/* Implement `TARGET_CLASS_MAX_NREGS'. Reasons described in comments for
avr_hard_regno_nregs. */
static unsigned char
avr_class_max_nregs (reg_class_t rclass, machine_mode mode)
{
if (rclass == CC_REG && mode == CCmode)
- return 1;
+ return 1;
return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
}
static bool
avr_class_likely_spilled_p (reg_class_t c)
{
- return (c != ALL_REGS &&
- (AVR_TINY ? 1 : c != ADDW_REGS));
+ return (c != ALL_REGS
+ && (AVR_TINY ? 1 : c != ADDW_REGS));
}
struct attribute_spec.handler. */
static tree
-avr_handle_progmem_attribute (tree *node, tree name,
- tree args ATTRIBUTE_UNUSED,
- int flags ATTRIBUTE_UNUSED,
- bool *no_add_attrs)
+avr_handle_progmem_attribute (tree *node, tree name, tree args,
+ int /*flags*/, bool *no_add_attrs)
{
if (DECL_P (*node))
{
struct attribute_spec.handler. */
static tree
-avr_handle_fndecl_attribute (tree *node, tree name,
- tree args ATTRIBUTE_UNUSED,
- int flags ATTRIBUTE_UNUSED,
- bool *no_add_attrs)
+avr_handle_fndecl_attribute (tree *node, tree name, tree /*args*/,
+ int /*flags*/, bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_DECL)
{
}
static tree
-avr_handle_fntype_attribute (tree *node, tree name,
- tree args ATTRIBUTE_UNUSED,
- int flags ATTRIBUTE_UNUSED,
- bool *no_add_attrs)
+avr_handle_fntype_attribute (tree *node, tree name, tree /*args*/,
+ int /*flags*/, bool *no_add_attrs)
{
if (TREE_CODE (*node) != FUNCTION_TYPE)
{
static tree
avr_handle_addr_attribute (tree *node, tree name, tree args,
- int flags ATTRIBUTE_UNUSED, bool *no_add)
+ int /*flags*/, bool *no_add)
{
bool io_p = startswith (IDENTIFIER_POINTER (name), "io");
HOST_WIDE_INT io_start = avr_arch->sfr_offset;
int
avr_progmem_p (tree decl, tree attributes)
{
- tree a;
-
if (TREE_CODE (decl) != VAR_DECL)
return 0;
!= lookup_attribute ("progmem", attributes))
return -1;
- a = decl;
+ tree a = decl;
do
a = TREE_TYPE(a);
if (POINTER_TYPE_P (typ))
{
- addr_space_t as;
tree target = TREE_TYPE (typ);
/* Pointer to function: Test the function's return type. */
/* Pointers to non-generic address space must be const. */
- as = TYPE_ADDR_SPACE (target);
+ addr_space_t as = TYPE_ADDR_SPACE (target);
if (!ADDR_SPACE_GENERIC_P (as)
&& !TYPE_READONLY (target)
&& (TREE_STATIC (node) || DECL_EXTERNAL (node))
&& avr_progmem_p (node, *attributes))
{
- addr_space_t as;
tree node0 = node;
/* For C++, we have to peel arrays in order to get correct
if (error_mark_node == node0)
return;
- as = TYPE_ADDR_SPACE (TREE_TYPE (node));
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (node));
if (!TYPE_READONLY (node0)
&& !TREE_READONLY (node))
if (!avr_has_rodata_p)
avr_has_rodata_p = (startswith (name, ".rodata")
- || startswith (name, ".gnu.linkonce.r"));
+ || startswith (name, ".gnu.linkonce.r"));
if (!avr_need_clear_bss_p)
avr_need_clear_bss_p = startswith (name, ".bss");
/* Implement `TARGET_REGISTER_MOVE_COST' */
static int
-avr_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
- reg_class_t from, reg_class_t to)
+avr_register_move_cost (machine_mode /*mode*/, reg_class_t from, reg_class_t to)
{
return (from == STACK_REG ? 6
: to == STACK_REG ? 12
/* Implement `TARGET_MEMORY_MOVE_COST' */
static int
-avr_memory_move_cost (machine_mode mode,
- reg_class_t rclass ATTRIBUTE_UNUSED,
- bool in ATTRIBUTE_UNUSED)
+avr_memory_move_cost (machine_mode mode, reg_class_t /*rclass*/, bool /*in*/)
{
return (mode == QImode ? 2
: mode == HImode ? 4
int opno, bool speed)
{
enum rtx_code code = GET_CODE (x);
- int total;
switch (code)
{
break;
}
- total = 0;
+ int total = 0;
avr_rtx_costs (x, mode, outer, opno, &total, speed);
return total;
}
static bool
avr_rtx_costs_1 (rtx x, machine_mode mode, int outer_code,
- int opno ATTRIBUTE_UNUSED, int *total, bool speed)
+ int /*opno*/, int *total, bool speed)
{
enum rtx_code code = GET_CODE (x);
HOST_WIDE_INT val;
/* Implement `TARGET_ADDRESS_COST'. */
static int
-avr_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
- addr_space_t as ATTRIBUTE_UNUSED,
- bool speed ATTRIBUTE_UNUSED)
+avr_address_cost (rtx x, machine_mode mode, addr_space_t /*as*/,
+ bool /*speed*/)
{
int cost = 4;
static inline unsigned int
avr_ret_register (void)
{
- return 24;
+ return REG_24;
}
static bool
avr_function_value_regno_p (const unsigned int regno)
{
- return (regno == avr_ret_register ());
+ return regno == avr_ret_register ();
}
library function returns a value of mode MODE. */
static rtx
-avr_libcall_value (machine_mode mode,
- const_rtx func ATTRIBUTE_UNUSED)
+avr_libcall_value (machine_mode mode, const_rtx /*func*/)
{
int offs = GET_MODE_SIZE (mode);
function returns a value of data type VALTYPE. */
static rtx
-avr_function_value (const_tree type,
- const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
- bool outgoing ATTRIBUTE_UNUSED)
+avr_function_value (const_tree type, const_tree /*fn_decl_or_type*/,
+ bool /*outgoing*/)
{
- unsigned int offs;
-
if (TYPE_MODE (type) != BLKmode)
return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
- offs = int_size_in_bytes (type);
+ unsigned int offs = int_size_in_bytes (type);
if (offs < 2)
offs = 2;
if (offs > 2 && offs < GET_MODE_SIZE (SImode))
/* Implement `MODE_CODE_BASE_REG_CLASS'. */
enum reg_class
-avr_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
- addr_space_t as, RTX_CODE outer_code,
- RTX_CODE index_code ATTRIBUTE_UNUSED)
+avr_mode_code_base_reg_class (machine_mode /*mode*/, addr_space_t as,
+ RTX_CODE outer_code, RTX_CODE /*index_code*/)
{
if (!ADDR_SPACE_GENERIC_P (as))
{
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
bool
-avr_regno_mode_code_ok_for_base_p (int regno,
- machine_mode mode ATTRIBUTE_UNUSED,
- addr_space_t as ATTRIBUTE_UNUSED,
- RTX_CODE outer_code,
- RTX_CODE index_code ATTRIBUTE_UNUSED)
+avr_regno_mode_code_ok_for_base_p (int regno, machine_mode /*mode*/,
+ addr_space_t as, RTX_CODE outer_code,
+ RTX_CODE /*index_code*/)
{
bool ok = false;
for (int n = 0; n < n_bytes; n++)
{
- int ldreg_p;
bool done_byte = false;
rtx xop[3];
/* Crop the n-th destination byte. */
xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
- ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
+ int ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
if (!CONST_INT_P (src)
&& !CONST_FIXED_P (src)
|| CONST_FIXED_P (op[1])
|| CONST_DOUBLE_P (op[1])))
{
- int len_clr, len_noclr;
-
/* In some cases it is better to clear the destination beforehand, e.g.
CLR R2 CLR R3 MOVW R4,R2 INC R2
Instead, we call the print function twice to get the lengths of
both methods and use the shortest one. */
+ int len_clr, len_noclr;
output_reload_in_const (op, clobber_reg, &len_clr, true);
output_reload_in_const (op, clobber_reg, &len_noclr, false);
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
int
-avr_hard_regno_rename_ok (unsigned int old_reg,
- unsigned int new_reg)
+avr_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
{
/* Interrupt functions can only use registers that have already been
saved by the prologue, even if they would normally be
/* Implement `TARGET_RETURN_IN_MEMORY'. */
static bool
-avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
+avr_return_in_memory (const_tree type, const_tree /*fntype*/)
{
HOST_WIDE_INT size = int_size_in_bytes (type);
HOST_WIDE_INT ret_size_limit = AVR_TINY ? 4 : 8;
if (as_from != ADDR_SPACE_MEMX
&& as_to == ADDR_SPACE_MEMX)
{
- int msb;
rtx sym = src;
rtx reg = gen_reg_rtx (PSImode);
/* Linearize memory: RAM has bit 23 set. */
- msb = ADDR_SPACE_GENERIC_P (as_from)
+ int msb = ADDR_SPACE_GENERIC_P (as_from)
? 0x80
: avr_addrspace[as_from].segment;
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
static bool
-avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
- addr_space_t superset ATTRIBUTE_UNUSED)
+avr_addr_space_subset_p (addr_space_t /*subset*/, addr_space_t /*superset*/)
{
/* Allow any kind of pointer mess. */
bool
avr_emit_cpymemhi (rtx *xop)
{
- HOST_WIDE_INT count;
machine_mode loop_mode;
addr_space_t as = MEM_ADDR_SPACE (xop[1]);
- rtx loop_reg, addr1, a_src, a_dest, insn, xas;
+ rtx loop_reg, addr1, insn;
rtx a_hi8 = NULL_RTX;
if (avr_mem_flash_p (xop[0]))
if (!CONST_INT_P (xop[2]))
return false;
- count = INTVAL (xop[2]);
+ HOST_WIDE_INT count = INTVAL (xop[2]);
if (count <= 0)
return false;
- a_src = XEXP (xop[1], 0);
- a_dest = XEXP (xop[0], 0);
+ rtx a_src = XEXP (xop[1], 0);
+ rtx a_dest = XEXP (xop[0], 0);
if (PSImode == GET_MODE (a_src))
{
loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
}
- xas = GEN_INT (as);
+ rtx xas = GEN_INT (as);
/* FIXME: Register allocator might come up with spill fails if it is left
on its own. Thus, we allocate the pointer registers by hand:
*/
const char *
-avr_out_cpymem (rtx_insn *insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
+avr_out_cpymem (rtx_insn * /*insn*/, rtx *op, int *plen)
{
addr_space_t as = (addr_space_t) INTVAL (op[0]);
machine_mode loop_mode = GET_MODE (op[1]);
bool sbiw_p = avr_adiw_reg_p (op[1]);
- rtx xop[3];
+ rtx xop[3] = { op[0], op[1], tmp_reg_rtx };
if (plen)
*plen = 0;
- xop[0] = op[0];
- xop[1] = op[1];
- xop[2] = tmp_reg_rtx;
-
/* Loop label */
avr_asm_len ("0:", xop, plen, 0);
avr_out_insert_bits (rtx *op, int *plen)
{
unsigned int map = UINTVAL (op[1]) & GET_MODE_MASK (SImode);
- unsigned mask_fixed;
bool fixp_p = true;
rtx xop[4];
/* If MAP has fixed points it might be better to initialize the result
with the bits to be inserted instead of moving all bits by hand. */
- mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
+ unsigned mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
if (REGNO (xop[0]) == REGNO (xop[1]))
{
/* Implement `TARGET_BUILTIN_DECL'. */
static tree
-avr_builtin_decl (unsigned id, bool initialize_p ATTRIBUTE_UNUSED)
+avr_builtin_decl (unsigned id, bool /*initialize_p*/)
{
if (id < AVR_BUILTIN_COUNT)
return avr_bdesc[id].fndecl;
IGNORE is nonzero if the value is to be ignored. */
static rtx
-avr_expand_builtin (tree exp, rtx target,
- rtx subtarget ATTRIBUTE_UNUSED,
- machine_mode mode ATTRIBUTE_UNUSED,
- int ignore)
+avr_expand_builtin (tree exp, rtx target, rtx /*subtarget*/,
+ machine_mode mode, int ignore)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
const char *bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
/* Implement `TARGET_FOLD_BUILTIN'. */
static tree
-avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
- bool ignore ATTRIBUTE_UNUSED)
+avr_fold_builtin (tree fndecl, int /*n_args*/, tree *arg, bool /*ignore*/)
{
unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
{
tree tbits = arg[1];
tree tval = arg[2];
- tree tmap;
tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
- unsigned int map;
bool changed = false;
avr_map_op_t best_g;
break;
}
- tmap = wide_int_to_tree (map_type, wi::to_wide (arg[0]));
- map = TREE_INT_CST_LOW (tmap);
+ tree tmap = wide_int_to_tree (map_type, wi::to_wide (arg[0]));
+ unsigned int map = TREE_INT_CST_LOW (tmap);
if (TREE_CODE (tval) != INTEGER_CST
&& avr_map_metric (map, MAP_MASK_PREIMAGE_F) == 0)
rtx offset = gen_int_mode (targetm.starting_frame_offset (), Pmode);
emit_move_insn (virtual_stack_vars_rtx,
gen_rtx_PLUS (Pmode, hard_frame_pointer_rtx, offset));
- /* ; This might change the hard frame pointer in ways that aren't
- ; apparent to early optimization passes, so force a clobber. */
+ // This might change the hard frame pointer in ways that aren't
+ // apparent to early optimization passes, so force a clobber.
emit_clobber (hard_frame_pointer_rtx);
DONE;
})
"&& 1"
[(clobber (const_int 0))]
{
- /* ; Split away the high part of the address. GCC's register allocator
- ; in not able to allocate segment registers and reload the resulting
- ; expressions. Notice that no address register can hold a PSImode. */
+ // Split away the high part of the address. GCC's register allocator
+ // is not able to allocate segment registers and reload the resulting
+ // expressions. Notice that no address register can hold a PSImode.
- rtx_insn *insn;
rtx addr = XEXP (operands[1], 0);
rtx hi8 = gen_reg_rtx (QImode);
rtx reg_z = gen_rtx_REG (HImode, REG_Z);
emit_move_insn (reg_z, simplify_gen_subreg (HImode, addr, PSImode, 0));
emit_move_insn (hi8, simplify_gen_subreg (QImode, addr, PSImode, 2));
- insn = emit_insn (gen_xload<mode>_8 (operands[0], hi8));
+ rtx_insn *insn = emit_insn (gen_xload<mode>_8 (operands[0], hi8));
set_mem_addr_space (SET_SRC (single_set (insn)),
MEM_ADDR_SPACE (operands[1]));
DONE;
addr_space_t as = MEM_ADDR_SPACE (operands[1]);
rtx_insn *insn;
- /* Split the address to R21:Z */
+ // Split the address to R21:Z
emit_move_insn (reg_z, simplify_gen_subreg (HImode, addr, PSImode, 0));
emit_move_insn (gen_rtx_REG (QImode, 21), addr_hi8);
- /* Load with code from libgcc */
+ // Load with code from libgcc.
insn = emit_insn (gen_xload_<mode>_libgcc ());
set_mem_addr_space (SET_SRC (single_set (insn)), as);
- /* Move to destination */
+ // Move to destination.
emit_move_insn (operands[0], gen_rtx_REG (<MODE>mode, 22));
DONE;
SUBREG_REG (src) = copy_to_mode_reg (GET_MODE (const_addr), const_addr);
}
- /* One of the operands has to be in a register. */
+ // One of the operands has to be in a register.
if (!register_operand (dest, <MODE>mode)
&& !reg_or_0_operand (src, <MODE>mode))
{
src = replace_equiv_address (src, copy_to_mode_reg (PSImode, addr));
if (!avr_xload_libgcc_p (<MODE>mode))
- /* ; No <mode> here because gen_xload8<mode>_A only iterates over ALL1.
- ; insn-emit does not depend on the mode, it's all about operands. */
+ // No <mode> here because gen_xload8<mode>_A only iterates over ALL1.
+ // insn-emit does not depend on the mode, it's all about operands.
emit_insn (gen_xload8qi_A (dest, src));
else
emit_insn (gen_xload<mode>_A (dest, src));
if (avr_load_libgcc_p (src))
{
- /* For the small devices, do loads per libgcc call. */
+ // For the small devices, do loads per libgcc call.
emit_insn (gen_load<mode>_libgcc (dest, src));
DONE;
}
(clobber (match_dup 4))])]
""
{
- rtx addr0;
- machine_mode mode;
-
- /* If value to set is not zero, use the library routine. */
+ // If value to set is not zero, use the library routine.
if (operands[2] != const0_rtx)
FAIL;
if (!CONST_INT_P (operands[1]))
FAIL;
- mode = u8_operand (operands[1], VOIDmode) ? QImode : HImode;
+ machine_mode mode = u8_operand (operands[1], VOIDmode) ? QImode : HImode;
operands[4] = gen_rtx_SCRATCH (mode);
operands[1] = copy_to_mode_reg (mode,
gen_int_mode (INTVAL (operands[1]), mode));
- addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
+ rtx addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
operands[0] = gen_rtx_MEM (BLKmode, addr0);
})
(clobber (scratch:QI))])]
""
{
- rtx addr;
if (operands[2] != const0_rtx)
FAIL;
- addr = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
+ rtx addr = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
operands[1] = gen_rtx_MEM (BLKmode, addr);
operands[5] = addr;
operands[4] = gen_reg_rtx (HImode);
""
[(const_int 0)]
{
- /* Do not attempt to split this pattern. This FAIL is necessary
- to prevent the splitter from matching *add<ALL2>3_split, splitting
- it, and then failing later because constraints don't match, as split
- does not look at constraints. */
+ // Do not attempt to split this pattern. This FAIL is necessary
+ // to prevent the splitter from matching *add<ALL2>3_split, splitting
+ // it, and then failing later because constraints don't match, as split
+ // does not look at constraints.
FAIL;
}
[(set_attr "length" "6")
DONE;
}
- /* ; For small constants we can do better by extending them on the fly.
- ; The constant can be loaded in one instruction and the widening
- ; multiplication is shorter. First try the unsigned variant because it
- ; allows constraint "d" instead of "a" for the signed version. */
+ // For small constants we can do better by extending them on the fly.
+ // The constant can be loaded in one instruction and the widening
+ // multiplication is shorter. First try the unsigned variant because it
+ // allows constraint "d" instead of "a" for the signed version.
if (s9_operand (operands[2], HImode))
{
(set (match_dup 0)
(reg:SI 22))]
{
- /* Do the QI -> HI extension explicitely before the multiplication. */
- /* Do the HI -> SI extension implicitely and after the multiplication. */
+ // Do the QI -> HI extension explicitly before the multiplication.
+ // Do the HI -> SI extension implicitly and after the multiplication.
if (QImode == <MODE>mode)
operands[1] = gen_rtx_ZERO_EXTEND (HImode, operands[1]);
(set (match_dup 0)
(reg:SI 22))]
{
- /* Do the QI -> HI extension explicitely before the multiplication. */
- /* Do the HI -> SI extension implicitely and after the multiplication. */
+ // Do the QI -> HI extension explicitly before the multiplication.
+ // Do the HI -> SI extension implicitly and after the multiplication.
if (QImode == <MODE>mode)
operands[1] = gen_rtx_SIGN_EXTEND (HImode, operands[1]);
rtx xop1 = operands[1];
rtx xop2 = operands[2];
- /* Do the QI -> HI extension explicitely before the multiplication. */
- /* Do the HI -> SI extension implicitely and after the multiplication. */
+ // Do the QI -> HI extension explicitly before the multiplication.
+ // Do the HI -> SI extension implicitly and after the multiplication.
if (QImode == <QIHI:MODE>mode)
xop1 = gen_rtx_fmt_e (<any_extend:CODE>, HImode, xop1);
}
else
{
- /* <any_extend:CODE> = SIGN_EXTEND */
- /* <any_extend2:CODE> = ZERO_EXTEND */
+ // <any_extend:CODE> = SIGN_EXTEND
+ // <any_extend2:CODE> = ZERO_EXTEND
operands[1] = xop2;
operands[2] = xop1;
(match_operand:SI 2 "pseudo_register_operand")))
(set (match_operand:SI 3 "pseudo_register_operand")
(mod:SI (match_dup 1)
- (match_dup 2)))
+ (match_dup 2)))
(clobber (reg:SI 18))
(clobber (reg:SI 22))
(clobber (reg:HI 26))
else if (offset == 1
|| offset == GET_MODE_BITSIZE (<MODE>mode) -1)
{
- /*; Support rotate left/right by 1 */
+ // Support rotate left/right by 1.
emit_move_insn (operands[0],
gen_rtx_ROTATE (<MODE>mode, operands[1], operands[2]));
(lshiftrt:ALL1 (match_operand:ALL1 1 "register_operand" "")
(match_operand:QI 2 "nop_general_operand" "")))])
-(define_split ; lshrqi3_const4
+(define_split ; lshrqi3_const4
[(set (match_operand:ALL1 0 "d_register_operand" "")
(lshiftrt:ALL1 (match_dup 0)
(const_int 4)))]
operands[1] = avr_to_int_mode (operands[0]);
})
-(define_split ; lshrqi3_const5
+(define_split ; lshrqi3_const5
[(set (match_operand:ALL1 0 "d_register_operand" "")
(lshiftrt:ALL1 (match_dup 0)
(const_int 5)))]
operands[1] = avr_to_int_mode (operands[0]);
})
-(define_split ; lshrqi3_const6
+(define_split ; lshrqi3_const6
[(set (match_operand:QI 0 "d_register_operand" "")
(lshiftrt:QI (match_dup 0)
(const_int 6)))]
(define_insn_and_split "negsf2"
[(set (match_operand:SF 0 "register_operand" "=d,r")
- (neg:SF (match_operand:SF 1 "register_operand" "0,0")))]
+ (neg:SF (match_operand:SF 1 "register_operand" "0,0")))]
""
"#"
"&& reload_completed"
[(parallel [(set (match_dup 0)
- (neg:SF (match_dup 1)))
+ (neg:SF (match_dup 1)))
(clobber (reg:CC REG_CC))])])
(define_insn "*negsf2"
[(set (match_operand:SF 0 "register_operand" "=d,r")
- (neg:SF (match_operand:SF 1 "register_operand" "0,0")))
+ (neg:SF (match_operand:SF 1 "register_operand" "0,0")))
(clobber (reg:CC REG_CC))]
"reload_completed"
"@
: "rjmp %x0";
}
[(set (attr "length")
- (if_then_else (match_operand 0 "symbol_ref_operand" "")
+ (if_then_else (match_operand 0 "symbol_ref_operand" "")
(if_then_else (match_test "!AVR_HAVE_JMP_CALL")
(const_int 1)
(const_int 2))
(pc)))]
"dead_or_set_regno_p (insn, REG_CC)"
{
- const char *op;
int jump_mode;
if (avr_adiw_reg_p (operands[0]))
output_asm_insn ("sbiw %0,1", operands);
"sbc %B0,__zero_reg__", operands);
jump_mode = avr_jump_mode (operands[2], insn);
- op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
+ const char *op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
operands[1] = gen_rtx_CONST_STRING (VOIDmode, op);
switch (jump_mode)
(pc)))]
"dead_or_set_regno_p (insn, REG_CC)"
{
- const char *op;
- int jump_mode;
if (avr_adiw_reg_p (operands[0]))
output_asm_insn ("sbiw %0,1", operands);
else
output_asm_insn ("subi %A0,1" CR_TAB
"sbc %B0,__zero_reg__", operands);
- jump_mode = avr_jump_mode (operands[2], insn);
- op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
+ int jump_mode = avr_jump_mode (operands[2], insn);
+ const char *op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
operands[1] = gen_rtx_CONST_STRING (VOIDmode, op);
switch (jump_mode)
(pc)))]
"dead_or_set_regno_p (insn, REG_CC)"
{
- const char *op;
- int jump_mode;
output_asm_insn ("ldi %3,1" CR_TAB
"sub %A0,%3" CR_TAB
"sbc %B0,__zero_reg__", operands);
- jump_mode = avr_jump_mode (operands[2], insn);
- op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
+ int jump_mode = avr_jump_mode (operands[2], insn);
+ const char *op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
operands[1] = gen_rtx_CONST_STRING (VOIDmode, op);
switch (jump_mode)
(pc)))]
"dead_or_set_regno_p (insn, REG_CC)"
{
- const char *op;
- int jump_mode;
-
output_asm_insn ("subi %A0,1", operands);
- jump_mode = avr_jump_mode (operands[1], insn);
- op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
+ int jump_mode = avr_jump_mode (operands[1], insn);
+ const char *op = ((EQ == <CODE>) ^ (jump_mode == 1)) ? "brcc" : "brcs";
operands[0] = gen_rtx_CONST_STRING (VOIDmode, op);
switch (jump_mode)
[(unspec_volatile [(match_operand:QI 0 "const_int_operand" "L,P")]
UNSPECV_ENABLE_IRQS)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))]
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))]
""
"@
cli
(const_int 1)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))]
""
"#"
(const_int 1)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
(const_int 2)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:HI 2 "=&w,&d"))]
""
"#"
(const_int 2)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:HI 2 "=&w,&d"))
(clobber (reg:CC REG_CC))]
"reload_completed"
(const_int 3)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))
(clobber (match_scratch:QI 3 "=&d"))
(clobber (match_scratch:QI 4 "=&d"))]
(const_int 3)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))
(clobber (match_scratch:QI 3 "=&d"))
(clobber (match_scratch:QI 4 "=&d"))
(const_int 4)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))
(clobber (match_scratch:QI 3 "=&d"))
(clobber (match_scratch:QI 4 "=&d"))
(const_int 4)]
UNSPECV_DELAY_CYCLES)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))
(clobber (match_scratch:QI 2 "=&d"))
(clobber (match_scratch:QI 3 "=&d"))
(clobber (match_scratch:QI 4 "=&d"))
[(unspec_volatile [(match_operand:SI 0 "const_int_operand" "P,K")]
UNSPECV_NOP)
(set (match_operand:BLK 1 "" "")
- (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))]
+ (unspec_volatile:BLK [(match_dup 1)] UNSPECV_MEMORY_BARRIER))]
""
"@
nop
;; Emit the SLEEP instruction.  The set of a BLK operand from
;; UNSPECV_MEMORY_BARRIER models a memory barrier, so memory accesses
;; are not scheduled across the sleep.  Always one word long.
(define_insn "*sleep"
[(unspec_volatile [(const_int 0)] UNSPECV_SLEEP)
(set (match_operand:BLK 0 "" "")
- (unspec_volatile:BLK [(match_dup 0)] UNSPECV_MEMORY_BARRIER))]
+ (unspec_volatile:BLK [(match_dup 0)] UNSPECV_MEMORY_BARRIER))]
""
"sleep"
[(set_attr "length" "1")])
;; Emit the WDR (watchdog timer reset) instruction.  Like *sleep above,
;; the BLK set with UNSPECV_MEMORY_BARRIER keeps memory accesses from
;; being moved across it.  Always one word long.
(define_insn "*wdr"
[(unspec_volatile [(const_int 0)] UNSPECV_WDR)
(set (match_operand:BLK 0 "" "")
- (unspec_volatile:BLK [(match_dup 0)] UNSPECV_MEMORY_BARRIER))]
+ (unspec_volatile:BLK [(match_dup 0)] UNSPECV_MEMORY_BARRIER))]
""
"wdr"
[(set_attr "length" "1")])