/* Target machine subroutines for Altera Nios II.
- Copyright (C) 2012-2017 Free Software Foundation, Inc.
+ Copyright (C) 2012-2020 Free Software Foundation, Inc.
Contributed by Jonah Graham (jgraham@altera.com),
Will Reece (wreece@altera.com), and Jeff DaSilva (jdasilva@altera.com).
Contributed by Mentor Graphics, Inc.
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
+#define IN_TARGET_CODE 1
+
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "langhooks.h"
#include "stor-layout.h"
#include "builtins.h"
+#include "tree-pass.h"
+#include "xregex.h"
/* This file should be included last. */
#include "target-def.h"
/* Forward function declarations. */
+static bool nios2_symbolic_constant_p (rtx);
static bool prologue_saved_reg_p (unsigned);
static void nios2_load_pic_register (void);
static void nios2_register_custom_code (unsigned int, enum nios2_ccs_code, int);
static void nios2_register_builtin_fndecl (unsigned, tree);
static rtx nios2_ldst_parallel (bool, bool, bool, rtx, int,
unsigned HOST_WIDE_INT, bool);
+static int nios2_address_cost (rtx, machine_mode, addr_space_t, bool);
/* Threshold for data being put into the small data/bss area, instead
of the normal data area (references to the small data/bss area take
/* Set to true if any conflicts (re-use of a code between 0-255) are found. */
static bool custom_code_conflict = false;
+/* State for command-line options. */
+regex_t nios2_gprel_sec_regex;
+regex_t nios2_r0rel_sec_regex;
+
\f
/* Definition of builtin function types for nios2. */
{
gcc_assert (GP_REG_P (regno));
- if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
+ if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
return true;
if (regno == HARD_FRAME_POINTER_REGNUM && frame_pointer_needed)
switch (from)
{
case FRAME_POINTER_REGNUM:
- offset = cfun->machine->args_size;
+ /* This is the high end of the local variable storage, not the
+ hard frame pointer. */
+ offset = cfun->machine->args_size + cfun->machine->var_size;
break;
case ARG_POINTER_REGNUM:
for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
if (N2FPU_ENABLED_P (i) && N2FPU_UNSAFE_P (i))
warning (0, "switch %<-mcustom-%s%> has no effect unless "
- "-funsafe-math-optimizations is specified", N2FPU_NAME (i));
+ "%<-funsafe-math-optimizations%> is specified",
+ N2FPU_NAME (i));
/* Warn if the user is trying to use -mcustom-fmins et al., that won't
get used without -ffinite-math-only. See fold_builtin_fmin_fmax ()
for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
if (N2FPU_ENABLED_P (i) && N2FPU_FINITE_P (i))
warning (0, "switch %<-mcustom-%s%> has no effect unless "
- "-ffinite-math-only is specified", N2FPU_NAME (i));
+ "%<-ffinite-math-only%> is specified", N2FPU_NAME (i));
/* Warn if the user is trying to use a custom rounding instruction
that won't get used without -fno-math-errno. See
for (i = 0; i < ARRAY_SIZE (nios2_fpu_insn); i++)
if (N2FPU_ENABLED_P (i) && N2FPU_NO_ERRNO_P (i))
warning (0, "switch %<-mcustom-%s%> has no effect unless "
- "-fno-math-errno is specified", N2FPU_NAME (i));
+ "%<-fno-math-errno%> is specified", N2FPU_NAME (i));
if (errors || custom_code_conflict)
fatal_error (input_location,
- "conflicting use of -mcustom switches, target attributes, "
- "and/or __builtin_custom_ functions");
+ "conflicting use of %<-mcustom%> switches, target attributes, "
+ "and/or %<__builtin_custom_%> functions");
}
static void
{
int param = N2FPU_N (fpu_insn_index);
- if (0 <= param && param <= 255)
+ if (param >= 0 && param <= 255)
nios2_register_custom_code (param, CCS_FPU, fpu_insn_index);
/* Valid values are 0-255, but also allow -1 so that the
sorry ("position-independent code requires the Linux ABI");
if (flag_pic && stack_limit_rtx
&& GET_CODE (stack_limit_rtx) == SYMBOL_REF)
- sorry ("PIC support for -fstack-limit-symbol");
+ sorry ("PIC support for %<-fstack-limit-symbol%>");
/* Function to allocate machine-dependent function status. */
init_machine_status = &nios2_init_machine_status;
nios2_gpopt_option = gpopt_local;
}
+ /* GP-relative and r0-relative addressing don't make sense for PIC. */
+ if (flag_pic)
+ {
+ if (nios2_gpopt_option != gpopt_none)
+ error ("%<-mgpopt%> not supported with PIC.");
+ if (nios2_gprel_sec)
+ error ("%<-mgprel-sec=%> not supported with PIC.");
+ if (nios2_r0rel_sec)
+ error ("%<-mr0rel-sec=%> not supported with PIC.");
+ }
+
+ /* Process -mgprel-sec= and -mr0rel-sec=. */
+ if (nios2_gprel_sec)
+ {
+ if (regcomp (&nios2_gprel_sec_regex, nios2_gprel_sec,
+ REG_EXTENDED | REG_NOSUB))
+ error ("%<-mgprel-sec=%> argument is not a valid regular expression.");
+ }
+ if (nios2_r0rel_sec)
+ {
+ if (regcomp (&nios2_r0rel_sec_regex, nios2_r0rel_sec,
+ REG_EXTENDED | REG_NOSUB))
+ error ("%<-mr0rel-sec=%> argument is not a valid regular expression.");
+ }
+
/* If we don't have mul, we don't have mulx either! */
if (!TARGET_HAS_MUL && TARGET_HAS_MULX)
target_flags &= ~MASK_HAS_MULX;
cost has been computed, and false if subexpressions should be
scanned. In either case, *TOTAL contains the cost result. */
static bool
-nios2_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
- int outer_code ATTRIBUTE_UNUSED,
- int opno ATTRIBUTE_UNUSED,
- int *total, bool speed ATTRIBUTE_UNUSED)
+nios2_rtx_costs (rtx x, machine_mode mode,
+ int outer_code,
+ int opno,
+ int *total, bool speed)
{
int code = GET_CODE (x);
switch (code)
{
case CONST_INT:
- if (INTVAL (x) == 0)
+ if (INTVAL (x) == 0 || nios2_simple_const_p (x))
{
*total = COSTS_N_INSNS (0);
return true;
}
- else if (nios2_simple_const_p (x))
- {
- *total = COSTS_N_INSNS (2);
- return true;
- }
else
{
- *total = COSTS_N_INSNS (4);
+ /* High + lo_sum. */
+ *total = COSTS_N_INSNS (1);
return true;
}
case SYMBOL_REF:
case CONST:
case CONST_DOUBLE:
- {
- *total = COSTS_N_INSNS (4);
- return true;
- }
+ if (gprel_constant_p (x) || r0rel_constant_p (x))
+ {
+ *total = COSTS_N_INSNS (1);
+ return true;
+ }
+ else
+ {
+ /* High + lo_sum. */
+ *total = COSTS_N_INSNS (1);
+ return true;
+ }
+
+ case HIGH:
+ {
+ /* This is essentially a constant. */
+ *total = COSTS_N_INSNS (0);
+ return true;
+ }
+
+ case LO_SUM:
+ {
+ *total = COSTS_N_INSNS (0);
+ return true;
+ }
case AND:
{
return false;
}
+ /* For insns that have an execution latency (3 cycles), don't
+ penalize by the full amount since we can often schedule
+ to avoid it. */
case MULT:
{
- *total = COSTS_N_INSNS (1);
+ if (!TARGET_HAS_MUL)
+ *total = COSTS_N_INSNS (5); /* Guess? */
+ else if (speed)
+ *total = COSTS_N_INSNS (2); /* Latency adjustment. */
+ else
+ *total = COSTS_N_INSNS (1);
+ if (TARGET_HAS_MULX && GET_MODE (x) == DImode)
+ {
+ enum rtx_code c0 = GET_CODE (XEXP (x, 0));
+ enum rtx_code c1 = GET_CODE (XEXP (x, 1));
+ if ((c0 == SIGN_EXTEND && c1 == SIGN_EXTEND)
+ || (c0 == ZERO_EXTEND && c1 == ZERO_EXTEND))
+ /* This is the <mul>sidi3 pattern, which expands into 4 insns,
+ 2 multiplies and 2 moves. */
+ {
+ *total = *total * 2 + COSTS_N_INSNS (2);
+ return true;
+ }
+ }
return false;
}
- case SIGN_EXTEND:
+
+ case DIV:
{
- *total = COSTS_N_INSNS (3);
+ if (!TARGET_HAS_DIV)
+ *total = COSTS_N_INSNS (5); /* Guess? */
+ else if (speed)
+ *total = COSTS_N_INSNS (2); /* Latency adjustment. */
+ else
+ *total = COSTS_N_INSNS (1);
return false;
}
- case ZERO_EXTEND:
+
+ case ASHIFT:
+ case ASHIFTRT:
+ case LSHIFTRT:
+ case ROTATE:
{
- *total = COSTS_N_INSNS (1);
+ if (!speed)
+ *total = COSTS_N_INSNS (1);
+ else
+ *total = COSTS_N_INSNS (2); /* Latency adjustment. */
return false;
}
+
+ case ZERO_EXTRACT:
+ if (TARGET_HAS_BMX)
+ {
+ *total = COSTS_N_INSNS (1);
+ return true;
+ }
+ return false;
- case ZERO_EXTRACT:
- if (TARGET_HAS_BMX)
+ case SIGN_EXTEND:
+ {
+ if (MEM_P (XEXP (x, 0)))
+ *total = COSTS_N_INSNS (1);
+ else
+ *total = COSTS_N_INSNS (3);
+ return false;
+ }
+
+ case MEM:
{
- *total = COSTS_N_INSNS (1);
- return true;
+ rtx addr = XEXP (x, 0);
+
+ /* Account for cost of different addressing modes. */
+ *total = nios2_address_cost (addr, mode, ADDR_SPACE_GENERIC, speed);
+
+ if (outer_code == SET && opno == 0)
+ /* Stores execute in 1 cycle accounted for by
+ the outer SET. */
+ ;
+ else if (outer_code == SET || outer_code == SIGN_EXTEND
+ || outer_code == ZERO_EXTEND)
+ /* Latency adjustment. */
+ {
+ if (speed)
+ *total += COSTS_N_INSNS (1);
+ }
+ else
+ /* This is going to have to be split into a load. */
+ *total += COSTS_N_INSNS (speed ? 2 : 1);
+ return true;
}
- return false;
default:
return false;
}
-/* Addressing Modes. */
+/* Addressing modes and constants. */
+
+/* Symbol references and other 32-bit constants are split into
+ high/lo_sum pairs during the split1 pass. After that, they are not
+ considered legitimate addresses.
+ This function returns true if in a pre-split context where these
+ constants are allowed. */
+static bool
+nios2_large_constant_allowed (void)
+{
+ /* PROP_rtl_split_insns is set on the function once split1 has run. */
+ /* The reload_completed check is for the benefit of
+ nios2_asm_output_mi_thunk and perhaps other places that try to
+ emulate a post-reload pass. */
+ return !(cfun->curr_properties & PROP_rtl_split_insns) && !reload_completed;
+}
+
+/* Return true if X is constant expression with a reference to an
+ "ordinary" symbol; not GOT-relative, not GP-relative, not TLS. */
+static bool
+nios2_symbolic_constant_p (rtx x)
+{
+ rtx base, offset;
+
+ /* No "ordinary" symbols under PIC; those need GOT-relative handling. */
+ if (flag_pic)
+ return false;
+ if (GET_CODE (x) == LABEL_REF)
+ return true;
+ else if (CONSTANT_P (x))
+ {
+ /* Accept sym+offset only when the offset passes SMALL_INT,
+ i.e. it fits the signed 16-bit immediate field used by
+ the %lo relocation — TODO confirm against SMALL_INT's
+ definition in nios2.h. */
+ split_const (x, &base, &offset);
+ return (SYMBOL_REF_P (base)
+ && !SYMBOL_REF_TLS_MODEL (base)
+ && !gprel_constant_p (base)
+ && !r0rel_constant_p (base)
+ && SMALL_INT (INTVAL (offset)));
+ }
+ return false;
+}
+
+/* Return true if X is an expression of the form
+ (PLUS reg large_constant).
+ Such addresses must be split (see nios2_split_plus_large_constant)
+ before they can be used in instructions. */
+static bool
+nios2_plus_large_constant_p (rtx x)
+{
+ return (GET_CODE (x) == PLUS
+ && REG_P (XEXP (x, 0))
+ && nios2_large_constant_p (XEXP (x, 1)));
+}
/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
static bool
&& nios2_regno_ok_for_base_p (REGNO (base), strict_p)
&& (offset == NULL_RTX
|| nios2_valid_addr_offset_p (offset)
+ || (nios2_large_constant_allowed ()
+ && nios2_symbolic_constant_p (offset))
|| nios2_unspec_reloc_p (offset)));
}
/* Else, fall through. */
case CONST:
- if (gprel_constant_p (operand))
+ if (gprel_constant_p (operand) || r0rel_constant_p (operand))
return true;
/* Else, fall through. */
case LABEL_REF:
+ if (nios2_large_constant_allowed ()
+ && nios2_symbolic_constant_p (operand))
+ return true;
+ return false;
+
case CONST_INT:
+ if (r0rel_constant_p (operand))
+ return true;
+ return nios2_large_constant_allowed ();
+
case CONST_DOUBLE:
return false;
rtx op0 = XEXP (operand, 0);
rtx op1 = XEXP (operand, 1);
- return (nios2_valid_addr_expr_p (op0, op1, strict_p)
- || nios2_valid_addr_expr_p (op1, op0, strict_p));
+ if (nios2_valid_addr_expr_p (op0, op1, strict_p)
+ || nios2_valid_addr_expr_p (op1, op0, strict_p))
+ return true;
}
+ break;
+
+ /* %lo(constant)(reg)
+ This requires a 16-bit relocation and isn't valid with R2
+ io-variant load/stores. */
+ case LO_SUM:
+ if (TARGET_ARCH_R2
+ && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
+ return false;
+ else
+ {
+ rtx op0 = XEXP (operand, 0);
+ rtx op1 = XEXP (operand, 1);
+
+ return (REG_P (op0)
+ && nios2_regno_ok_for_base_p (REGNO (op0), strict_p)
+ && nios2_large_constant_p (op1));
+ }
default:
break;
return false;
}
+/* Implement TARGET_ADDRESS_COST.
+ Experimentation has shown that we get better code by penalizing
+ the (plus reg symbolic_constant) and (plus reg (const ...)) forms
+ but giving (plus reg symbol_ref) address modes the same cost as those
+ that don't require splitting. Also, from a theoretical point of view:
+ - This is in line with the recommendation in the GCC internals
+ documentation to make address forms involving multiple
+ registers more expensive than single-register forms.
+ - OTOH it still encourages fwprop1 to propagate constants into
+ address expressions more aggressively.
+ - We should discourage splitting (symbol + offset) into hi/lo pairs
+ to allow CSE'ing the symbol when it's used with more than one offset,
+ but not so heavily as to avoid this addressing mode at all. */
+static int
+nios2_address_cost (rtx address,
+ machine_mode mode ATTRIBUTE_UNUSED,
+ addr_space_t as ATTRIBUTE_UNUSED,
+ bool speed ATTRIBUTE_UNUSED)
+{
+ /* (plus reg large_constant) needs the 3-insn split emitted by
+ nios2_split_plus_large_constant; charge one extra insn. */
+ if (nios2_plus_large_constant_p (address))
+ return COSTS_N_INSNS (1);
+ if (nios2_large_constant_p (address))
+ {
+ /* Penalize (const (plus sym offset)) forms, but treat a bare
+ symbol_ref/label_ref address as free (see the head comment). */
+ if (GET_CODE (address) == CONST)
+ return COSTS_N_INSNS (1);
+ else
+ return COSTS_N_INSNS (0);
+ }
+ return COSTS_N_INSNS (0);
+}
+
+/* Return true if X is a MEM whose address expression involves a large (32-bit)
+ constant, either as the whole address or as the (plus reg constant)
+ form. Such MEMs need splitting via
+ nios2_split_large_constant_memory_operand. */
+bool
+nios2_large_constant_memory_operand_p (rtx x)
+{
+ rtx addr;
+
+ if (GET_CODE (x) != MEM)
+ return false;
+ addr = XEXP (x, 0);
+
+ return (nios2_large_constant_p (addr)
+ || nios2_plus_large_constant_p (addr));
+}
+
+
+/* Return true if X is something that needs to be split into a
+ high/lo_sum pair: an ordinary symbolic constant, a 32-bit UNSPEC
+ relocation, or an integer too big for a 16-bit immediate. */
+bool
+nios2_large_constant_p (rtx x)
+{
+ return (nios2_symbolic_constant_p (x)
+ || nios2_large_unspec_reloc_p (x)
+ || (CONST_INT_P (x) && !SMALL_INT (INTVAL (x))));
+}
+
+/* Given an RTX X that satisfies nios2_large_constant_p, split it into
+ high and lo_sum parts using TEMP as a scratch register. Emit the high
+ instruction and return the lo_sum expression.
+ Also handle special cases involving constant integers. */
+rtx
+nios2_split_large_constant (rtx x, rtx temp)
+{
+ if (CONST_INT_P (x))
+ {
+ HOST_WIDE_INT val = INTVAL (x);
+ /* Fits a signed 16-bit immediate: no split needed at all. */
+ if (SMALL_INT (val))
+ return x;
+ /* Values materializable by a single move: load into TEMP and
+ use it directly as the address. */
+ else if (SMALL_INT_UNSIGNED (val) || UPPER16_INT (val))
+ {
+ emit_move_insn (temp, x);
+ return temp;
+ }
+ else
+ {
+ /* Round to the nearest 0x10000 boundary so that LOW ends up
+ in the signed 16-bit range [-0x8000, 0x7fff]. */
+ HOST_WIDE_INT high = (val + 0x8000) & ~0xffff;
+ HOST_WIDE_INT low = val - high;
+ emit_move_insn (temp, gen_int_mode (high, Pmode));
+ return gen_rtx_PLUS (Pmode, temp, gen_int_mode (low, Pmode));
+ }
+ }
+
+ /* Symbolic case: emit (set temp (high x)) and hand back
+ (lo_sum temp x) for the caller to consume. */
+ emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (x))));
+ return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (x));
+}
+
+/* Split an RTX of the form
+ (plus op0 op1)
+ where op1 is a large constant into
+ (set temp (high op1))
+ (set temp (plus op0 temp))
+ (lo_sum temp op1)
+ returning the lo_sum expression as the value. */
+static rtx
+nios2_split_plus_large_constant (rtx op0, rtx op1)
+{
+ rtx temp = gen_reg_rtx (Pmode);
+ /* OP0 may be an arbitrary expression; the PLUS below needs a reg. */
+ op0 = force_reg (Pmode, op0);
+
+ emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (Pmode, copy_rtx (op1))));
+ emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, op0, temp)));
+ return gen_rtx_LO_SUM (Pmode, temp, copy_rtx (op1));
+}
+
+/* Given a MEM OP with an address that includes a splittable symbol or
+ other large constant, emit some instructions to do the split and
+ return a new MEM with the rewritten (splittable-free) address.
+ The caller must guarantee nios2_large_constant_memory_operand_p (OP). */
+rtx
+nios2_split_large_constant_memory_operand (rtx op)
+{
+ rtx addr = XEXP (op, 0);
+
+ if (nios2_large_constant_p (addr))
+ addr = nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
+ else if (nios2_plus_large_constant_p (addr))
+ addr = nios2_split_plus_large_constant (XEXP (addr, 0), XEXP (addr, 1));
+ else
+ gcc_unreachable ();
+ return replace_equiv_address (op, addr, false);
+}
+
/* Return true if SECTION is a small section name. */
static bool
nios2_small_section_name_p (const char *section)
return (strcmp (section, ".sbss") == 0
|| strncmp (section, ".sbss.", 6) == 0
|| strcmp (section, ".sdata") == 0
- || strncmp (section, ".sdata.", 7) == 0);
+ || strncmp (section, ".sdata.", 7) == 0
+ || (nios2_gprel_sec
+ && regexec (&nios2_gprel_sec_regex, section, 0, NULL, 0) == 0));
+}
+
+/* Return true if SECTION is a r0-relative section name, i.e. it
+ matches the regex given by -mr0rel-sec= (compiled during option
+ processing). Always false when the option was not given. */
+static bool
+nios2_r0rel_section_name_p (const char *section)
+{
+ return (nios2_r0rel_sec
+ && regexec (&nios2_r0rel_sec_regex, section, 0, NULL, 0) == 0);
}
/* Return true if EXP should be placed in the small data section. */
if (nios2_small_section_name_p (section))
return true;
}
+ else if (flexible_array_type_p (TREE_TYPE (exp))
+ && (!TREE_PUBLIC (exp) || DECL_EXTERNAL (exp)))
+ {
+ /* We really should not consider any objects of any flexibly-sized
+ type to be small data, but pre-GCC 10 did not test
+ for this and just fell through to the next case. Thus older
+ code compiled with -mgpopt=global could contain GP-relative
+ accesses to objects defined in this compilation unit with
+ external linkage. We retain the possible small-data treatment
+ of such definitions for backward ABI compatibility, but
+ no longer generate GP-relative accesses for external
+ references (so that the ABI could be changed in the future
+ with less potential impact), or objects with internal
+ linkage. */
+ return false;
+ }
else
{
HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
}
}
+/* Likewise for r0-relative addressing: return true if SYM, which must
+ be a SYMBOL_REF, refers to an object the user has placed in an
+ r0-relative section (via -mr0rel-sec= and a section attribute). */
+static bool
+nios2_symbol_ref_in_r0rel_data_p (rtx sym)
+{
+ tree decl;
+
+ gcc_assert (GET_CODE (sym) == SYMBOL_REF);
+ decl = SYMBOL_REF_DECL (sym);
+
+ /* TLS variables are not accessed through r0. */
+ if (SYMBOL_REF_TLS_MODEL (sym) != 0)
+ return false;
+
+ /* On Nios II R2, there is no r0-relative relocation that can be
+ used with "io" instructions. So, if we are implicitly generating
+ those instructions, we cannot emit r0-relative accesses. */
+ if (TARGET_ARCH_R2
+ && (TARGET_BYPASS_CACHE || TARGET_BYPASS_CACHE_VOLATILE))
+ return false;
+
+ /* If the user has explicitly placed the symbol in a r0rel section
+ via an attribute, generate r0-relative addressing. */
+ if (decl && DECL_SECTION_NAME (decl))
+ return nios2_r0rel_section_name_p (DECL_SECTION_NAME (decl));
+ return false;
+}
+
/* Implement TARGET_SECTION_TYPE_FLAGS. */
static unsigned int
base = nios2_legitimize_tls_address (base);
else if (flag_pic)
base = nios2_load_pic_address (base, UNSPEC_PIC_SYM, NULL_RTX);
+ else if (!nios2_large_constant_allowed ()
+ && nios2_symbolic_constant_p (addr))
+ return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
+ else if (CONST_INT_P (addr))
+ {
+ HOST_WIDE_INT val = INTVAL (addr);
+ if (SMALL_INT (val))
+ /* Use r0-relative addressing. */
+ return addr;
+ else if (!nios2_large_constant_allowed ())
+ /* Split into high/lo pair. */
+ return nios2_split_large_constant (addr, gen_reg_rtx (Pmode));
+ }
else
return addr;
nios2_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
machine_mode mode ATTRIBUTE_UNUSED)
{
+ rtx op0, op1;
+
if (CONSTANT_P (x))
return nios2_legitimize_constant_address (x);
+ /* Remaining cases all involve something + a constant. */
+ if (GET_CODE (x) != PLUS)
+ return x;
+
+ op0 = XEXP (x, 0);
+ op1 = XEXP (x, 1);
+
+ /* Target-independent code turns (exp + constant) into plain
+ register indirect. Although subsequent optimization passes will
+ eventually sort that out, ivopts uses the unoptimized form for
+ computing its cost model, so we get better results by generating
+ the correct form from the start. */
+ if (nios2_valid_addr_offset_p (op1))
+ return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
+
+ /* We may need to split symbolic constants now. */
+ else if (nios2_symbolic_constant_p (op1))
+ {
+ if (nios2_large_constant_allowed ())
+ return gen_rtx_PLUS (Pmode, force_reg (Pmode, op0), copy_rtx (op1));
+ else
+ return nios2_split_plus_large_constant (op0, op1);
+ }
+
/* For the TLS LE (Local Exec) model, the compiler may try to
combine constant offsets with unspec relocs, creating address RTXs
looking like this:
(const_int 48 [0x30])))] UNSPEC_ADD_TLS_LE)))
Which will be output as '%tls_le(var+48)(r23)' in assembly. */
- if (GET_CODE (x) == PLUS
- && GET_CODE (XEXP (x, 1)) == CONST)
+ else if (GET_CODE (op1) == CONST)
{
rtx unspec, offset;
- split_const (XEXP (x, 1), &unspec, &offset);
+ split_const (op1, &unspec, &offset);
if (GET_CODE (unspec) == UNSPEC
&& !nios2_large_offset_p (XINT (unspec, 1))
&& offset != const0_rtx)
{
- rtx reg = force_reg (Pmode, XEXP (x, 0));
+ rtx reg = force_reg (Pmode, op0);
unspec = copy_rtx (unspec);
XVECEXP (unspec, 0, 0)
= plus_constant (Pmode, XVECEXP (unspec, 0, 0), INTVAL (offset));
- x = gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
+ return gen_rtx_PLUS (Pmode, reg, gen_rtx_CONST (Pmode, unspec));
}
}
return true;
}
}
- else if (!gprel_constant_p (from))
+ else if (gprel_constant_p (from) || r0rel_constant_p (from))
+ /* Handled directly by movsi_internal as gp + offset
+ or r0 + offset. */
+ ;
+ else if (nios2_large_constant_p (from))
+ /* This case covers either a regular symbol reference or an UNSPEC
+ representing a 32-bit offset. We split the former
+ only conditionally and the latter always. */
+ {
+ if (!nios2_large_constant_allowed ()
+ || nios2_large_unspec_reloc_p (from))
+ {
+ rtx lo = nios2_split_large_constant (from, to);
+ emit_insn (gen_rtx_SET (to, lo));
+ set_unique_reg_note (get_last_insn (), REG_EQUAL,
+ copy_rtx (operands[1]));
+ return true;
+ }
+ }
+ else
+ /* This is a TLS or PIC symbol. */
{
- if (!nios2_large_unspec_reloc_p (from))
- from = nios2_legitimize_constant_address (from);
+ from = nios2_legitimize_constant_address (from);
if (CONSTANT_P (from))
{
emit_insn (gen_rtx_SET (to,
break;
}
+ debug_rtx (op);
output_operand_lossage ("Unsupported operand for code '%c'", letter);
gcc_unreachable ();
}
return false;
}
+/* Likewise if this is a zero-relative accessible reference: a symbol
+ in an r0rel section, a (const (plus sym offset)) whose symbol is
+ such, or a small integer usable directly as an offset from r0
+ (which reads as zero on Nios II). */
+bool
+r0rel_constant_p (rtx op)
+{
+ if (GET_CODE (op) == SYMBOL_REF
+ && nios2_symbol_ref_in_r0rel_data_p (op))
+ return true;
+ /* For (const (plus sym offset)), classify by the symbol part. */
+ else if (GET_CODE (op) == CONST
+ && GET_CODE (XEXP (op, 0)) == PLUS)
+ return r0rel_constant_p (XEXP (XEXP (op, 0), 0));
+ /* Small integers fit the 16-bit offset field of r0-based accesses. */
+ else if (GET_CODE (op) == CONST_INT
+ && SMALL_INT (INTVAL (op)))
+ return true;
+
+ return false;
+}
+
/* Return the name string for a supported unspec reloc offset. */
static const char *
nios2_unspec_reloc_name (int unspec)
fprintf (file, ")(%s)", reg_names[GP_REGNO]);
return;
}
-
+ else if (r0rel_constant_p (op))
+ {
+ if (CONST_INT_P (op))
+ {
+ output_addr_const (file, op);
+ fprintf (file, "(r0)");
+ return;
+ }
+ else
+ {
+ fprintf (file, "%%lo(");
+ output_addr_const (file, op);
+ fprintf (file, ")(r0)");
+ return;
+ }
+ }
break;
case PLUS:
}
break;
+ case LO_SUM:
+ {
+ rtx op0 = XEXP (op, 0);
+ rtx op1 = XEXP (op, 1);
+
+ if (REG_P (op0) && CONSTANT_P (op1))
+ {
+ nios2_print_operand (file, op1, 'L');
+ fprintf (file, "(%s)", reg_names[REGNO (op0)]);
+ return;
+ }
+ }
+ break;
+
case REG:
fprintf (file, "0(%s)", reg_names[REGNO (op)]);
return;
push the argument on the stack, or a hard register in which to
store the argument.
- MODE is the argument's machine mode.
- TYPE is the data type of the argument (as a tree).
- This is null for libcalls where that information may
- not be available.
CUM is a variable of type CUMULATIVE_ARGS which gives info about
the preceding args and about the function being called.
- NAMED is nonzero if this argument is a named parameter
- (otherwise it is an extra parameter matching an ellipsis). */
+ ARG is a description of the argument. */
static rtx
-nios2_function_arg (cumulative_args_t cum_v, machine_mode mode,
- const_tree type ATTRIBUTE_UNUSED,
- bool named ATTRIBUTE_UNUSED)
+nios2_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx return_rtx = NULL_RTX;
if (cum->regs_used < NUM_ARG_REGS)
- return_rtx = gen_rtx_REG (mode, FIRST_ARG_REGNO + cum->regs_used);
+ return_rtx = gen_rtx_REG (arg.mode, FIRST_ARG_REGNO + cum->regs_used);
return return_rtx;
}
in memory. */
static int
-nios2_arg_partial_bytes (cumulative_args_t cum_v,
- machine_mode mode, tree type ATTRIBUTE_UNUSED,
- bool named ATTRIBUTE_UNUSED)
+nios2_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
- HOST_WIDE_INT param_size;
-
- if (mode == BLKmode)
- {
- param_size = int_size_in_bytes (type);
- gcc_assert (param_size >= 0);
- }
- else
- param_size = GET_MODE_SIZE (mode);
+ HOST_WIDE_INT param_size = arg.promoted_size_in_bytes ();
+ gcc_assert (param_size >= 0);
/* Convert to words (round up). */
param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
return 0;
}
-/* Update the data in CUM to advance over an argument of mode MODE
- and data type TYPE; TYPE is null for libcalls where that information
- may not be available. */
+/* Update the data in CUM to advance over argument ARG. */
static void
-nios2_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
- const_tree type ATTRIBUTE_UNUSED,
- bool named ATTRIBUTE_UNUSED)
+nios2_function_arg_advance (cumulative_args_t cum_v,
+ const function_arg_info &arg)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
- HOST_WIDE_INT param_size;
-
- if (mode == BLKmode)
- {
- param_size = int_size_in_bytes (type);
- gcc_assert (param_size >= 0);
- }
- else
- param_size = GET_MODE_SIZE (mode);
+ HOST_WIDE_INT param_size = arg.promoted_size_in_bytes ();
+ gcc_assert (param_size >= 0);
/* Convert to words (round up). */
param_size = (UNITS_PER_WORD - 1 + param_size) / UNITS_PER_WORD;
own va_arg type. */
static void
nios2_setup_incoming_varargs (cumulative_args_t cum_v,
- machine_mode mode, tree type,
- int *pretend_size, int second_time)
+ const function_arg_info &arg,
+ int *pretend_size, int second_time)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
CUMULATIVE_ARGS local_cum;
cfun->machine->uses_anonymous_args = 1;
local_cum = *cum;
- nios2_function_arg_advance (local_cum_v, mode, type, true);
+ nios2_function_arg_advance (local_cum_v, arg);
regs_to_push = NUM_ARG_REGS - local_cum.regs_used;
{
if (!custom_insn_opcode (value, VOIDmode))
error ("custom instruction opcode must be compile time "
- "constant in the range 0-255 for __builtin_custom_%s",
+ "constant in the range 0-255 for %<__builtin_custom_%s%>",
custom_builtin_name[index]);
}
else
int ignore ATTRIBUTE_UNUSED)
{
tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
- unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
+ unsigned int fcode = DECL_MD_FUNCTION_CODE (fndecl);
if (fcode < nios2_fpu_builtin_base)
{
continue;
if (!ISDIGIT (*t))
{
- error ("`custom-%s=' argument requires "
- "numeric digits", N2FPU_NAME (code));
+ error ("%<custom-%s=%> argument should be "
+ "a non-negative integer", N2FPU_NAME (code));
return false;
}
}
HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
tree function)
{
+ const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
rtx this_rtx, funexp;
rtx_insn *insn;
/* Run just enough of rest_of_compilation to get the insns emitted.
There's not really enough bulk here to make other passes such as
- instruction scheduling worth while. Note that use_thunk calls
- assemble_start_function and assemble_end_function. */
+ instruction scheduling worth while. */
insn = get_insns ();
shorten_branches (insn);
+ assemble_start_function (thunk_fndecl, fnname);
final_start_function (insn, file, 1);
final (insn, file, 1);
final_end_function ();
+ assemble_end_function (thunk_fndecl, fnname);
/* Stop pretending to be a post-reload pass. */
reload_completed = 0;
|| TARGET_BYPASS_CACHE)
return false;
addr = XEXP (mem, 0);
- /* GP-based references are never narrow. */
- if (gprel_constant_p (addr))
+ /* GP-based and R0-based references are never narrow. */
+ if (gprel_constant_p (addr) || r0rel_constant_p (addr))
return false;
+ /* %lo requires a 16-bit relocation and is never narrow. */
+ if (GET_CODE (addr) == LO_SUM)
+ return false;
ret = split_mem_address (addr, &rhs1, &rhs2);
gcc_assert (ret);
}
|| TARGET_BYPASS_CACHE)
return false;
addr = XEXP (mem, 0);
- /* GP-based references are never narrow. */
- if (gprel_constant_p (addr))
+ /* GP-based and r0-based references are never narrow. */
+ if (gprel_constant_p (addr) || r0rel_constant_p (addr))
+ return false;
+ /* %lo requires a 16-bit relocation and is never narrow. */
+ if (GET_CODE (addr) == LO_SUM)
return false;
ret = split_mem_address (addr, &rhs1, &rhs2);
gcc_assert (ret);
can_use_cdx_ldstw (int regno, int basereg, int offset)
{
if (CDX_REG_P (regno) && CDX_REG_P (basereg)
- && (offset & 0x3) == 0 && 0 <= offset && offset < 0x40)
+ && (offset & 0x3) == 0 && offset >= 0 && offset < 0x40)
return true;
else if (basereg == SP_REGNO
&& offset >= 0 && offset < 0x80 && (offset & 0x3) == 0)
int n = CODE_LABEL_NUMBER (label);
if (label_align && n >= min_labelno && n <= max_labelno)
- return MAX (label_align[n - min_labelno], align_labels_log);
- return align_labels_log;
+ return MAX (label_align[n - min_labelno], align_labels.levels[0].log);
+ return align_labels.levels[0].log;
}
/* Implement ADJUST_REG_ALLOC_ORDER. We use the default ordering
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P nios2_legitimate_address_p
-#undef TARGET_LRA_P
-#define TARGET_LRA_P hook_bool_void_false
-
#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS nios2_preferred_reload_class
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS nios2_rtx_costs
+#undef TARGET_ADDRESS_COST
+#define TARGET_ADDRESS_COST nios2_address_cost
+
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS TARGET_LINUX_ABI
#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
+#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
+#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
+
struct gcc_target targetm = TARGET_INITIALIZER;
#include "gt-nios2.h"