int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
int varargs_save_offset; /* offset to save the varargs registers */
int ehrd_offset; /* offset to EH return data */
+ int ehcr_offset; /* offset to EH CR field data */
int reg_size; /* register size (4 or 8) */
HOST_WIDE_INT vars_size; /* variable save area size */
int parm_size; /* outgoing parameter size */
64 bits wide and is allocated early enough so that the offset
does not overflow the 16-bit load/store offset field. */
rtx sdmode_stack_slot;
+ /* Flag if r2 setup is needed with ELFv2 ABI. */
+ bool r2_setup_needed;
} machine_function;
/* Support targetm.vectorize.builtin_mask_for_load. */
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
+#undef TARGET_RETURN_IN_MSB
+#define TARGET_RETURN_IN_MSB rs6000_return_in_msb
+
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
{
case ABI_NONE: abi_str = "none"; break;
case ABI_AIX: abi_str = "aix"; break;
+ case ABI_ELFv2: abi_str = "ELFv2"; break;
case ABI_V4: abi_str = "V4"; break;
case ABI_DARWIN: abi_str = "darwin"; break;
default: abi_str = "unknown"; break;
/* We should always be splitting complex arguments, but we can't break
Linux and Darwin ABIs at the moment.  For now, only AIX and ELFv2 are fixed. */
- if (DEFAULT_ABI != ABI_AIX)
+ if (DEFAULT_ABI == ABI_V4 || DEFAULT_ABI == ABI_DARWIN)
targetm.calls.split_complex_arg = NULL;
}
putc ('\n', file);
}
- if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
+ if (DEFAULT_ABI == ABI_ELFv2)
+ fprintf (file, "\t.abiversion 2\n");
+
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2
+ || (TARGET_ELF && flag_pic == 2))
{
switch_to_section (toc_section);
switch_to_section (text_section);
if (TARGET_ELF || TARGET_MACHO)
{
- if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
+ if (DEFAULT_ABI == ABI_V4 && flag_pic)
return false;
if (TARGET_TOC)
return false;
1, const0_rtx, Pmode);
r3 = gen_rtx_REG (Pmode, 3);
- if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
- insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
- else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
- insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ if (TARGET_64BIT)
+ insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
+ else
+ insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
+ }
else if (DEFAULT_ABI == ABI_V4)
insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
else
1, const0_rtx, Pmode);
r3 = gen_rtx_REG (Pmode, 3);
- if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
- insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
- else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
- insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ if (TARGET_64BIT)
+ insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
+ else
+ insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
+ }
else if (DEFAULT_ABI == ABI_V4)
insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
else
/* The TOC register is not killed across calls in a way that is
visible to the compiler. */
- if (DEFAULT_ABI == ABI_AIX)
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
call_really_used_regs[2] = 0;
if (DEFAULT_ABI == ABI_V4
}
\f
/* Nonzero if we can use a floating-point register to pass this arg. */
-#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
+#define USE_FP_FOR_ARG_P(CUM,MODE) \
(SCALAR_FLOAT_MODE_P (MODE) \
&& (CUM)->fregno <= FP_ARG_MAX_REG \
&& TARGET_HARD_FLOAT && TARGET_FPRS)
/* Nonzero if we can use an AltiVec register to pass this arg. */
-#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
+#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,NAMED) \
(ALTIVEC_OR_VSX_VECTOR_MODE (MODE) \
&& (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
&& TARGET_ALTIVEC_ABI \
&& (NAMED))
+/* Walk down the type tree of TYPE counting consecutive base elements.
+ If *MODEP is VOIDmode, then set it to the first valid floating point
+ or vector type. If a non-floating point or vector type is found, or
+ if a floating point or vector type that doesn't match a non-VOIDmode
+ *MODEP is found, then return -1, otherwise return the count in the
+ sub-tree. */
+
+static int
+rs6000_aggregate_candidate (const_tree type, enum machine_mode *modep)
+{
+ enum machine_mode mode;
+ HOST_WIDE_INT size;
+
+ switch (TREE_CODE (type))
+ {
+ case REAL_TYPE:
+ mode = TYPE_MODE (type);
+ if (!SCALAR_FLOAT_MODE_P (mode))
+ return -1;
+
+ if (*modep == VOIDmode)
+ *modep = mode;
+
+ if (*modep == mode)
+ return 1;
+
+ break;
+
+ case COMPLEX_TYPE:
+ mode = TYPE_MODE (TREE_TYPE (type));
+ if (!SCALAR_FLOAT_MODE_P (mode))
+ return -1;
+
+ if (*modep == VOIDmode)
+ *modep = mode;
+
+ if (*modep == mode)
+ return 2;
+
+ break;
+
+ case VECTOR_TYPE:
+ if (!TARGET_ALTIVEC_ABI || !TARGET_ALTIVEC)
+ return -1;
+
+ /* Use V4SImode as representative of all 128-bit vector types. */
+ size = int_size_in_bytes (type);
+ switch (size)
+ {
+ case 16:
+ mode = V4SImode;
+ break;
+ default:
+ return -1;
+ }
+
+ if (*modep == VOIDmode)
+ *modep = mode;
+
+ /* Vector modes are considered to be opaque: two vectors are
+ equivalent for the purposes of being homogeneous aggregates
+ if they are the same size. */
+ if (*modep == mode)
+ return 1;
+
+ break;
+
+ case ARRAY_TYPE:
+ {
+ int count;
+ tree index = TYPE_DOMAIN (type);
+
+ /* Can't handle incomplete types. */
+ if (!COMPLETE_TYPE_P (type))
+ return -1;
+
+ count = rs6000_aggregate_candidate (TREE_TYPE (type), modep);
+ if (count == -1
+ || !index
+ || !TYPE_MAX_VALUE (index)
+ || !host_integerp (TYPE_MAX_VALUE (index), 1)
+ || !TYPE_MIN_VALUE (index)
+ || !host_integerp (TYPE_MIN_VALUE (index), 1)
+ || count < 0)
+ return -1;
+
+ count *= (1 + tree_low_cst (TYPE_MAX_VALUE (index), 1)
+ - tree_low_cst (TYPE_MIN_VALUE (index), 1));
+
+ /* There must be no padding. */
+ if (!host_integerp (TYPE_SIZE (type), 1)
+ || (tree_low_cst (TYPE_SIZE (type), 1)
+ != count * GET_MODE_BITSIZE (*modep)))
+ return -1;
+
+ return count;
+ }
+
+ case RECORD_TYPE:
+ {
+ int count = 0;
+ int sub_count;
+ tree field;
+
+ /* Can't handle incomplete types. */
+ if (!COMPLETE_TYPE_P (type))
+ return -1;
+
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ {
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ sub_count = rs6000_aggregate_candidate (TREE_TYPE (field), modep);
+ if (sub_count < 0)
+ return -1;
+ count += sub_count;
+ }
+
+ /* There must be no padding. */
+ if (!host_integerp (TYPE_SIZE (type), 1)
+ || (tree_low_cst (TYPE_SIZE (type), 1)
+ != count * GET_MODE_BITSIZE (*modep)))
+ return -1;
+
+ return count;
+ }
+
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ /* These aren't very interesting except in a degenerate case. */
+ int count = 0;
+ int sub_count;
+ tree field;
+
+ /* Can't handle incomplete types. */
+ if (!COMPLETE_TYPE_P (type))
+ return -1;
+
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ {
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ sub_count = rs6000_aggregate_candidate (TREE_TYPE (field), modep);
+ if (sub_count < 0)
+ return -1;
+ count = count > sub_count ? count : sub_count;
+ }
+
+ /* There must be no padding. */
+ if (!host_integerp (TYPE_SIZE (type), 1)
+ || (tree_low_cst (TYPE_SIZE (type), 1)
+ != count * GET_MODE_BITSIZE (*modep)))
+ return -1;
+
+ return count;
+ }
+
+ default:
+ break;
+ }
+
+ return -1;
+}
+
+/* If an argument, whose type is described by TYPE and MODE, is a homogeneous
+ float or vector aggregate that shall be passed in FP/vector registers
+ according to the ELFv2 ABI, return the homogeneous element mode in
+ *ELT_MODE and the number of elements in *N_ELTS, and return TRUE.
+
+ Otherwise, set *ELT_MODE to MODE and *N_ELTS to 1, and return FALSE. */
+
+static bool
+rs6000_discover_homogeneous_aggregate (enum machine_mode mode, const_tree type,
+ enum machine_mode *elt_mode,
+ int *n_elts)
+{
+ /* Note that we do not accept complex types at the top level as
+ homogeneous aggregates; these types are handled via the
+ targetm.calls.split_complex_arg mechanism. Complex types
+ can be elements of homogeneous aggregates, however. */
+ if (DEFAULT_ABI == ABI_ELFv2 && type && AGGREGATE_TYPE_P (type))
+ {
+ enum machine_mode field_mode = VOIDmode;
+ int field_count = rs6000_aggregate_candidate (type, &field_mode);
+
+ if (field_count > 0)
+ {
+ int n_regs = (SCALAR_FLOAT_MODE_P (field_mode) ?
+ (GET_MODE_SIZE (field_mode) + 7) >> 3 : 1);
+
+ /* The ELFv2 ABI allows homogeneous aggregates to occupy
+ up to AGGR_ARG_NUM_REG registers. */
+ if (field_count * n_regs <= AGGR_ARG_NUM_REG)
+ {
+ if (elt_mode)
+ *elt_mode = field_mode;
+ if (n_elts)
+ *n_elts = field_count;
+ return true;
+ }
+ }
+ }
+
+ if (elt_mode)
+ *elt_mode = mode;
+ if (n_elts)
+ *n_elts = 1;
+ return false;
+}
+
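+/* For example, given
+     struct vec3 { double x, y, z; };
+   the walk above yields *ELT_MODE == DFmode and *N_ELTS == 3, so under
+   the ELFv2 ABI the struct is passed (or returned) in three consecutive
+   FPRs when enough argument registers remain.  A struct mixing float and
+   integer members fails rs6000_aggregate_candidate and is handled as an
+   ordinary aggregate instead.  */
+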
/* Return a nonzero value to say to return the function value in
memory, just as large structures are always returned. TYPE will be
the data type of the value, and FNTYPE will be the type of the
/* Otherwise fall through to more conventional ABI rules. */
}
+ /* The ELFv2 ABI returns homogeneous floating-point and vector aggregates in registers.  */
+ if (rs6000_discover_homogeneous_aggregate (TYPE_MODE (type), type,
+ NULL, NULL))
+ return false;
+
+ /* The ELFv2 ABI returns aggregates of up to 16 bytes in registers.  */
+ if (DEFAULT_ABI == ABI_ELFv2 && AGGREGATE_TYPE_P (type)
+ && (unsigned HOST_WIDE_INT) int_size_in_bytes (type) <= 16)
+ return false;
+
if (AGGREGATE_TYPE_P (type)
&& (aix_struct_return
|| (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
return false;
}
+/* Specify whether values returned in registers should be at the most
+ significant end of a register. We want aggregates returned by
+ value to match the way aggregates are passed to functions. */
+
+static bool
+rs6000_return_in_msb (const_tree valtype)
+{
+ return (DEFAULT_ABI == ABI_ELFv2
+ && BYTES_BIG_ENDIAN
+ && AGGREGATE_TYPE_P (valtype)
+ && FUNCTION_ARG_PADDING (TYPE_MODE (valtype), valtype) == upward);
+}
+
#ifdef HAVE_AS_GNU_ATTRIBUTE
/* Return TRUE if a call to function FNDECL may be one that
potentially affects the function calling ABI of the object file. */
static bool
rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
{
- if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2 || TARGET_64BIT)
return must_pass_in_stack_var_size (mode, type);
else
return must_pass_in_stack_var_size_or_pad (mode, type);
static unsigned int
rs6000_function_arg_boundary (enum machine_mode mode, const_tree type)
{
+ enum machine_mode elt_mode;
+ int n_elts;
+
+ rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
+
if (DEFAULT_ABI == ABI_V4
&& (GET_MODE_SIZE (mode) == 8
|| (TARGET_HARD_FLOAT
&& int_size_in_bytes (type) >= 8
&& int_size_in_bytes (type) < 16))
return 64;
- else if (ALTIVEC_OR_VSX_VECTOR_MODE (mode)
+ else if (ALTIVEC_OR_VSX_VECTOR_MODE (elt_mode)
|| (type && TREE_CODE (type) == VECTOR_TYPE
&& int_size_in_bytes (type) >= 16))
return 128;
else if (((TARGET_MACHO && rs6000_darwin64_abi)
+ || DEFAULT_ABI == ABI_ELFv2
|| (DEFAULT_ABI == ABI_AIX && !rs6000_compat_align_parm))
&& mode == BLKmode
&& type && TYPE_ALIGN (type) > 64)
return PARM_BOUNDARY;
}
+/* The offset in words to the start of the parameter save area. */
+
+static unsigned int
+rs6000_parm_offset (void)
+{
+ return (DEFAULT_ABI == ABI_V4 ? 2
+ : DEFAULT_ABI == ABI_ELFv2 ? 4
+ : 6);
+}
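+
+/* In bytes this is 8 for the 32-bit V.4 ABI, 32 for the (64-bit only)
+   ELFv2 ABI, and 24 or 48 for the 32- and 64-bit AIX-style ABIs: the
+   size of the fixed stack-frame header that precedes the parameter
+   save area in each ABI.  */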
+
/* For a function parm of MODE and TYPE, return the starting word in
the parameter area. NWORDS of the parameter area are already used. */
unsigned int nwords)
{
unsigned int align;
- unsigned int parm_offset;
align = rs6000_function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
- parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
- return nwords + (-(parm_offset + nwords) & align);
+ return nwords + (-(rs6000_parm_offset () + nwords) & align);
}
/* Compute the size (in words) of a function argument. */
if (TREE_CODE (ftype) == RECORD_TYPE)
rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
- else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
+ else if (USE_FP_FOR_ARG_P (cum, mode))
{
unsigned n_fpregs = (GET_MODE_SIZE (mode) + 7) >> 3;
rs6000_darwin64_record_arg_advance_flush (cum, bitpos, 0);
else
cum->words += n_fpregs;
}
- else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
+ else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, 1))
{
rs6000_darwin64_record_arg_advance_flush (cum, bitpos, 0);
cum->vregno++;
rs6000_function_arg_advance_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
const_tree type, bool named, int depth)
{
+ enum machine_mode elt_mode;
+ int n_elts;
+
+ rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
+
/* Only tick off an argument if we're not recursing. */
if (depth == 0)
cum->nargs_prototype--;
#endif
if (TARGET_ALTIVEC_ABI
- && (ALTIVEC_OR_VSX_VECTOR_MODE (mode)
+ && (ALTIVEC_OR_VSX_VECTOR_MODE (elt_mode)
|| (type && TREE_CODE (type) == VECTOR_TYPE
&& int_size_in_bytes (type) == 16)))
{
bool stack = false;
- if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
+ if (USE_ALTIVEC_FOR_ARG_P (cum, elt_mode, named))
{
- cum->vregno++;
+ cum->vregno += n_elts;
+
if (!TARGET_ALTIVEC)
error ("cannot pass argument in vector register because"
" altivec instructions are disabled, use -maltivec"
/* PowerPC64 Linux and AIX allocate GPRs for a vector argument
even if it is going to be passed in a vector register.
Darwin does the same for variable-argument functions. */
- if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
+ if (((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ && TARGET_64BIT)
|| (cum->stdarg && DEFAULT_ABI != ABI_V4))
stack = true;
}
{
int align;
- /* Vector parameters must be 16-byte aligned. This places
- them at 2 mod 4 in terms of words in 32-bit mode, since
- the parameter save area starts at offset 24 from the
- stack. In 64-bit mode, they just have to start on an
- even word, since the parameter save area is 16-byte
- aligned. Space for GPRs is reserved even if the argument
- will be passed in memory. */
+ /* Vector parameters must be 16-byte aligned. In 32-bit
+ mode this means we need to take into account the offset
+ to the parameter save area. In 64-bit mode, they just
+ have to start on an even word, since the parameter save
+ area is 16-byte aligned. */
if (TARGET_32BIT)
- align = (2 - cum->words) & 3;
+ align = -(rs6000_parm_offset () + cum->words) & 3;
else
align = cum->words & 1;
cum->words += align + rs6000_arg_size (mode, type);
cum->words = align_words + n_words;
- if (SCALAR_FLOAT_MODE_P (mode)
+ if (SCALAR_FLOAT_MODE_P (elt_mode)
&& TARGET_HARD_FLOAT && TARGET_FPRS)
{
/* _Decimal128 must be passed in an even/odd float register pair.
This assumes that the register number is odd when fregno is
odd. */
- if (mode == TDmode && (cum->fregno % 2) == 1)
+ if (elt_mode == TDmode && (cum->fregno % 2) == 1)
cum->fregno++;
- cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
+ cum->fregno += n_elts * ((GET_MODE_SIZE (elt_mode) + 7) >> 3);
}
if (TARGET_DEBUG_ARG)
if (TREE_CODE (ftype) == RECORD_TYPE)
rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
- else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
+ else if (cum->named && USE_FP_FOR_ARG_P (cum, mode))
{
unsigned n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
#if 0
if (mode == TFmode || mode == TDmode)
cum->fregno++;
}
- else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
+ else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, 1))
{
rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
rvec[(*k)++]
return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
}
+/* We have an argument of MODE and TYPE that goes into FPRs or VRs,
+ but must also be copied into the parameter save area starting at
+ offset ALIGN_WORDS. Fill in RVEC with the elements corresponding
+ to the GPRs and/or memory. Return the number of elements used. */
+
+static int
+rs6000_psave_function_arg (enum machine_mode mode, const_tree type,
+ int align_words, rtx *rvec)
+{
+ int k = 0;
+
+ if (align_words < GP_ARG_NUM_REG)
+ {
+ int n_words = rs6000_arg_size (mode, type);
+
+ if (align_words + n_words > GP_ARG_NUM_REG
+ || mode == BLKmode
+ || (TARGET_32BIT && TARGET_POWERPC64))
+ {
+ /* If this is partially on the stack, then we only
+ include the portion actually in registers here. */
+ enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
+ int i = 0;
+
+ if (align_words + n_words > GP_ARG_NUM_REG)
+ {
+ /* Not all of the arg fits in gprs. Say that it goes in memory
+ too, using a magic NULL_RTX component. Also see comment in
+ rs6000_mixed_function_arg for why the normal
+ function_arg_partial_nregs scheme doesn't work in this case. */
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
+ }
+
+ do
+ {
+ rtx r = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
+ rtx off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
+ }
+ while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
+ }
+ else
+ {
+ /* The whole arg fits in gprs. */
+ rtx r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
+ }
+ }
+ else
+ {
+ /* It's entirely in memory. */
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
+ }
+
+ return k;
+}
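+
+/* For example, a 20-byte BLKmode argument starting at ALIGN_WORDS == 6
+   in 64-bit mode produces a leading NULL_RTX element (part of the value
+   lives in memory), then r9 at offset 0 and r10 at offset 8, and the
+   function returns k == 3.  */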
+
+/* RVEC is a vector of K components of an argument of mode MODE.
+ Construct the final function_arg return value from it. */
+
+static rtx
+rs6000_finish_function_arg (enum machine_mode mode, rtx *rvec, int k)
+{
+ gcc_assert (k >= 1);
+
+ /* Avoid returning a PARALLEL in the trivial cases. */
+ if (k == 1)
+ {
+ if (XEXP (rvec[0], 0) == NULL_RTX)
+ return NULL_RTX;
+
+ if (GET_MODE (XEXP (rvec[0], 0)) == mode)
+ return XEXP (rvec[0], 0);
+ }
+
+ return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
+}
+
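+/* For instance, a homogeneous aggregate of two doubles passed entirely
+   in FPRs comes back as
+     (parallel [(expr_list (reg:DF 33) (const_int 0))
+                (expr_list (reg:DF 34) (const_int 8))])
+   while a single element whose mode matches MODE collapses to the bare
+   REG, and a lone NULL_RTX element (everything in memory) to NULL_RTX.  */
+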
/* Determine where to put an argument to a function.
Value is zero to push the argument on the stack,
or a hard register in which to store the argument.
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
enum rs6000_abi abi = DEFAULT_ABI;
+ enum machine_mode elt_mode;
+ int n_elts;
/* Return a marker to indicate whether CR1 needs to set or clear the
bit that V.4 uses to say fp args were passed in registers.
return GEN_INT (cum->call_cookie & ~CALL_LIBCALL);
}
+ rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
+
if (TARGET_MACHO && rs6000_darwin64_struct_check_p (mode, type))
{
rtx rslt = rs6000_darwin64_record_arg (cum, type, named, /*retval= */false);
/* Else fall through to usual handling. */
}
- if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
- if (TARGET_64BIT && ! cum->prototype)
- {
- /* Vector parameters get passed in vector register
- and also in GPRs or memory, in absence of prototype. */
- int align_words;
- rtx slot;
- align_words = (cum->words + 1) & ~1;
+ if (USE_ALTIVEC_FOR_ARG_P (cum, elt_mode, named))
+ {
+ rtx rvec[GP_ARG_NUM_REG + AGGR_ARG_NUM_REG + 1];
+ rtx r, off;
+ int i, k = 0;
- if (align_words >= GP_ARG_NUM_REG)
- {
- slot = NULL_RTX;
- }
- else
- {
- slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
- }
- return gen_rtx_PARALLEL (mode,
- gen_rtvec (2,
- gen_rtx_EXPR_LIST (VOIDmode,
- slot, const0_rtx),
- gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_REG (mode, cum->vregno),
- const0_rtx)));
- }
- else
- return gen_rtx_REG (mode, cum->vregno);
+ /* Do we also need to pass this argument in the parameter
+ save area? */
+ if (TARGET_64BIT && ! cum->prototype)
+ {
+ int align_words = (cum->words + 1) & ~1;
+ k = rs6000_psave_function_arg (mode, type, align_words, rvec);
+ }
+
+ /* Describe where this argument goes in the vector registers. */
+ for (i = 0; i < n_elts && cum->vregno + i <= ALTIVEC_ARG_MAX_REG; i++)
+ {
+ r = gen_rtx_REG (elt_mode, cum->vregno + i);
+ off = GEN_INT (i * GET_MODE_SIZE (elt_mode));
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
+ }
+
+ return rs6000_finish_function_arg (mode, rvec, k);
+ }
else if (TARGET_ALTIVEC_ABI
&& (ALTIVEC_OR_VSX_VECTOR_MODE (mode)
|| (type && TREE_CODE (type) == VECTOR_TYPE
int align, align_words, n_words;
enum machine_mode part_mode;
- /* Vector parameters must be 16-byte aligned. This places them at
- 2 mod 4 in terms of words in 32-bit mode, since the parameter
- save area starts at offset 24 from the stack. In 64-bit mode,
- they just have to start on an even word, since the parameter
- save area is 16-byte aligned. */
+ /* Vector parameters must be 16-byte aligned. In 32-bit
+ mode this means we need to take into account the offset
+ to the parameter save area. In 64-bit mode, they just
+ have to start on an even word, since the parameter save
+ area is 16-byte aligned. */
if (TARGET_32BIT)
- align = (2 - cum->words) & 3;
+ align = -(rs6000_parm_offset () + cum->words) & 3;
else
align = cum->words & 1;
align_words = cum->words + align;
/* _Decimal128 must be passed in an even/odd float register pair.
This assumes that the register number is odd when fregno is odd. */
- if (mode == TDmode && (cum->fregno % 2) == 1)
+ if (elt_mode == TDmode && (cum->fregno % 2) == 1)
cum->fregno++;
- if (USE_FP_FOR_ARG_P (cum, mode, type))
+ if (USE_FP_FOR_ARG_P (cum, elt_mode))
{
- rtx rvec[GP_ARG_NUM_REG + 1];
- rtx r;
- int k;
- bool needs_psave;
- enum machine_mode fmode = mode;
- unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
+ rtx rvec[GP_ARG_NUM_REG + AGGR_ARG_NUM_REG + 1];
+ rtx r, off;
+ int i, k = 0;
+ unsigned long n_fpreg = (GET_MODE_SIZE (elt_mode) + 7) >> 3;
- if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
- {
- /* Currently, we only ever need one reg here because complex
- doubles are split. */
- gcc_assert (cum->fregno == FP_ARG_MAX_REG
- && (fmode == TFmode || fmode == TDmode));
-
- /* Long double or _Decimal128 split over regs and memory. */
- fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
- }
-
- /* Do we also need to pass this arg in the parameter save
- area? */
- needs_psave = (type
- && (cum->nargs_prototype <= 0
- || (DEFAULT_ABI == ABI_AIX
- && TARGET_XL_COMPAT
- && align_words >= GP_ARG_NUM_REG)));
+ /* Do we also need to pass this argument in the parameter
+ save area? */
+ if (type && (cum->nargs_prototype <= 0
+ || ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ && TARGET_XL_COMPAT
+ && align_words >= GP_ARG_NUM_REG)))
+ k = rs6000_psave_function_arg (mode, type, align_words, rvec);
- if (!needs_psave && mode == fmode)
- return gen_rtx_REG (fmode, cum->fregno);
-
- k = 0;
- if (needs_psave)
+ /* Describe where this argument goes in the fprs. */
+ for (i = 0; i < n_elts
+ && cum->fregno + i * n_fpreg <= FP_ARG_MAX_REG; i++)
{
- /* Describe the part that goes in gprs or the stack.
- This piece must come first, before the fprs. */
- if (align_words < GP_ARG_NUM_REG)
+ /* Check if the argument is split over registers and memory.
+ This can only ever happen for long double or _Decimal128;
+ complex types are handled via split_complex_arg. */
+ enum machine_mode fmode = elt_mode;
+ if (cum->fregno + (i + 1) * n_fpreg > FP_ARG_MAX_REG + 1)
{
- unsigned long n_words = rs6000_arg_size (mode, type);
-
- if (align_words + n_words > GP_ARG_NUM_REG
- || (TARGET_32BIT && TARGET_POWERPC64))
- {
- /* If this is partially on the stack, then we only
- include the portion actually in registers here. */
- enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
- rtx off;
- int i = 0;
- if (align_words + n_words > GP_ARG_NUM_REG)
- /* Not all of the arg fits in gprs. Say that it
- goes in memory too, using a magic NULL_RTX
- component. Also see comment in
- rs6000_mixed_function_arg for why the normal
- function_arg_partial_nregs scheme doesn't work
- in this case. */
- rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
- const0_rtx);
- do
- {
- r = gen_rtx_REG (rmode,
- GP_ARG_MIN_REG + align_words);
- off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
- rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
- }
- while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
- }
- else
- {
- /* The whole arg fits in gprs. */
- r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
- rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
- }
+ gcc_assert (fmode == TFmode || fmode == TDmode);
+ fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
}
- else
- /* It's entirely in memory. */
- rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
- }
- /* Describe where this piece goes in the fprs. */
- r = gen_rtx_REG (fmode, cum->fregno);
- rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
+ r = gen_rtx_REG (fmode, cum->fregno + i * n_fpreg);
+ off = GEN_INT (i * GET_MODE_SIZE (elt_mode));
+ rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
+ }
- return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
+ return rs6000_finish_function_arg (mode, rvec, k);
}
else if (align_words < GP_ARG_NUM_REG)
{
if (TARGET_32BIT && TARGET_POWERPC64)
return rs6000_mixed_function_arg (mode, type, align_words);
- if (mode == BLKmode)
- mode = Pmode;
-
return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
}
else
tree type, bool named)
{
CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+ bool passed_in_gprs = true;
int ret = 0;
int align_words;
+ enum machine_mode elt_mode;
+ int n_elts;
+
+ rs6000_discover_homogeneous_aggregate (mode, type, &elt_mode, &n_elts);
if (DEFAULT_ABI == ABI_V4)
return 0;
- if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
- && cum->nargs_prototype >= 0)
- return 0;
+ if (USE_ALTIVEC_FOR_ARG_P (cum, elt_mode, named))
+ {
+ /* If we are passing this arg in the fixed parameter save area
+ (gprs or memory) as well as VRs, we do not use the partial
+ bytes mechanism; instead, rs6000_function_arg will return a
+ PARALLEL including a memory element as necessary. */
+ if (TARGET_64BIT && ! cum->prototype)
+ return 0;
+
+ /* Otherwise, we pass in VRs only. Check for partial copies. */
+ passed_in_gprs = false;
+ if (cum->vregno + n_elts > ALTIVEC_ARG_MAX_REG + 1)
+ ret = (ALTIVEC_ARG_MAX_REG + 1 - cum->vregno) * 16;
+ }
/* In this complicated case we just disable the partial_nregs code. */
if (TARGET_MACHO && rs6000_darwin64_struct_check_p (mode, type))
align_words = rs6000_parm_start (mode, type, cum->words);
- if (USE_FP_FOR_ARG_P (cum, mode, type))
+ if (USE_FP_FOR_ARG_P (cum, elt_mode))
{
+ unsigned long n_fpreg = (GET_MODE_SIZE (elt_mode) + 7) >> 3;
+
/* If we are passing this arg in the fixed parameter save area
- (gprs or memory) as well as fprs, then this function should
- return the number of partial bytes passed in the parameter
- save area rather than partial bytes passed in fprs. */
+ (gprs or memory) as well as FPRs, we do not use the partial
+ bytes mechanism; instead, rs6000_function_arg will return a
+ PARALLEL including a memory element as necessary. */
if (type
&& (cum->nargs_prototype <= 0
- || (DEFAULT_ABI == ABI_AIX
+ || ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
&& TARGET_XL_COMPAT
&& align_words >= GP_ARG_NUM_REG)))
return 0;
- else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
- > FP_ARG_MAX_REG + 1)
- ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
- else if (cum->nargs_prototype >= 0)
- return 0;
+
+ /* Otherwise, we pass in FPRs only. Check for partial copies. */
+ passed_in_gprs = false;
+ if (cum->fregno + n_elts * n_fpreg > FP_ARG_MAX_REG + 1)
+ ret = ((FP_ARG_MAX_REG + 1 - cum->fregno)
+ * MIN (8, GET_MODE_SIZE (elt_mode)));
}
- if (align_words < GP_ARG_NUM_REG
+ if (passed_in_gprs
+ && align_words < GP_ARG_NUM_REG
&& GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
return 0;
}
+/* Process parameter of type TYPE after ARGS_SO_FAR parameters were
+ already processed.  Return true if the parameter must be passed
+ (fully or partially) on the stack. */
+
+static bool
+rs6000_parm_needs_stack (cumulative_args_t args_so_far, tree type)
+{
+ enum machine_mode mode;
+ int unsignedp;
+ rtx entry_parm;
+
+ /* Catch errors. */
+ if (type == NULL || type == error_mark_node)
+ return true;
+
+ /* Handle types with no storage requirement. */
+ if (TYPE_MODE (type) == VOIDmode)
+ return false;
+
+ /* Handle complex types: check the element type once for the real and
+ once for the imaginary part, so ARGS_SO_FAR advances past both halves. */
+ if (TREE_CODE (type) == COMPLEX_TYPE)
+ return (rs6000_parm_needs_stack (args_so_far, TREE_TYPE (type))
+ || rs6000_parm_needs_stack (args_so_far, TREE_TYPE (type)));
+
+ /* Handle transparent aggregates. */
+ if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
+ && TYPE_TRANSPARENT_AGGR (type))
+ type = TREE_TYPE (first_field (type));
+
+ /* See if this arg was passed by invisible reference. */
+ if (pass_by_reference (get_cumulative_args (args_so_far),
+ TYPE_MODE (type), type, true))
+ type = build_pointer_type (type);
+
+ /* Find mode as it is passed by the ABI. */
+ unsignedp = TYPE_UNSIGNED (type);
+ mode = promote_mode (type, TYPE_MODE (type), &unsignedp);
+
+ /* If we must pass in stack, we need a stack. */
+ if (rs6000_must_pass_in_stack (mode, type))
+ return true;
+
+ /* If there is no incoming register, we need a stack. */
+ entry_parm = rs6000_function_arg (args_so_far, mode, type, true);
+ if (entry_parm == NULL)
+ return true;
+
+ /* Likewise if we need to pass both in registers and on the stack. */
+ if (GET_CODE (entry_parm) == PARALLEL
+ && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
+ return true;
+
+ /* Also true if we're partially in registers and partially not. */
+ if (rs6000_arg_partial_bytes (args_so_far, mode, type, true) != 0)
+ return true;
+
+ /* Update info on where next arg arrives in registers. */
+ rs6000_function_arg_advance (args_so_far, mode, type, true);
+ return false;
+}
+
+/* Return true if FUN has no prototype, has a variable argument
+ list, or passes any parameter in memory. */
+
+static bool
+rs6000_function_parms_need_stack (tree fun)
+{
+ function_args_iterator args_iter;
+ tree arg_type;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
+
+ if (!fun)
+ /* Must be a libcall, all of which only use reg parms. */
+ return false;
+ if (!TYPE_P (fun))
+ fun = TREE_TYPE (fun);
+
+ /* Varargs functions need the parameter save area. */
+ if (!prototype_p (fun) || stdarg_p (fun))
+ return true;
+
+ INIT_CUMULATIVE_INCOMING_ARGS (args_so_far_v, fun, NULL_RTX);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
+
+ if (aggregate_value_p (TREE_TYPE (fun), fun))
+ {
+ tree type = build_pointer_type (TREE_TYPE (fun));
+ rs6000_parm_needs_stack (args_so_far, type);
+ }
+
+ FOREACH_FUNCTION_ARGS (fun, arg_type, args_iter)
+ if (rs6000_parm_needs_stack (args_so_far, arg_type))
+ return true;
+
+ return false;
+}
+
+/* Return the size of the REG_PARM_STACK_SPACE area for FUN.  This is
+ usually a constant depending on the ABI.  However, in the ELFv2 ABI
+ the register parameter area is optional when calling a function that
+ has a prototype in scope, has no variable argument list, and passes
+ all parameters in registers. */
+
+int
+rs6000_reg_parm_stack_space (tree fun)
+{
+ int reg_parm_stack_space;
+
+ switch (DEFAULT_ABI)
+ {
+ default:
+ reg_parm_stack_space = 0;
+ break;
+
+ case ABI_AIX:
+ case ABI_DARWIN:
+ reg_parm_stack_space = TARGET_64BIT ? 64 : 32;
+ break;
+
+ case ABI_ELFv2:
+ /* ??? Recomputing this every time is a bit expensive. Is there
+ a place to cache this information? */
+ if (rs6000_function_parms_need_stack (fun))
+ reg_parm_stack_space = TARGET_64BIT ? 64 : 32;
+ else
+ reg_parm_stack_space = 0;
+ break;
+ }
+
+ return reg_parm_stack_space;
+}
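+
+/* Under ELFv2 this means a call to a prototyped function such as
+     int f (int, int);
+   needs no parameter save area at all (both arguments travel in GPRs),
+   while calls to varargs or unprototyped functions still reserve the
+   full 64 bytes.  */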
+
static void
rs6000_move_block_from_reg (int regno, rtx x, int nregs)
{
if (((TARGET_MACHO
&& rs6000_darwin64_abi)
+ || DEFAULT_ABI == ABI_ELFv2
|| (DEFAULT_ABI == ABI_AIX && !rs6000_compat_align_parm))
&& integer_zerop (TYPE_SIZE (type)))
{
ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
break;
+ case ABI_ELFv2:
case ABI_V4:
case ABI_DARWIN:
break;
}
else
{
- gcc_checking_assert (DEFAULT_ABI == ABI_AIX);
+ gcc_checking_assert (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2);
if (info->first_fp_reg_save > 61)
strategy |= SAVE_INLINE_FPRS | REST_INLINE_FPRS;
strategy |= SAVE_INLINE_GPRS | REST_INLINE_GPRS;
by the static chain. It would require too much fiddling and the
static chain is rarely used anyway. FPRs are saved w.r.t the stack
pointer on Darwin, and AIX uses r1 or r12. */
- if (using_static_chain_p && DEFAULT_ABI != ABI_AIX)
+ if (using_static_chain_p
+ && (DEFAULT_ABI == ABI_V4 || DEFAULT_ABI == ABI_DARWIN))
strategy |= ((DEFAULT_ABI == ABI_DARWIN ? 0 : SAVE_INLINE_FPRS)
| SAVE_INLINE_GPRS
| SAVE_INLINE_VRS | REST_INLINE_VRS);
The required alignment for AIX configurations is two words (i.e., 8
or 16 bytes).
+ The ELFv2 ABI is a variant of the AIX ABI. Stack frames look like:
+
+ SP----> +---------------------------------------+
+ | Back chain to caller | 0
+ +---------------------------------------+
+ | Save area for CR | 8
+ +---------------------------------------+
+ | Saved LR | 16
+ +---------------------------------------+
+ | Saved TOC pointer | 24
+ +---------------------------------------+
+ | Parameter save area (P) | 32
+ +---------------------------------------+
+ | Alloca space (A) | 32+P
+ +---------------------------------------+
+ | Local variable space (L) | 32+P+A
+ +---------------------------------------+
+ | Save area for AltiVec registers (W) | 32+P+A+L
+ +---------------------------------------+
+ | AltiVec alignment padding (Y) | 32+P+A+L+W
+ +---------------------------------------+
+ | Save area for GP registers (G) | 32+P+A+L+W+Y
+ +---------------------------------------+
+ | Save area for FP registers (F) | 32+P+A+L+W+Y+G
+ +---------------------------------------+
+ old SP->| back chain to caller's caller | 32+P+A+L+W+Y+G+F
+ +---------------------------------------+
+
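+   Compared with the 64-bit AIX layout above, the ELFv2 header omits the
+   two reserved doublewords, so the fixed area shrinks from 48 to 32
+   bytes and the parameter save area, when allocated, starts at offset 32.
+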
V.4 stack frames look like:
rs6000_stack_t *info_ptr = &stack_info;
int reg_size = TARGET_32BIT ? 4 : 8;
int ehrd_size;
+ int ehcr_size;
int save_align;
int first_gp;
HOST_WIDE_INT non_fixed_size;
else
ehrd_size = 0;
+ /* In the ELFv2 ABI, we also need to allocate space for separate
+ CR field save areas if the function calls __builtin_eh_return. */
+ if (DEFAULT_ABI == ABI_ELFv2 && crtl->calls_eh_return)
+ {
+ /* This hard-codes that we have three call-saved CR fields. */
+ ehcr_size = 3 * reg_size;
+ /* We do *not* use the regular CR save mechanism. */
+ info_ptr->cr_save_p = 0;
+ }
+ else
+ ehcr_size = 0;
+
/* Determine various sizes. */
info_ptr->reg_size = reg_size;
info_ptr->fixed_size = RS6000_SAVE_AREA;
gcc_unreachable ();
case ABI_AIX:
+ case ABI_ELFv2:
case ABI_DARWIN:
info_ptr->fp_save_offset = - info_ptr->fp_size;
info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
}
else
info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
+
+ info_ptr->ehcr_offset = info_ptr->ehrd_offset - ehcr_size;
info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
info_ptr->lr_save_offset = 2*reg_size;
break;
+ info_ptr->spe_gp_size
+ info_ptr->spe_padding_size
+ ehrd_size
+ + ehcr_size
+ info_ptr->cr_size
+ info_ptr->vrsave_size,
save_align);
/* Determine if we need to save the link register. */
if (info_ptr->calls_p
- || (DEFAULT_ABI == ABI_AIX
+ || ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
&& crtl->profile
&& !TARGET_PROFILE_KERNEL)
|| (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
default: abi_string = "Unknown"; break;
case ABI_NONE: abi_string = "NONE"; break;
case ABI_AIX: abi_string = "AIX"; break;
+ case ABI_ELFv2: abi_string = "ELFv2"; break;
case ABI_DARWIN: abi_string = "Darwin"; break;
case ABI_V4: abi_string = "V.4"; break;
}
/* Currently we don't optimize very well between prolog and body
code and for PIC code the code can be actually quite bad, so
don't try to be too clever here. */
- if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
+ if (count != 0
+ || ((DEFAULT_ABI == ABI_V4 || DEFAULT_ABI == ABI_DARWIN) && flag_pic))
{
cfun->machine->ra_needs_full_frame = 1;
return false;
}
- /* Under the AIX ABI we can't allow calls to non-local functions,
- because the callee may have a different TOC pointer to the
- caller and there's no way to ensure we restore the TOC when we
- return. With the secure-plt SYSV ABI we can't make non-local
+ /* Under the AIX or ELFv2 ABIs we can't allow calls to non-local
+ functions, because the callee may have a different TOC pointer to
+ the caller and there's no way to ensure we restore the TOC when
+ we return. With the secure-plt SYSV ABI we can't make non-local
calls when -fpic/PIC because the plt call stubs use r30. */
if (DEFAULT_ABI == ABI_DARWIN
- || (DEFAULT_ABI == ABI_AIX
+ || ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
&& decl
&& !DECL_EXTERNAL (decl)
&& (*targetm.binds_local_p) (decl))
rtx dest;
dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
- if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
+ if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI == ABI_V4 && flag_pic)
{
char buf[30];
rtx lab, tmp1, tmp2, got;
emit_insn (gen_load_toc_v4_pic_si ());
emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
}
- else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
+ else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 2)
{
char buf[30];
rtx temp0 = (fromprolog
}
else
{
- gcc_assert (DEFAULT_ABI == ABI_AIX);
+ gcc_assert (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2);
if (TARGET_32BIT)
emit_insn (gen_load_toc_aix_si (dest));
if ((sel & SAVRES_LR))
suffix = "_x";
}
- else if (DEFAULT_ABI == ABI_AIX)
+ else if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
{
#if !defined (POWERPC_LINUX) && !defined (POWERPC_FREEBSD)
/* No out-of-line save/restore routines for GPRs on AIX. */
static inline unsigned
ptr_regno_for_savres (int sel)
{
- if (DEFAULT_ABI == ABI_AIX)
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
return (sel & SAVRES_REG) == SAVRES_FPR || (sel & SAVRES_LR) ? 1 : 12;
return DEFAULT_ABI == ABI_DARWIN && (sel & SAVRES_REG) == SAVRES_FPR ? 1 : 11;
}
return insn;
}
+/* Emit code to store CR fields that need to be saved into REG. */
+
+static void
+rs6000_emit_move_from_cr (rtx reg)
+{
+ /* Only the ELFv2 ABI allows saving just a subset of the CR fields. */
+ if (DEFAULT_ABI == ABI_ELFv2 && TARGET_MFCRF)
+ {
+ int i, cr_reg[8], count = 0;
+
+ /* Collect CR fields that must be saved. */
+ for (i = 0; i < 8; i++)
+ if (save_reg_p (CR0_REGNO + i))
+ cr_reg[count++] = i;
+
+ /* If it's just a single one, use mfcrf. */
+ if (count == 1)
+ {
+ rtvec p = rtvec_alloc (1);
+ rtvec r = rtvec_alloc (2);
+ RTVEC_ELT (r, 0) = gen_rtx_REG (CCmode, CR0_REGNO + cr_reg[0]);
+ RTVEC_ELT (r, 1) = GEN_INT (1 << (7 - cr_reg[0]));
+ RTVEC_ELT (p, 0)
+ = gen_rtx_SET (VOIDmode, reg,
+ gen_rtx_UNSPEC (SImode, r, UNSPEC_MOVESI_FROM_CR));
+
+ emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
+ return;
+ }
+
+ /* ??? It might be better to handle count == 2 / 3 cases here
+ as well, using logical operations to combine the values. */
+ }
+
+ emit_insn (gen_movesi_from_cr (reg));
+}
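+
+/* For instance, if CR2 is the only field that needs saving, the loop
+   above finds count == 1 and emits the UNSPEC_MOVESI_FROM_CR form with
+   mask 1 << (7 - 2) == 0x20, which lets a single-field mfocrf be used
+   instead of a full mfcr.  */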
+
/* Determine whether the gp REG is really used. */
static bool
#define NOT_INUSE(R) do {} while (0)
#endif
+ if (DEFAULT_ABI == ABI_ELFv2)
+ {
+ cfun->machine->r2_setup_needed = df_regs_ever_live_p (TOC_REGNUM);
+
+ /* With -mminimal-toc we may generate an extra use of r2 below. */
+ if (!TARGET_SINGLE_PIC_BASE
+ && TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
+ cfun->machine->r2_setup_needed = true;
+ }
+
if (flag_stack_usage_info)
current_function_static_stack_size = info->total_size;
/* If we need to save CR, put it into r12 or r11. Choose r12 except when
r12 will be needed by out-of-line gpr restore. */
- cr_save_regno = (DEFAULT_ABI == ABI_AIX
+ cr_save_regno = ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
&& !(strategy & (SAVE_INLINE_GPRS
| SAVE_NOINLINE_GPRS_SAVES_LR))
? 11 : 12);
&& REGNO (frame_reg_rtx) != cr_save_regno
&& !(using_static_chain_p && cr_save_regno == 11))
{
- rtx set;
-
cr_save_rtx = gen_rtx_REG (SImode, cr_save_regno);
START_USE (cr_save_regno);
- insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
- RTX_FRAME_RELATED_P (insn) = 1;
- /* Now, there's no way that dwarf2out_frame_debug_expr is going
- to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
- But that's OK. All we have to do is specify that _one_ condition
- code register is saved in this stack slot. The thrower's epilogue
- will then restore all the call-saved registers.
- We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
- set = gen_rtx_SET (VOIDmode, cr_save_rtx,
- gen_rtx_REG (SImode, CR2_REGNO));
- add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
+ rs6000_emit_move_from_cr (cr_save_rtx);
}
/* Do any required saving of fpr's. If only one or two to save, do
be updated if we arrived at this function via a plt call or
toc adjusting stub. */
emit_move_insn (tmp_reg_si, gen_rtx_MEM (SImode, tmp_reg));
- toc_restore_insn = TARGET_32BIT ? 0x80410014 : 0xE8410028;
+ toc_restore_insn = ((TARGET_32BIT ? 0x80410000 : 0xE8410000)
+ + RS6000_TOC_SAVE_SLOT);
hi = gen_int_mode (toc_restore_insn & ~0xffff, SImode);
emit_insn (gen_xorsi3 (tmp_reg_si, tmp_reg_si, hi));
compare_result = gen_rtx_REG (CCUNSmode, CR0_REGNO);
LABEL_NUSES (toc_save_done) += 1;
save_insn = emit_frame_save (frame_reg_rtx, reg_mode,
- TOC_REGNUM, frame_off + 5 * reg_size,
+ TOC_REGNUM, frame_off + RS6000_TOC_SAVE_SLOT,
sp_off - frame_off);
emit_label (toc_save_done);
rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
GEN_INT (info->cr_save_offset + frame_off));
rtx mem = gen_frame_mem (SImode, addr);
- /* See the large comment above about why CR2_REGNO is used. */
- rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
/* If we didn't copy cr before, do so now using r0. */
if (cr_save_rtx == NULL_RTX)
{
- rtx set;
-
START_USE (0);
cr_save_rtx = gen_rtx_REG (SImode, 0);
- insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
- RTX_FRAME_RELATED_P (insn) = 1;
- set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
+ rs6000_emit_move_from_cr (cr_save_rtx);
+ }
+
+ /* Saving CR requires a two-instruction sequence: one instruction
+ to move the CR to a general-purpose register, and a second
+ instruction that stores the GPR to memory.
+
+ We do not emit any DWARF CFI records for the first of these,
+ because we cannot properly represent the fact that CR is saved in
+ a register. One reason is that we cannot express that multiple
+ CR fields are saved; another reason is that on 64-bit, the size
+ of the CR register in DWARF (4 bytes) differs from the size of
+ a general-purpose register.
+
+ This means if any intervening instruction were to clobber one of
+ the call-saved CR fields, we'd have incorrect CFI. To prevent
+ this from happening, we mark the store to memory as a use of
+ those CR fields, which prevents any such instruction from being
+ scheduled in between the two instructions. */
+ rtx crsave_v[9];
+ int n_crsave = 0;
+ int i;
+
+ crsave_v[n_crsave++] = gen_rtx_SET (VOIDmode, mem, cr_save_rtx);
+ for (i = 0; i < 8; i++)
+ if (save_reg_p (CR0_REGNO + i))
+ crsave_v[n_crsave++]
+ = gen_rtx_USE (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO + i));
+
+ insn = emit_insn (gen_rtx_PARALLEL (VOIDmode,
+ gen_rtvec_v (n_crsave, crsave_v)));
+ END_USE (REGNO (cr_save_rtx));
+
+ /* Now, there's no way that dwarf2out_frame_debug_expr is going to
+ understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)',
+ so we need to construct a frame expression manually. */
+ RTX_FRAME_RELATED_P (insn) = 1;
+
+ /* Update address to be stack-pointer relative, like
+ rs6000_frame_related would do. */
+ addr = gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM),
+ GEN_INT (info->cr_save_offset + sp_off));
+ mem = gen_frame_mem (SImode, addr);
+
+ if (DEFAULT_ABI == ABI_ELFv2)
+ {
+ /* In the ELFv2 ABI we generate separate CFI records for each
+ CR field that was actually saved. They all point to the
+ same 32-bit stack slot. */
+ rtx crframe[8];
+ int n_crframe = 0;
+
+ for (i = 0; i < 8; i++)
+ if (save_reg_p (CR0_REGNO + i))
+ {
+ crframe[n_crframe]
+ = gen_rtx_SET (VOIDmode, mem,
+ gen_rtx_REG (SImode, CR0_REGNO + i));
+
+ RTX_FRAME_RELATED_P (crframe[n_crframe]) = 1;
+ n_crframe++;
+ }
+
+ add_reg_note (insn, REG_FRAME_RELATED_EXPR,
+ gen_rtx_PARALLEL (VOIDmode,
+ gen_rtvec_v (n_crframe, crframe)));
+ }
+ else
+ {
+ /* In other ABIs, by convention, we use a single CR regnum to
+ represent the fact that all call-saved CR fields are saved.
+ We use CR2_REGNO to be compatible with gcc-2.95 on Linux. */
+ rtx set = gen_rtx_SET (VOIDmode, mem,
+ gen_rtx_REG (SImode, CR2_REGNO));
add_reg_note (insn, REG_FRAME_RELATED_EXPR, set);
}
- insn = emit_move_insn (mem, cr_save_rtx);
- END_USE (REGNO (cr_save_rtx));
+ }
- rs6000_frame_related (insn, frame_reg_rtx, sp_off - frame_off,
- NULL_RTX, NULL_RTX);
+ /* In the ELFv2 ABI we need to save all call-saved CR fields into
+ *separate* slots if the routine calls __builtin_eh_return, so
+ that they can be independently restored by the unwinder. */
+ if (DEFAULT_ABI == ABI_ELFv2 && crtl->calls_eh_return)
+ {
+ int i, cr_off = info->ehcr_offset;
+ rtx crsave;
+
+ /* ??? We might get better performance by using multiple mfocrf
+ instructions. */
+ crsave = gen_rtx_REG (SImode, 0);
+ emit_insn (gen_movesi_from_cr (crsave));
+
+ for (i = 0; i < 8; i++)
+ if (!call_used_regs[CR0_REGNO + i])
+ {
+ rtvec p = rtvec_alloc (2);
+ RTVEC_ELT (p, 0)
+ = gen_frame_store (crsave, frame_reg_rtx, cr_off + frame_off);
+ RTVEC_ELT (p, 1)
+ = gen_rtx_USE (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO + i));
+
+ insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
+
+ RTX_FRAME_RELATED_P (insn) = 1;
+ add_reg_note (insn, REG_FRAME_RELATED_EXPR,
+ gen_frame_store (gen_rtx_REG (SImode, CR0_REGNO + i),
+ sp_reg_rtx, cr_off + sp_off));
+
+ cr_off += reg_size;
+ }
}
/* Update stack and set back pointer unless this is V.4,
be using r12 as frame_reg_rtx and r11 as the static chain
pointer for nested functions. */
save_regno = 12;
- if (DEFAULT_ABI == ABI_AIX && !using_static_chain_p)
+ if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ && !using_static_chain_p)
save_regno = 11;
else if (REGNO (frame_reg_rtx) == 12)
{
can use register 0. This allows us to use a plain 'blr' to return
from the procedure more often. */
int save_LR_around_toc_setup = (TARGET_ELF
- && DEFAULT_ABI != ABI_AIX
+ && DEFAULT_ABI == ABI_V4
&& flag_pic
&& ! info->lr_save_p
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
if (rs6000_save_toc_in_prologue_p ())
{
rtx reg = gen_rtx_REG (reg_mode, TOC_REGNUM);
- emit_insn (gen_frame_store (reg, sp_reg_rtx, 5 * reg_size));
+ emit_insn (gen_frame_store (reg, sp_reg_rtx, RS6000_TOC_SAVE_SLOT));
}
}
}
}
+ /* ELFv2 ABI r2 setup code and local entry point.  This must follow
+ immediately after the global entry point label, since under ELFv2 r12
+ holds the entry address there: the addis/addi pair below derives the
+ TOC pointer from r12, and .localentry marks the point where callers
+ that already share our TOC may enter, skipping that setup. */
+ if (DEFAULT_ABI == ABI_ELFv2 && cfun->machine->r2_setup_needed)
+ {
+ const char *name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
+
+ fprintf (file, "0:\taddis 2,12,.TOC.-0b@ha\n");
+ fprintf (file, "\taddi 2,2,.TOC.-0b@l\n");
+
+ fputs ("\t.localentry\t", file);
+ assemble_name (file, name);
+ fputs (",.-", file);
+ assemble_name (file, name);
+ fputs ("\n", file);
+ }
+
+ /* Output -mprofile-kernel code. This needs to be done here instead of
+ in output_function_profiler since it must go after the ELFv2 ABI
+ local entry point. */
+ if (TARGET_PROFILE_KERNEL)
+ {
+ gcc_assert (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2);
+ gcc_assert (!TARGET_32BIT);
+
+ asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
+ asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
+
+ /* In the ELFv2 ABI we have no compiler stack word. It must be
+ the responsibility of _mcount to preserve the static chain
+ register if required. */
+ if (DEFAULT_ABI != ABI_ELFv2
+ && cfun->static_chain_decl != NULL)
+ {
+ asm_fprintf (file, "\tstd %s,24(%s)\n",
+ reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
+ fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
+ asm_fprintf (file, "\tld %s,24(%s)\n",
+ reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
+ }
+ else
+ fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
+ }
+
rs6000_pic_labelno++;
}
if (using_mfcr_multiple && count > 1)
{
+ rtx insn;
rtvec p;
int ndx;
gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
ndx++;
}
- emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
+ insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
gcc_assert (ndx == count);
+
+ /* For the ELFv2 ABI we generate a CFA_RESTORE for each
+ CR field separately. */
+ if (!exit_func && DEFAULT_ABI == ABI_ELFv2 && flag_shrink_wrap)
+ {
+ for (i = 0; i < 8; i++)
+ if (save_reg_p (CR0_REGNO + i))
+ add_reg_note (insn, REG_CFA_RESTORE,
+ gen_rtx_REG (SImode, CR0_REGNO + i));
+
+ RTX_FRAME_RELATED_P (insn) = 1;
+ }
}
else
for (i = 0; i < 8; i++)
if (save_reg_p (CR0_REGNO + i))
- emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode, CR0_REGNO + i),
- reg));
+ {
+ rtx insn = emit_insn (gen_movsi_to_cr_one
+ (gen_rtx_REG (CCmode, CR0_REGNO + i), reg));
+
+ /* For the ELFv2 ABI we generate a CFA_RESTORE for each
+ CR field separately, attached to the insn that in fact
+ restores this particular CR field. */
+ if (!exit_func && DEFAULT_ABI == ABI_ELFv2 && flag_shrink_wrap)
+ {
+ add_reg_note (insn, REG_CFA_RESTORE,
+ gen_rtx_REG (SImode, CR0_REGNO + i));
- if (!exit_func && (DEFAULT_ABI == ABI_V4 || flag_shrink_wrap))
+ RTX_FRAME_RELATED_P (insn) = 1;
+ }
+ }
+
+ /* For other ABIs, we just generate a single CFA_RESTORE for CR2. */
+ if (!exit_func && DEFAULT_ABI != ABI_ELFv2
+ && (DEFAULT_ABI == ABI_V4 || flag_shrink_wrap))
{
rtx insn = get_last_insn ();
rtx cr = gen_rtx_REG (SImode, CR2_REGNO);
static rtx
add_crlr_cfa_restore (const rs6000_stack_t *info, rtx cfa_restores)
{
- if (info->cr_save_p)
+ if (DEFAULT_ABI == ABI_ELFv2)
+ {
+ int i;
+ for (i = 0; i < 8; i++)
+ if (save_reg_p (CR0_REGNO + i))
+ {
+ rtx cr = gen_rtx_REG (SImode, CR0_REGNO + i);
+ cfa_restores = alloc_reg_note (REG_CFA_RESTORE, cr,
+ cfa_restores);
+ }
+ }
+ else if (info->cr_save_p)
cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
gen_rtx_REG (SImode, CR2_REGNO),
cfa_restores);
+
if (info->lr_save_p)
cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
gen_rtx_REG (Pmode, LR_REGNO),
|| (!restoring_GPRs_inline
&& info->first_fp_reg_save == 64));
+ /* In the ELFv2 ABI we need to restore all call-saved CR fields from
+ *separate* slots if the routine calls __builtin_eh_return, so
+ that they can be independently restored by the unwinder. */
+ if (DEFAULT_ABI == ABI_ELFv2 && crtl->calls_eh_return)
+ {
+ int i, cr_off = info->ehcr_offset;
+
+ for (i = 0; i < 8; i++)
+ if (!call_used_regs[CR0_REGNO + i])
+ {
+ rtx reg = gen_rtx_REG (SImode, 0);
+ emit_insn (gen_frame_load (reg, frame_reg_rtx,
+ cr_off + frame_off));
+
+ insn = emit_insn (gen_movsi_to_cr_one
+ (gen_rtx_REG (CCmode, CR0_REGNO + i), reg));
+
+ if (!exit_func && flag_shrink_wrap)
+ {
+ add_reg_note (insn, REG_CFA_RESTORE,
+ gen_rtx_REG (SImode, CR0_REGNO + i));
+
+ RTX_FRAME_RELATED_P (insn) = 1;
+ }
+
+ cr_off += reg_size;
+ }
+ }
+
/* Get the old lr if we saved it. If we are restoring registers
out-of-line, then the out-of-line routines can do this for us. */
if (restore_lr && restoring_GPRs_inline)
{
rtx reg = gen_rtx_REG (reg_mode, 2);
emit_insn (gen_frame_load (reg, frame_reg_rtx,
- frame_off + 5 * reg_size));
+ frame_off + RS6000_TOC_SAVE_SLOT));
}
for (i = 0; ; ++i)
if (! restoring_FPRs_inline)
{
int i;
+ int reg;
rtx sym;
if (flag_shrink_wrap)
sym = rs6000_savres_routine_sym (info,
SAVRES_FPR | (lr ? SAVRES_LR : 0));
RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
- RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
- gen_rtx_REG (Pmode,
- DEFAULT_ABI == ABI_AIX
- ? 1 : 11));
+ reg = (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2) ? 1 : 11;
+ RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, reg));
+
for (i = 0; i < 64 - info->first_fp_reg_save; i++)
{
rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
System V.4 Powerpc's (and the embedded ABI derived from it) use a
different traceback table. */
- if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
+ if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ && ! flag_inhibit_size_directive
&& rs6000_traceback != traceback_none && !cfun->is_thunk)
{
const char *fname = NULL;
SIBLING_CALL_P (insn) = 1;
emit_barrier ();
+ /* Ensure we have a global entry point for the thunk. ??? We could
+ avoid that if the target routine doesn't need a global entry point,
+ but we do not know whether this is the case at this point. */
+ if (DEFAULT_ABI == ABI_ELFv2)
+ cfun->machine->r2_setup_needed = true;
+
/* Run just enough of rest_of_compilation to get the insns emitted.
There's not really enough bulk here to make other passes such as
instruction scheduling worth while. Note that use_thunk calls
if (TARGET_PROFILE_KERNEL)
return;
- if (DEFAULT_ABI == ABI_AIX)
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
break;
case ABI_AIX:
+ case ABI_ELFv2:
case ABI_DARWIN:
- if (!TARGET_PROFILE_KERNEL)
- {
- /* Don't do anything, done in output_profile_hook (). */
- }
- else
- {
- gcc_assert (!TARGET_32BIT);
-
- asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
- asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
-
- if (cfun->static_chain_decl != NULL)
- {
- asm_fprintf (file, "\tstd %s,24(%s)\n",
- reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
- fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
- asm_fprintf (file, "\tld %s,24(%s)\n",
- reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
- }
- else
- fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
- }
+ /* Don't do anything, done in output_profile_hook (). */
break;
}
}
ret = (TARGET_32BIT) ? 12 : 24;
break;
+ case ABI_ELFv2:
+ gcc_assert (!TARGET_32BIT);
+ ret = 32;
+ break;
+
case ABI_DARWIN:
case ABI_V4:
ret = (TARGET_32BIT) ? 40 : 48;
break;
/* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
+ case ABI_ELFv2:
case ABI_DARWIN:
case ABI_V4:
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
static void
rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
- if (DEFAULT_ABI == ABI_AIX
+ if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
&& TARGET_MINIMAL_TOC
&& !TARGET_RELOCATABLE)
{
else
fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
}
- else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
+ else if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ && !TARGET_RELOCATABLE)
fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
else
{
{
if (flag_pic)
return 3;
- else if (DEFAULT_ABI == ABI_AIX)
+ else if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
return 2;
else
return 0;
void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
- if (TARGET_64BIT)
+ if (TARGET_64BIT && DEFAULT_ABI != ABI_ELFv2)
{
fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
ASM_OUTPUT_LABEL (file, name);
fprintf (file, "%s:\n", desc_name);
fprintf (file, "\t.long %s\n", orig_name);
fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
- if (DEFAULT_ABI == ABI_AIX)
- fputs ("\t.long 0\n", file);
+ fputs ("\t.long 0\n", file);
fprintf (file, "\t.previous\n");
}
ASM_OUTPUT_LABEL (file, name);
}
#endif
#if defined (POWERPC_LINUX) || defined (POWERPC_FREEBSD)
- if (TARGET_32BIT)
+ if (TARGET_32BIT || DEFAULT_ABI == ABI_ELFv2)
file_end_indicate_exec_stack ();
#endif
}
{
enum machine_mode mode;
unsigned int regno;
+ enum machine_mode elt_mode;
+ int n_elts;
/* Special handling for structs in darwin64. */
if (TARGET_MACHO
/* Otherwise fall through to standard ABI rules. */
}
+ /* The ELFv2 ABI returns homogeneous VFP aggregates in registers. */
+ if (rs6000_discover_homogeneous_aggregate (TYPE_MODE (valtype), valtype,
+ &elt_mode, &n_elts))
+ {
+ int first_reg, n_regs, i;
+ rtx par;
+
+ if (SCALAR_FLOAT_MODE_P (elt_mode))
+ {
+ /* _Decimal128 must use even/odd register pairs. */
+ first_reg = (elt_mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
+ n_regs = (GET_MODE_SIZE (elt_mode) + 7) >> 3;
+ }
+ else
+ {
+ first_reg = ALTIVEC_ARG_RETURN;
+ n_regs = 1;
+ }
+
+ par = gen_rtx_PARALLEL (TYPE_MODE (valtype), rtvec_alloc (n_elts));
+ for (i = 0; i < n_elts; i++)
+ {
+ rtx r = gen_rtx_REG (elt_mode, first_reg + i * n_regs);
+ rtx off = GEN_INT (i * GET_MODE_SIZE (elt_mode));
+ XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, r, off);
+ }
+
+ return par;
+ }
+
if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
{
/* Long long return value need be split in -mpowerpc64, 32bit ABI. */
}
\f
-/* A function pointer under AIX is a pointer to a data area whose first word
- contains the actual address of the function, whose second word contains a
- pointer to its TOC, and whose third word contains a value to place in the
- static chain register (r11). Note that if we load the static chain, our
- "trampoline" need not have any executable code. */
+
+/* Expand code to perform a call under the AIX or ELFv2 ABI. */
void
-rs6000_call_indirect_aix (rtx value, rtx func_desc, rtx flag)
+rs6000_call_aix (rtx value, rtx func_desc, rtx flag, rtx cookie)
{
+ rtx toc_reg = gen_rtx_REG (Pmode, TOC_REGNUM);
+ rtx toc_load = NULL_RTX;
+ rtx toc_restore = NULL_RTX;
rtx func_addr;
- rtx toc_reg;
- rtx sc_reg;
- rtx stack_ptr;
- rtx stack_toc_offset;
- rtx stack_toc_mem;
- rtx func_toc_offset;
- rtx func_toc_mem;
- rtx func_sc_offset;
- rtx func_sc_mem;
+ rtx abi_reg = NULL_RTX;
+ rtx call[4];
+ int n_call;
rtx insn;
- rtx (*call_func) (rtx, rtx, rtx, rtx);
- rtx (*call_value_func) (rtx, rtx, rtx, rtx, rtx);
-
- stack_ptr = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
- toc_reg = gen_rtx_REG (Pmode, TOC_REGNUM);
- /* Load up address of the actual function. */
- func_desc = force_reg (Pmode, func_desc);
- func_addr = gen_reg_rtx (Pmode);
- emit_move_insn (func_addr, gen_rtx_MEM (Pmode, func_desc));
-
- if (TARGET_32BIT)
- {
+ /* Handle longcall attributes. */
+ if (INTVAL (cookie) & CALL_LONG)
+ func_desc = rs6000_longcall_ref (func_desc);
+
+ /* Handle indirect calls. */
+ if (GET_CODE (func_desc) != SYMBOL_REF
+ || (DEFAULT_ABI == ABI_AIX && !SYMBOL_REF_FUNCTION_P (func_desc)))
+ {
+ /* Save the TOC into its reserved slot before the call,
+ and prepare to restore it after the call. */
+ rtx stack_ptr = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
+ rtx stack_toc_offset = GEN_INT (RS6000_TOC_SAVE_SLOT);
+ rtx stack_toc_mem = gen_frame_mem (Pmode,
+ gen_rtx_PLUS (Pmode, stack_ptr,
+ stack_toc_offset));
+ toc_restore = gen_rtx_SET (VOIDmode, toc_reg, stack_toc_mem);
+
+ /* Can we optimize saving the TOC in the prologue or
+ do we need to do it at every call? */
+ if (TARGET_SAVE_TOC_INDIRECT && !cfun->calls_alloca)
+ cfun->machine->save_toc_in_prologue = true;
+ else
+ {
+ MEM_VOLATILE_P (stack_toc_mem) = 1;
+ emit_move_insn (stack_toc_mem, toc_reg);
+ }
- stack_toc_offset = GEN_INT (TOC_SAVE_OFFSET_32BIT);
- func_toc_offset = GEN_INT (AIX_FUNC_DESC_TOC_32BIT);
- func_sc_offset = GEN_INT (AIX_FUNC_DESC_SC_32BIT);
- if (TARGET_POINTERS_TO_NESTED_FUNCTIONS)
+ if (DEFAULT_ABI == ABI_ELFv2)
{
- call_func = gen_call_indirect_aix32bit;
- call_value_func = gen_call_value_indirect_aix32bit;
+ /* A function pointer in the ELFv2 ABI is just a plain address, but
+ the ABI requires it to be loaded into r12 before the call. */
+ func_addr = gen_rtx_REG (Pmode, 12);
+ emit_move_insn (func_addr, func_desc);
+ abi_reg = func_addr;
}
else
{
- call_func = gen_call_indirect_aix32bit_nor11;
- call_value_func = gen_call_value_indirect_aix32bit_nor11;
+ /* A function pointer under AIX is a pointer to a data area whose
+ first word contains the actual address of the function, whose
+ second word contains a pointer to its TOC, and whose third word
+ contains a value to place in the static chain register (r11).
+ Note that if we load the static chain, our "trampoline" need
+ not have any executable code. */
+
+ /* Load up address of the actual function. */
+ func_desc = force_reg (Pmode, func_desc);
+ func_addr = gen_reg_rtx (Pmode);
+ emit_move_insn (func_addr, gen_rtx_MEM (Pmode, func_desc));
+
+ /* Prepare to load the TOC of the called function. Note that the
+ TOC load must happen immediately before the actual call so
+ that unwinding the TOC registers works correctly. See the
+ comment in frob_update_context. */
+ rtx func_toc_offset = GEN_INT (GET_MODE_SIZE (Pmode));
+ rtx func_toc_mem = gen_rtx_MEM (Pmode,
+ gen_rtx_PLUS (Pmode, func_desc,
+ func_toc_offset));
+ toc_load = gen_rtx_USE (VOIDmode, func_toc_mem);
+
+ /* If we have a static chain, load it up. */
+ if (TARGET_POINTERS_TO_NESTED_FUNCTIONS)
+ {
+ rtx sc_reg = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
+ rtx func_sc_offset = GEN_INT (2 * GET_MODE_SIZE (Pmode));
+ rtx func_sc_mem = gen_rtx_MEM (Pmode,
+ gen_rtx_PLUS (Pmode, func_desc,
+ func_sc_offset));
+ emit_move_insn (sc_reg, func_sc_mem);
+ abi_reg = sc_reg;
+ }
}
}
else
{
- stack_toc_offset = GEN_INT (TOC_SAVE_OFFSET_64BIT);
- func_toc_offset = GEN_INT (AIX_FUNC_DESC_TOC_64BIT);
- func_sc_offset = GEN_INT (AIX_FUNC_DESC_SC_64BIT);
- if (TARGET_POINTERS_TO_NESTED_FUNCTIONS)
- {
- call_func = gen_call_indirect_aix64bit;
- call_value_func = gen_call_value_indirect_aix64bit;
- }
- else
- {
- call_func = gen_call_indirect_aix64bit_nor11;
- call_value_func = gen_call_value_indirect_aix64bit_nor11;
- }
+ /* Direct calls use the TOC: for local calls, the callee will
+ assume the TOC register is set; for non-local calls, the
+ PLT stub needs the TOC register. */
+ abi_reg = toc_reg;
+ func_addr = func_desc;
}
- /* Reserved spot to store the TOC. */
- stack_toc_mem = gen_frame_mem (Pmode,
- gen_rtx_PLUS (Pmode,
- stack_ptr,
- stack_toc_offset));
+ /* Create the call. */
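+  /* The result is a PARALLEL of up to four elements: the CALL (wrapped in
+     a SET when a value is returned), an optional USE of the descriptor's
+     TOC word, an optional SET restoring r2 from its stack slot, and a
+     CLOBBER of the link register, matching the call patterns in rs6000.md.  */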
+ call[0] = gen_rtx_CALL (VOIDmode, gen_rtx_MEM (SImode, func_addr), flag);
+ if (value != NULL_RTX)
+ call[0] = gen_rtx_SET (VOIDmode, value, call[0]);
+ n_call = 1;
- gcc_assert (cfun);
- gcc_assert (cfun->machine);
+ if (toc_load)
+ call[n_call++] = toc_load;
+ if (toc_restore)
+ call[n_call++] = toc_restore;
- /* Can we optimize saving the TOC in the prologue or do we need to do it at
- every call? */
- if (TARGET_SAVE_TOC_INDIRECT && !cfun->calls_alloca)
- cfun->machine->save_toc_in_prologue = true;
+ call[n_call++] = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, LR_REGNO));
- else
- {
- MEM_VOLATILE_P (stack_toc_mem) = 1;
- emit_move_insn (stack_toc_mem, toc_reg);
- }
+ insn = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (n_call, call));
+ insn = emit_call_insn (insn);
- /* Calculate the address to load the TOC of the called function. We don't
- actually load this until the split after reload. */
- func_toc_mem = gen_rtx_MEM (Pmode,
- gen_rtx_PLUS (Pmode,
- func_desc,
- func_toc_offset));
+ /* Mention all registers defined by the ABI to hold information
+ as uses in CALL_INSN_FUNCTION_USAGE. */
+ if (abi_reg)
+ use_reg (&CALL_INSN_FUNCTION_USAGE (insn), abi_reg);
+}
- /* If we have a static chain, load it up. */
- if (TARGET_POINTERS_TO_NESTED_FUNCTIONS)
- {
- func_sc_mem = gen_rtx_MEM (Pmode,
- gen_rtx_PLUS (Pmode,
- func_desc,
- func_sc_offset));
+/* Expand code to perform a sibling call under the AIX or ELFv2 ABI. */
- sc_reg = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
- emit_move_insn (sc_reg, func_sc_mem);
- }
+void
+rs6000_sibcall_aix (rtx value, rtx func_desc, rtx flag, rtx cookie)
+{
+ rtx call[2];
+ rtx insn;
+
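+  /* Unlike rs6000_call_aix, no TOC save, restore or descriptor load is
+     needed: sibling calls are only used for callees that share the
+     caller's TOC, so we merely note the r2 and LR uses below.  */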
+ gcc_assert (INTVAL (cookie) == 0);
/* Create the call. */
- if (value)
- insn = call_value_func (value, func_addr, flag, func_toc_mem,
- stack_toc_mem);
- else
- insn = call_func (func_addr, flag, func_toc_mem, stack_toc_mem);
+ call[0] = gen_rtx_CALL (VOIDmode, gen_rtx_MEM (SImode, func_desc), flag);
+ if (value != NULL_RTX)
+ call[0] = gen_rtx_SET (VOIDmode, value, call[0]);
+
+ call[1] = simple_return_rtx;
+
+ insn = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (2, call));
+ insn = emit_call_insn (insn);
- emit_call_insn (insn);
+ /* Note use of the TOC register. */
+ use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, TOC_REGNUM));
+  /* We also need to mark a use of the link register, since the function
+     we sibling-call to will use it to return to our caller.  */
+ use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, LR_REGNO));
}
/* Return whether we need to always update the saved TOC pointer when we update
(TFHAR_REGNO 114)
(TFIAR_REGNO 115)
(TEXASR_REGNO 116)
-
- ; ABI defined stack offsets for storing the TOC pointer with AIX calls.
- (TOC_SAVE_OFFSET_32BIT 20)
- (TOC_SAVE_OFFSET_64BIT 40)
-
- ; Function TOC offset in the AIX function descriptor.
- (AIX_FUNC_DESC_TOC_32BIT 4)
- (AIX_FUNC_DESC_TOC_64BIT 8)
-
- ; Static chain offset in the AIX function descriptor.
- (AIX_FUNC_DESC_SC_32BIT 8)
- (AIX_FUNC_DESC_SC_64BIT 16)
])
;;
(match_operand:TLSmode 2 "rs6000_tls_symbol_ref" "")]
UNSPEC_TLSGD)
(clobber (reg:SI LR_REGNO))]
- "HAVE_AS_TLS && DEFAULT_ABI == ABI_AIX"
+ "HAVE_AS_TLS && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)"
{
if (TARGET_CMODEL != CMODEL_SMALL)
return "addis %0,%1,%2@got@tlsgd@ha\;addi %0,%0,%2@got@tlsgd@l\;"
(unspec:TLSmode [(match_operand:TLSmode 3 "rs6000_tls_symbol_ref" "")]
UNSPEC_TLSGD)
(clobber (reg:SI LR_REGNO))]
- "HAVE_AS_TLS && DEFAULT_ABI == ABI_AIX && TARGET_TLS_MARKERS"
+ "HAVE_AS_TLS && TARGET_TLS_MARKERS
+ && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)"
"bl %z1(%3@tlsgd)\;nop"
[(set_attr "type" "branch")
(set_attr "length" "8")])
(unspec:TLSmode [(match_operand:TLSmode 1 "gpc_reg_operand" "b")]
UNSPEC_TLSLD)
(clobber (reg:SI LR_REGNO))]
- "HAVE_AS_TLS && DEFAULT_ABI == ABI_AIX"
+ "HAVE_AS_TLS && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)"
{
if (TARGET_CMODEL != CMODEL_SMALL)
return "addis %0,%1,%&@got@tlsld@ha\;addi %0,%0,%&@got@tlsld@l\;"
(match_operand 2 "" "g")))
(unspec:TLSmode [(const_int 0)] UNSPEC_TLSLD)
(clobber (reg:SI LR_REGNO))]
- "HAVE_AS_TLS && DEFAULT_ABI == ABI_AIX && TARGET_TLS_MARKERS"
+ "HAVE_AS_TLS && TARGET_TLS_MARKERS
+ && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)"
"bl %z1(%&@tlsld)\;nop"
[(set_attr "type" "branch")
(set_attr "length" "8")])
[(parallel [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
(unspec:SI [(const_int 0)] UNSPEC_TOC))
(use (reg:SI 2))])]
- "DEFAULT_ABI == ABI_AIX && TARGET_32BIT"
+ "(DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2) && TARGET_32BIT"
"*
{
char buf[30];
[(parallel [(set (match_operand:DI 0 "gpc_reg_operand" "=r")
(unspec:DI [(const_int 0)] UNSPEC_TOC))
(use (reg:DI 2))])]
- "DEFAULT_ABI == ABI_AIX && TARGET_64BIT"
+ "(DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2) && TARGET_64BIT"
"*
{
char buf[30];
[(parallel [(set (reg:SI LR_REGNO)
(match_operand:SI 0 "immediate_operand" "s"))
(use (unspec [(match_dup 0)] UNSPEC_TOC))])]
- "TARGET_ELF && DEFAULT_ABI != ABI_AIX
+ "TARGET_ELF && DEFAULT_ABI == ABI_V4
&& (flag_pic == 2 || (flag_pic && TARGET_SECURE_PLT))"
"")
[(set (reg:SI LR_REGNO)
(match_operand:SI 0 "immediate_operand" "s"))
(use (unspec [(match_dup 0)] UNSPEC_TOC))]
- "!TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI != ABI_AIX
+ "!TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI == ABI_V4
&& (flag_pic == 2 || (flag_pic && TARGET_SECURE_PLT))"
"bcl 20,31,%0\\n%0:"
[(set_attr "type" "branch")
[(set (reg:SI LR_REGNO)
(match_operand:SI 0 "immediate_operand" "s"))
(use (unspec [(match_dup 0)] UNSPEC_TOC))]
- "TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI != ABI_AIX
+ "TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI == ABI_V4
&& (flag_pic == 2 || (flag_pic && TARGET_SECURE_PLT))"
"*
{
(label_ref (match_operand 1 "" ""))]
UNSPEC_TOCPTR))
(match_dup 1)])]
- "TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2"
+ "TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 2"
"")
(define_insn "load_toc_v4_PIC_1b_normal"
(label_ref (match_operand 1 "" ""))]
UNSPEC_TOCPTR))
(match_dup 1)]
- "!TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2"
+ "!TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 2"
"bcl 20,31,$+8\;.long %0-$"
[(set_attr "type" "branch")
(set_attr "length" "8")])
(label_ref (match_operand 1 "" ""))]
UNSPEC_TOCPTR))
(match_dup 1)]
- "TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2"
+ "TARGET_LINK_STACK && TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 2"
"*
{
char name[32];
(mem:SI (plus:SI (match_operand:SI 1 "gpc_reg_operand" "b")
(minus:SI (match_operand:SI 2 "immediate_operand" "s")
(match_operand:SI 3 "immediate_operand" "s")))))]
- "TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2"
+ "TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 2"
"lwz %0,%2-%3(%1)"
[(set_attr "type" "load")])
(high:SI
(minus:SI (match_operand:SI 2 "symbol_ref_operand" "s")
(match_operand:SI 3 "symbol_ref_operand" "s")))))]
- "TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic"
+ "TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI == ABI_V4 && flag_pic"
"addis %0,%1,%2-%3@ha")
(define_insn "load_toc_v4_PIC_3c"
(lo_sum:SI (match_operand:SI 1 "gpc_reg_operand" "b")
(minus:SI (match_operand:SI 2 "symbol_ref_operand" "s")
(match_operand:SI 3 "symbol_ref_operand" "s"))))]
- "TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic"
+ "TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI == ABI_V4 && flag_pic"
"addi %0,%1,%2-%3@l")
;; If the TOC is shared over a translation unit, as happens with all
operands[0] = XEXP (operands[0], 0);
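+
+  /* For the AIX-style ABIs (AIX and ELFv2) all calls are expanded through
+     rs6000_call_aix, which handles longcalls, function descriptors and
+     TOC save/restore itself.  */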
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ rs6000_call_aix (NULL_RTX, operands[0], operands[1], operands[2]);
+ DONE;
+ }
+
if (GET_CODE (operands[0]) != SYMBOL_REF
- || (DEFAULT_ABI == ABI_AIX && !SYMBOL_REF_FUNCTION_P (operands[0]))
|| (DEFAULT_ABI != ABI_DARWIN && (INTVAL (operands[2]) & CALL_LONG) != 0))
{
if (INTVAL (operands[2]) & CALL_LONG)
operands[0] = force_reg (Pmode, operands[0]);
break;
- case ABI_AIX:
- /* AIX function pointers are really pointers to a three word
- area. */
- rs6000_call_indirect_aix (NULL_RTX, operands[0], operands[1]);
- DONE;
-
default:
gcc_unreachable ();
}
operands[1] = XEXP (operands[1], 0);
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ rs6000_call_aix (operands[0], operands[1], operands[2], operands[3]);
+ DONE;
+ }
+
if (GET_CODE (operands[1]) != SYMBOL_REF
- || (DEFAULT_ABI == ABI_AIX && !SYMBOL_REF_FUNCTION_P (operands[1]))
|| (DEFAULT_ABI != ABI_DARWIN && (INTVAL (operands[3]) & CALL_LONG) != 0))
{
if (INTVAL (operands[3]) & CALL_LONG)
operands[1] = force_reg (Pmode, operands[1]);
break;
- case ABI_AIX:
- /* AIX function pointers are really pointers to a three word
- area. */
- rs6000_call_indirect_aix (operands[0], operands[1], operands[2]);
- DONE;
-
default:
gcc_unreachable ();
}
[(set_attr "type" "branch")
(set_attr "length" "4,8")])
-;; Call to indirect functions with the AIX abi using a 3 word descriptor.
-;; Operand0 is the addresss of the function to call
-;; Operand1 is the flag for System V.4 for unprototyped or FP registers
-;; Operand2 is the location in the function descriptor to load r2 from
-;; Operand3 is the stack location to hold the current TOC pointer
-
-(define_insn "call_indirect_aix<ptrsize>"
- [(call (mem:SI (match_operand:P 0 "register_operand" "c,*l"))
- (match_operand 1 "" "g,g"))
- (use (match_operand:P 2 "memory_operand" "<ptrm>,<ptrm>"))
- (set (reg:P TOC_REGNUM) (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
- (use (reg:P STATIC_CHAIN_REGNUM))
- (clobber (reg:P LR_REGNO))]
- "DEFAULT_ABI == ABI_AIX && TARGET_POINTERS_TO_NESTED_FUNCTIONS"
- "<ptrload> 2,%2\;b%T0l\;<ptrload> 2,%3"
- [(set_attr "type" "jmpreg")
- (set_attr "length" "12")])
-
-;; Like call_indirect_aix<ptrsize>, but no use of the static chain
-;; Operand0 is the addresss of the function to call
-;; Operand1 is the flag for System V.4 for unprototyped or FP registers
-;; Operand2 is the location in the function descriptor to load r2 from
-;; Operand3 is the stack location to hold the current TOC pointer
-
-(define_insn "call_indirect_aix<ptrsize>_nor11"
- [(call (mem:SI (match_operand:P 0 "register_operand" "c,*l"))
- (match_operand 1 "" "g,g"))
- (use (match_operand:P 2 "memory_operand" "<ptrm>,<ptrm>"))
- (set (reg:P TOC_REGNUM) (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
- (clobber (reg:P LR_REGNO))]
- "DEFAULT_ABI == ABI_AIX && !TARGET_POINTERS_TO_NESTED_FUNCTIONS"
- "<ptrload> 2,%2\;b%T0l\;<ptrload> 2,%3"
- [(set_attr "type" "jmpreg")
- (set_attr "length" "12")])
-
-;; Operand0 is the return result of the function
-;; Operand1 is the addresss of the function to call
-;; Operand2 is the flag for System V.4 for unprototyped or FP registers
-;; Operand3 is the location in the function descriptor to load r2 from
-;; Operand4 is the stack location to hold the current TOC pointer
-
-(define_insn "call_value_indirect_aix<ptrsize>"
- [(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:P 1 "register_operand" "c,*l"))
- (match_operand 2 "" "g,g")))
- (use (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
- (set (reg:P TOC_REGNUM) (match_operand:P 4 "memory_operand" "<ptrm>,<ptrm>"))
- (use (reg:P STATIC_CHAIN_REGNUM))
- (clobber (reg:P LR_REGNO))]
- "DEFAULT_ABI == ABI_AIX && TARGET_POINTERS_TO_NESTED_FUNCTIONS"
- "<ptrload> 2,%3\;b%T1l\;<ptrload> 2,%4"
- [(set_attr "type" "jmpreg")
- (set_attr "length" "12")])
-
-;; Like call_value_indirect_aix<ptrsize>, but no use of the static chain
-;; Operand0 is the return result of the function
-;; Operand1 is the addresss of the function to call
-;; Operand2 is the flag for System V.4 for unprototyped or FP registers
-;; Operand3 is the location in the function descriptor to load r2 from
-;; Operand4 is the stack location to hold the current TOC pointer
-
-(define_insn "call_value_indirect_aix<ptrsize>_nor11"
- [(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:P 1 "register_operand" "c,*l"))
- (match_operand 2 "" "g,g")))
- (use (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
- (set (reg:P TOC_REGNUM) (match_operand:P 4 "memory_operand" "<ptrm>,<ptrm>"))
- (clobber (reg:P LR_REGNO))]
- "DEFAULT_ABI == ABI_AIX && !TARGET_POINTERS_TO_NESTED_FUNCTIONS"
- "<ptrload> 2,%3\;b%T1l\;<ptrload> 2,%4"
- [(set_attr "type" "jmpreg")
- (set_attr "length" "12")])
-
-;; Call to function which may be in another module. Restore the TOC
-;; pointer (r2) after the call unless this is System V.
-;; Operand2 is nonzero if we are using the V.4 calling sequence and
-;; either the function was not prototyped, or it was prototyped as a
-;; variable argument function. It is > 0 if FP registers were passed
-;; and < 0 if they were not.
-
-(define_insn "*call_nonlocal_aix32"
- [(call (mem:SI (match_operand:SI 0 "symbol_ref_operand" "s"))
- (match_operand 1 "" "g"))
- (use (match_operand:SI 2 "immediate_operand" "O"))
- (clobber (reg:SI LR_REGNO))]
- "TARGET_32BIT
- && DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[2]) & CALL_LONG) == 0"
- "bl %z0\;nop"
- [(set_attr "type" "branch")
- (set_attr "length" "8")])
-
-(define_insn "*call_nonlocal_aix64"
- [(call (mem:SI (match_operand:DI 0 "symbol_ref_operand" "s"))
- (match_operand 1 "" "g"))
- (use (match_operand:SI 2 "immediate_operand" "O"))
- (clobber (reg:SI LR_REGNO))]
- "TARGET_64BIT
- && DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[2]) & CALL_LONG) == 0"
- "bl %z0\;nop"
- [(set_attr "type" "branch")
- (set_attr "length" "8")])
-
-(define_insn "*call_value_nonlocal_aix32"
- [(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:SI 1 "symbol_ref_operand" "s"))
- (match_operand 2 "" "g")))
- (use (match_operand:SI 3 "immediate_operand" "O"))
- (clobber (reg:SI LR_REGNO))]
- "TARGET_32BIT
- && DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[3]) & CALL_LONG) == 0"
- "bl %z1\;nop"
- [(set_attr "type" "branch")
- (set_attr "length" "8")])
-
-(define_insn "*call_value_nonlocal_aix64"
- [(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:DI 1 "symbol_ref_operand" "s"))
- (match_operand 2 "" "g")))
- (use (match_operand:SI 3 "immediate_operand" "O"))
- (clobber (reg:SI LR_REGNO))]
- "TARGET_64BIT
- && DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[3]) & CALL_LONG) == 0"
- "bl %z1\;nop"
- [(set_attr "type" "branch")
- (set_attr "length" "8")])
;; A function pointer under System V is just a normal pointer
;; operands[0] is the function pointer
[(set_attr "type" "branch,branch")
(set_attr "length" "4,8")])
+
+;; Call to AIX abi function in the same module.
+
+(define_insn "*call_local_aix<mode>"
+ [(call (mem:SI (match_operand:P 0 "current_file_function_operand" "s"))
+ (match_operand 1 "" "g"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "bl %z0"
+ [(set_attr "type" "branch")
+ (set_attr "length" "4")])
+
+(define_insn "*call_value_local_aix<mode>"
+ [(set (match_operand 0 "" "")
+ (call (mem:SI (match_operand:P 1 "current_file_function_operand" "s"))
+ (match_operand 2 "" "g")))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "bl %z1"
+ [(set_attr "type" "branch")
+ (set_attr "length" "4")])
+
+;; Call to AIX abi function which may be in another module.
+;; Restore the TOC pointer (r2) after the call.
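+;; The nop after the bl is a placeholder: when the linker routes the call
+;; through a stub that switches the TOC, it replaces the nop with the load
+;; that restores r2 from its stack save slot.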
+
+(define_insn "*call_nonlocal_aix<mode>"
+ [(call (mem:SI (match_operand:P 0 "symbol_ref_operand" "s"))
+ (match_operand 1 "" "g"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "bl %z0\;nop"
+ [(set_attr "type" "branch")
+ (set_attr "length" "8")])
+
+(define_insn "*call_value_nonlocal_aix<mode>"
+ [(set (match_operand 0 "" "")
+ (call (mem:SI (match_operand:P 1 "symbol_ref_operand" "s"))
+ (match_operand 2 "" "g")))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "bl %z1\;nop"
+ [(set_attr "type" "branch")
+ (set_attr "length" "8")])
+
+;; Call to indirect functions with the AIX abi using a 3 word descriptor.
+;; Operand0 is the address of the function to call
+;; Operand2 is the location in the function descriptor to load r2 from
+;; Operand3 is the stack location to hold the current TOC pointer
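+;;
+;; For a 64-bit call through a descriptor whose address happens to be in r9,
+;; the emitted sequence is
+;;     ld 2,8(9)     # callee's TOC from the descriptor
+;;     bctrl
+;;     ld 2,40(1)    # caller's TOC back from its stack save slot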
+
+(define_insn "*call_indirect_aix<mode>"
+ [(call (mem:SI (match_operand:P 0 "register_operand" "c,*l"))
+ (match_operand 1 "" "g,g"))
+ (use (match_operand:P 2 "memory_operand" "<ptrm>,<ptrm>"))
+ (set (reg:P TOC_REGNUM) (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX"
+ "<ptrload> 2,%2\;b%T0l\;<ptrload> 2,%3"
+ [(set_attr "type" "jmpreg")
+ (set_attr "length" "12")])
+
+(define_insn "*call_value_indirect_aix<mode>"
+ [(set (match_operand 0 "" "")
+ (call (mem:SI (match_operand:P 1 "register_operand" "c,*l"))
+ (match_operand 2 "" "g,g")))
+ (use (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
+ (set (reg:P TOC_REGNUM) (match_operand:P 4 "memory_operand" "<ptrm>,<ptrm>"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_AIX"
+ "<ptrload> 2,%3\;b%T1l\;<ptrload> 2,%4"
+ [(set_attr "type" "jmpreg")
+ (set_attr "length" "12")])
+
+;; Call to indirect functions with the ELFv2 ABI.
+;; Operand0 is the address of the function to call
+;; Operand2 is the stack location to hold the current TOC pointer
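+;; Since an ELFv2 function pointer is a plain address, no descriptor load is
+;; needed; a 64-bit call simply becomes, e.g.,
+;;     bctrl
+;;     ld 2,24(1)    # caller's TOC back from the ELFv2 TOC save slot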
+
+(define_insn "*call_indirect_elfv2<mode>"
+ [(call (mem:SI (match_operand:P 0 "register_operand" "c,*l"))
+ (match_operand 1 "" "g,g"))
+ (set (reg:P TOC_REGNUM) (match_operand:P 2 "memory_operand" "<ptrm>,<ptrm>"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_ELFv2"
+ "b%T0l\;<ptrload> 2,%2"
+ [(set_attr "type" "jmpreg")
+ (set_attr "length" "8")])
+
+(define_insn "*call_value_indirect_elfv2<mode>"
+ [(set (match_operand 0 "" "")
+ (call (mem:SI (match_operand:P 1 "register_operand" "c,*l"))
+ (match_operand 2 "" "g,g")))
+ (set (reg:P TOC_REGNUM) (match_operand:P 3 "memory_operand" "<ptrm>,<ptrm>"))
+ (clobber (reg:P LR_REGNO))]
+ "DEFAULT_ABI == ABI_ELFv2"
+ "b%T1l\;<ptrload> 2,%3"
+ [(set_attr "type" "jmpreg")
+ (set_attr "length" "8")])
+
+
;; Call subroutine returning any type.
(define_expand "untyped_call"
[(parallel [(call (match_operand 0 "" "")
gcc_assert (GET_CODE (operands[1]) == CONST_INT);
operands[0] = XEXP (operands[0], 0);
+
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ rs6000_sibcall_aix (NULL_RTX, operands[0], operands[1], operands[2]);
+ DONE;
+ }
+}")
+
+(define_expand "sibcall_value"
+ [(parallel [(set (match_operand 0 "register_operand" "")
+ (call (mem:SI (match_operand 1 "address_operand" ""))
+ (match_operand 2 "" "")))
+ (use (match_operand 3 "" ""))
+ (use (reg:SI LR_REGNO))
+ (simple_return)])]
+ ""
+ "
+{
+#if TARGET_MACHO
+ if (MACHOPIC_INDIRECT)
+ operands[1] = machopic_indirect_call_target (operands[1]);
+#endif
+
+ gcc_assert (GET_CODE (operands[1]) == MEM);
+ gcc_assert (GET_CODE (operands[2]) == CONST_INT);
+
+ operands[1] = XEXP (operands[1], 0);
+
+ if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2)
+ {
+ rs6000_sibcall_aix (operands[0], operands[1], operands[2], operands[3]);
+ DONE;
+ }
}")
;; this and similar patterns must be marked as using LR, otherwise
[(set_attr "type" "branch")
(set_attr "length" "4,8")])
-
(define_insn "*sibcall_value_local64"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:DI 1 "current_file_function_operand" "s,s"))
[(set_attr "type" "branch")
(set_attr "length" "4,8")])
-(define_insn "*sibcall_nonlocal_aix<mode>"
- [(call (mem:SI (match_operand:P 0 "call_operand" "s,c"))
- (match_operand 1 "" "g,g"))
- (use (match_operand:SI 2 "immediate_operand" "O,O"))
- (use (reg:SI LR_REGNO))
- (simple_return)]
- "DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[2]) & CALL_LONG) == 0"
- "@
- b %z0
- b%T0"
- [(set_attr "type" "branch")
- (set_attr "length" "4")])
-
-(define_insn "*sibcall_value_nonlocal_aix<mode>"
- [(set (match_operand 0 "" "")
- (call (mem:SI (match_operand:P 1 "call_operand" "s,c"))
- (match_operand 2 "" "g,g")))
- (use (match_operand:SI 3 "immediate_operand" "O,O"))
- (use (reg:SI LR_REGNO))
- (simple_return)]
- "DEFAULT_ABI == ABI_AIX
- && (INTVAL (operands[3]) & CALL_LONG) == 0"
- "@
- b %z1
- b%T1"
- [(set_attr "type" "branch")
- (set_attr "length" "4")])
-
(define_insn "*sibcall_nonlocal_sysv<mode>"
[(call (mem:SI (match_operand:P 0 "call_operand" "s,s,c,c"))
(match_operand 1 "" ""))
[(set_attr "type" "branch")
(set_attr "length" "4,8,4,8")])
-(define_expand "sibcall_value"
- [(parallel [(set (match_operand 0 "register_operand" "")
- (call (mem:SI (match_operand 1 "address_operand" ""))
- (match_operand 2 "" "")))
- (use (match_operand 3 "" ""))
- (use (reg:SI LR_REGNO))
- (simple_return)])]
- ""
- "
-{
-#if TARGET_MACHO
- if (MACHOPIC_INDIRECT)
- operands[1] = machopic_indirect_call_target (operands[1]);
-#endif
-
- gcc_assert (GET_CODE (operands[1]) == MEM);
- gcc_assert (GET_CODE (operands[2]) == CONST_INT);
-
- operands[1] = XEXP (operands[1], 0);
-}")
-
(define_insn "*sibcall_value_nonlocal_sysv<mode>"
[(set (match_operand 0 "" "")
(call (mem:SI (match_operand:P 1 "call_operand" "s,s,c,c"))
[(set_attr "type" "branch")
(set_attr "length" "4,8,4,8")])
+;; AIX ABI sibling call patterns.
+
+(define_insn "*sibcall_aix<mode>"
+ [(call (mem:SI (match_operand:P 0 "call_operand" "s,c"))
+ (match_operand 1 "" "g,g"))
+ (simple_return)]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "@
+ b %z0
+ b%T0"
+ [(set_attr "type" "branch")
+ (set_attr "length" "4")])
+
+(define_insn "*sibcall_value_aix<mode>"
+ [(set (match_operand 0 "" "")
+ (call (mem:SI (match_operand:P 1 "call_operand" "s,c"))
+ (match_operand 2 "" "g,g")))
+ (simple_return)]
+ "DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_ELFv2"
+ "@
+ b %z1
+ b%T1"
+ [(set_attr "type" "branch")
+ (set_attr "length" "4")])
+
(define_expand "sibcall_epilogue"
[(use (const_int 0))]
""
"mfcr %0"
[(set_attr "type" "mfcr")])
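+
+;; Store a GPR holding CR contents to memory.  The crsave_operation parallel
+;; also records which CR fields the stored value holds, so the RTL passes
+;; see this store as saving those fields.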
+(define_insn "*crsave"
+ [(match_parallel 0 "crsave_operation"
+ [(set (match_operand:SI 1 "memory_operand" "=m")
+ (match_operand:SI 2 "gpc_reg_operand" "r"))])]
+ ""
+ "stw %2,%1"
+ [(set_attr "type" "store")])
+
(define_insn "*stmw"
[(match_parallel 0 "stmw_operation"
[(set (match_operand:SI 1 "memory_operand" "=m")