/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989-2018 Free Software Foundation, Inc.
+ Copyright (C) 1989-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-ssanames.h"
#include "intl.h"
#include "stringpool.h"
+#include "hash-map.h"
+#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"
+#include "attr-fnspec.h"
+#include "value-query.h"
+#include "tree-pretty-print.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
/* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
form for emit_group_move. */
rtx parallel_value;
- /* If value is passed in neither reg nor stack, this field holds a number
- of a special slot to be used. */
- rtx special_slot;
- /* For pointer bounds hold an index of parm bounds are bound to. -1 if
- there is no such pointer. */
- int pointer_arg;
- /* If pointer_arg refers a structure, then pointer_offset holds an offset
- of a pointer in this structure. */
- int pointer_offset;
/* If REG was promoted from the actual mode of the argument expression,
indicates whether the promotion is sign- or zero-extended. */
int unsignedp;
It is zero if this call doesn't want a structure value.
NEXT_ARG_REG is the rtx that results from executing
- targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
+ targetm.calls.function_arg (&args_so_far,
+ function_arg_info::end_marker ());
just after all the args have had their registers assigned.
This could be whatever you like, but normally it is the first
arg-register beyond those used for args in this call,
{
tree name_decl = DECL_NAME (fndecl);
- if (fndecl && name_decl
- && IDENTIFIER_LENGTH (name_decl) <= 11
- /* Exclude functions not at the file scope, or not `extern',
- since they are not the magic functions we would otherwise
- think they are.
- FIXME: this should be handled with attributes, not with this
- hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
- because you can declare fork() inside a function if you
- wish. */
- && (DECL_CONTEXT (fndecl) == NULL_TREE
- || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
- && TREE_PUBLIC (fndecl))
+ if (maybe_special_function_p (fndecl)
+ && IDENTIFIER_LENGTH (name_decl) <= 11)
{
const char *name = IDENTIFIER_POINTER (name_decl);
const char *tname = name;
return flags;
}
+/* Return the fnspec (function attribute spec string) for FNDECL, or
+   the empty spec "" if none is known.  An explicit "fn spec" type
+   attribute takes precedence over the built-in table.  */
+
+static attr_fnspec
+decl_fnspec (tree fndecl)
+{
+  tree attr;
+  tree type = TREE_TYPE (fndecl);
+  if (type)
+    {
+      /* Prefer an explicit "fn spec" attribute on the function type.  */
+      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
+      if (attr)
+	{
+	  return TREE_VALUE (TREE_VALUE (attr));
+	}
+    }
+  /* Otherwise fall back to the predefined spec of a normal built-in.  */
+  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
+    return builtin_fnspec (fndecl);
+  return "";
+}
+
/* Similar to special_function_p; return a set of ERF_ flags for the
function FNDECL. */
static int
decl_return_flags (tree fndecl)
{
- tree attr;
- tree type = TREE_TYPE (fndecl);
- if (!type)
- return 0;
-
- attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
- if (!attr)
- return 0;
+ attr_fnspec fnspec = decl_fnspec (fndecl);
- attr = TREE_VALUE (TREE_VALUE (attr));
- if (!attr || TREE_STRING_LENGTH (attr) < 1)
- return 0;
+ unsigned int arg;
+ if (fnspec.returns_arg (&arg))
+ return ERF_RETURNS_ARG | arg;
- switch (TREE_STRING_POINTER (attr)[0])
- {
- case '1':
- case '2':
- case '3':
- case '4':
- return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
-
- case 'm':
- return ERF_NOALIAS;
-
- case '.':
- default:
- return 0;
- }
+ if (fnspec.returns_noalias_p ())
+ return ERF_NOALIAS;
+ return 0;
}
/* Return nonzero when FNDECL represents a call to setjmp. */
return false;
fndecl = gimple_call_fndecl (stmt);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (fndecl))
{
CASE_BUILT_IN_ALLOCA:
return flags;
}
-/* Return true if TYPE should be passed by invisible reference. */
+/* Return true if ARG should be passed by invisible reference. */
bool
-pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
- tree type, bool named_arg)
+pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
- if (type)
+ if (tree type = arg.type)
{
/* If this type contains non-trivial constructors, then it is
forbidden for the middle-end to create any new copies. */
return true;
/* GCC post 3.4 passes *all* variable sized types by reference. */
- if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
return true;
/* If a record type should be passed the same as its first (and only)
member, use the type and mode of that member. */
if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
{
- type = TREE_TYPE (first_field (type));
- mode = TYPE_MODE (type);
+ arg.type = TREE_TYPE (first_field (type));
+ arg.mode = TYPE_MODE (arg.type);
}
}
- return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
- type, named_arg);
+ return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
+}
+
+/* Return true if TYPE should be passed by reference when passed to
+   the "..." arguments of a function.  */
+
+bool
+pass_va_arg_by_reference (tree type)
+{
+  /* Variadic arguments are by definition unnamed; no CUMULATIVE_ARGS
+     state is supplied (NULL) for this query.  */
+  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
+}
-/* Return true if TYPE, which is passed by reference, should be callee
+/* Decide whether ARG, which occurs in the state described by CA,
+   should be passed by reference.  Return true if so and update
+   ARG accordingly.  */
+
+bool
+apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
+{
+  if (pass_by_reference (ca, arg))
+    {
+      /* Rewrite ARG to describe what is actually passed: a pointer
+	 to the original type, in the pointer's mode.  */
+      arg.type = build_pointer_type (arg.type);
+      arg.mode = TYPE_MODE (arg.type);
+      arg.pass_by_reference = true;
+      return true;
+    }
+  return false;
+}
+
+/* Return true if ARG, which is passed by reference, should be callee
copied instead of caller copied. */
bool
-reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
- tree type, bool named_arg)
+reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
- if (type && TREE_ADDRESSABLE (type))
+ if (arg.type && TREE_ADDRESSABLE (arg.type))
return false;
- return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
- named_arg);
+ return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}
/* If the value is a non-legitimate constant, force it into a
pseudo now. TLS symbols sometimes need a call to resolve. */
if (CONSTANT_P (args[i].value)
- && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
+ && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
+ || targetm.precompute_tls_p (args[i].mode, args[i].value)))
args[i].value = force_reg (args[i].mode, args[i].value);
/* If we're going to have to load the value by parts, pull the
}
}
-/* The limit set by -Walloc-larger-than=. */
-static GTY(()) tree alloc_object_size_limit;
-
-/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
- setting if the option is specified, or to the maximum object size if it
- is not. Return the initialized value. */
-
-static tree
-alloc_max_size (void)
-{
- if (alloc_object_size_limit)
- return alloc_object_size_limit;
-
- alloc_object_size_limit = max_object_size ();
-
- if (!warn_alloc_size_limit)
- return alloc_object_size_limit;
-
- const char *optname = "-Walloc-size-larger-than=";
-
- char *end = NULL;
- errno = 0;
- unsigned HOST_WIDE_INT unit = 1;
- unsigned HOST_WIDE_INT limit
- = strtoull (warn_alloc_size_limit, &end, 10);
-
- /* If the value is too large to be represented use the maximum
- representable value that strtoull sets limit to (setting
- errno to ERANGE). */
-
- if (end && *end)
- {
- /* Numeric option arguments are at most INT_MAX. Make it
- possible to specify a larger value by accepting common
- suffixes. */
- if (!strcmp (end, "kB"))
- unit = 1000;
- else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
- unit = 1024;
- else if (!strcmp (end, "MB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000;
- else if (!strcasecmp (end, "MiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024;
- else if (!strcasecmp (end, "GB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
- else if (!strcasecmp (end, "GiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
- else if (!strcasecmp (end, "TB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "TiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "PB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "PiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "EB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
- * 1000;
- else if (!strcasecmp (end, "EiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
- * 1024;
- else
- {
- /* This could mean an unknown suffix or a bad prefix, like
- "+-1". */
- warning_at (UNKNOWN_LOCATION, 0,
- "invalid argument %qs to %qs",
- warn_alloc_size_limit, optname);
-
- /* Ignore the limit extracted by strtoull. */
- unit = 0;
- }
- }
-
- if (unit)
- {
- widest_int w = wi::mul (limit, unit);
- if (w < wi::to_widest (alloc_object_size_limit))
- alloc_object_size_limit
- = wide_int_to_tree (ptrdiff_type_node, w);
- else
- alloc_object_size_limit = build_all_ones_cst (size_type_node);
- }
-
-
- return alloc_object_size_limit;
-}
-
-/* Return true when EXP's range can be determined and set RANGE[] to it
- after adjusting it if necessary to make EXP a represents a valid size
- of object, or a valid size argument to an allocation function declared
- with attribute alloc_size (whose argument may be signed), or to a string
- manipulation function like memset. When ALLOW_ZERO is true, allow
- returning a range of [0, 0] for a size in an anti-range [1, N] where
- N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
- allocation functions like malloc but it is a valid argument to
- functions like memset. */
-
-bool
-get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
-{
- if (tree_fits_uhwi_p (exp))
- {
- /* EXP is a constant. */
- range[0] = range[1] = exp;
- return true;
- }
-
- tree exptype = TREE_TYPE (exp);
- bool integral = INTEGRAL_TYPE_P (exptype);
-
- wide_int min, max;
- enum value_range_type range_type;
-
- if (integral)
- range_type = determine_value_range (exp, &min, &max);
- else
- range_type = VR_VARYING;
-
- if (range_type == VR_VARYING)
- {
- if (integral)
- {
- /* Use the full range of the type of the expression when
- no value range information is available. */
- range[0] = TYPE_MIN_VALUE (exptype);
- range[1] = TYPE_MAX_VALUE (exptype);
- return true;
- }
-
- range[0] = NULL_TREE;
- range[1] = NULL_TREE;
- return false;
- }
-
- unsigned expprec = TYPE_PRECISION (exptype);
-
- bool signed_p = !TYPE_UNSIGNED (exptype);
-
- if (range_type == VR_ANTI_RANGE)
- {
- if (signed_p)
- {
- if (wi::les_p (max, 0))
- {
- /* EXP is not in a strictly negative range. That means
- it must be in some (not necessarily strictly) positive
- range which includes zero. Since in signed to unsigned
- conversions negative values end up converted to large
- positive values, and otherwise they are not valid sizes,
- the resulting range is in both cases [0, TYPE_MAX]. */
- min = wi::zero (expprec);
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- else if (wi::les_p (min - 1, 0))
- {
- /* EXP is not in a negative-positive range. That means EXP
- is either negative, or greater than max. Since negative
- sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
- min = max + 1;
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- else
- {
- max = min - 1;
- min = wi::zero (expprec);
- }
- }
- else if (wi::eq_p (0, min - 1))
- {
- /* EXP is unsigned and not in the range [1, MAX]. That means
- it's either zero or greater than MAX. Even though 0 would
- normally be detected by -Walloc-zero, unless ALLOW_ZERO
- is true, set the range to [MAX, TYPE_MAX] so that when MAX
- is greater than the limit the whole range is diagnosed. */
- if (allow_zero)
- min = max = wi::zero (expprec);
- else
- {
- min = max + 1;
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- }
- else
- {
- max = min - 1;
- min = wi::zero (expprec);
- }
- }
-
- range[0] = wide_int_to_tree (exptype, min);
- range[1] = wide_int_to_tree (exptype, max);
-
- return true;
-}
-
-/* Diagnose a call EXP to function FN decorated with attribute alloc_size
- whose argument numbers given by IDX with values given by ARGS exceed
- the maximum object size or cause an unsigned oveflow (wrapping) when
- multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
- may be null for functions like malloc, and non-null for those like
- calloc that are decorated with a two-argument attribute alloc_size. */
-
-void
-maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
-{
- /* The range each of the (up to) two arguments is known to be in. */
- tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
-
- /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
- tree maxobjsize = alloc_max_size ();
-
- location_t loc = EXPR_LOCATION (exp);
-
- bool warned = false;
-
- /* Validate each argument individually. */
- for (unsigned i = 0; i != 2 && args[i]; ++i)
- {
- if (TREE_CODE (args[i]) == INTEGER_CST)
- {
- argrange[i][0] = args[i];
- argrange[i][1] = args[i];
-
- if (tree_int_cst_lt (args[i], integer_zero_node))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i value %qE is negative",
- exp, idx[i] + 1, args[i]);
- }
- else if (integer_zerop (args[i]))
- {
- /* Avoid issuing -Walloc-zero for allocation functions other
- than __builtin_alloca that are declared with attribute
- returns_nonnull because there's no portability risk. This
- avoids warning for such calls to libiberty's xmalloc and
- friends.
- Also avoid issuing the warning for calls to function named
- "alloca". */
- if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
- && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
- || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
- && !lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
- warned = warning_at (loc, OPT_Walloc_zero,
- "%Kargument %i value is zero",
- exp, idx[i] + 1);
- }
- else if (tree_int_cst_lt (maxobjsize, args[i]))
- {
- /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
- mode and with -fno-exceptions as a way to indicate array
- size overflow. There's no good way to detect C++98 here
- so avoid diagnosing these calls for all C++ modes. */
- if (i == 0
- && !args[1]
- && lang_GNU_CXX ()
- && DECL_IS_OPERATOR_NEW (fn)
- && integer_all_onesp (args[i]))
- continue;
-
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i value %qE exceeds "
- "maximum object size %E",
- exp, idx[i] + 1, args[i], maxobjsize);
- }
- }
- else if (TREE_CODE (args[i]) == SSA_NAME
- && get_size_range (args[i], argrange[i]))
- {
- /* Verify that the argument's range is not negative (including
- upper bound of zero). */
- if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
- && tree_int_cst_le (argrange[i][1], integer_zero_node))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i range [%E, %E] is negative",
- exp, idx[i] + 1,
- argrange[i][0], argrange[i][1]);
- }
- else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i range [%E, %E] exceeds "
- "maximum object size %E",
- exp, idx[i] + 1,
- argrange[i][0], argrange[i][1],
- maxobjsize);
- }
- }
- }
-
- if (!argrange[0])
- return;
-
- /* For a two-argument alloc_size, validate the product of the two
- arguments if both of their values or ranges are known. */
- if (!warned && tree_fits_uhwi_p (argrange[0][0])
- && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
- && !integer_onep (argrange[0][0])
- && !integer_onep (argrange[1][0]))
- {
- /* Check for overflow in the product of a function decorated with
- attribute alloc_size (X, Y). */
- unsigned szprec = TYPE_PRECISION (size_type_node);
- wide_int x = wi::to_wide (argrange[0][0], szprec);
- wide_int y = wi::to_wide (argrange[1][0], szprec);
-
- bool vflow;
- wide_int prod = wi::umul (x, y, &vflow);
-
- if (vflow)
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kproduct %<%E * %E%> of arguments %i and %i "
- "exceeds %<SIZE_MAX%>",
- exp, argrange[0][0], argrange[1][0],
- idx[0] + 1, idx[1] + 1);
- else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kproduct %<%E * %E%> of arguments %i and %i "
- "exceeds maximum object size %E",
- exp, argrange[0][0], argrange[1][0],
- idx[0] + 1, idx[1] + 1,
- maxobjsize);
-
- if (warned)
- {
- /* Print the full range of each of the two arguments to make
- it clear when it is, in fact, in a range and not constant. */
- if (argrange[0][0] != argrange [0][1])
- inform (loc, "argument %i in the range [%E, %E]",
- idx[0] + 1, argrange[0][0], argrange[0][1]);
- if (argrange[1][0] != argrange [1][1])
- inform (loc, "argument %i in the range [%E, %E]",
- idx[1] + 1, argrange[1][0], argrange[1][1]);
- }
- }
-
- if (warned)
- {
- location_t fnloc = DECL_SOURCE_LOCATION (fn);
-
- if (DECL_IS_BUILTIN (fn))
- inform (loc,
- "in a call to built-in allocation function %qD", fn);
- else
- inform (fnloc,
- "in a call to allocation function %qD declared here", fn);
- }
-}
-
-/* If EXPR refers to a character array or pointer declared attribute
- nonstring return a decl for that array or pointer and set *REF to
- the referenced enclosing object or pointer. Otherwise returns
- null. */
-
-tree
-get_attr_nonstring_decl (tree expr, tree *ref)
-{
- tree decl = expr;
- if (TREE_CODE (decl) == SSA_NAME)
- {
- gimple *def = SSA_NAME_DEF_STMT (decl);
-
- if (is_gimple_assign (def))
- {
- tree_code code = gimple_assign_rhs_code (def);
- if (code == ADDR_EXPR
- || code == COMPONENT_REF
- || code == VAR_DECL)
- decl = gimple_assign_rhs1 (def);
- }
- else if (tree var = SSA_NAME_VAR (decl))
- decl = var;
- }
-
- if (TREE_CODE (decl) == ADDR_EXPR)
- decl = TREE_OPERAND (decl, 0);
-
- if (ref)
- *ref = decl;
-
- if (TREE_CODE (decl) == ARRAY_REF)
- decl = TREE_OPERAND (decl, 0);
- else if (TREE_CODE (decl) == COMPONENT_REF)
- decl = TREE_OPERAND (decl, 1);
- else if (TREE_CODE (decl) == MEM_REF)
- return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
-
- if (DECL_P (decl)
- && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
- return decl;
-
- return NULL_TREE;
-}
-
-/* Warn about passing a non-string array/pointer to a function that
- expects a nul-terminated string argument. */
-
-void
-maybe_warn_nonstring_arg (tree fndecl, tree exp)
-{
- if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
- return;
-
- unsigned nargs = call_expr_nargs (exp);
-
- /* The bound argument to a bounded string function like strncpy. */
- tree bound = NULL_TREE;
-
- /* The range of lengths of a string argument to one of the comparison
- functions. If the length is less than the bound it is used instead. */
- tree lenrng[2] = { NULL_TREE, NULL_TREE };
-
- /* It's safe to call "bounded" string functions with a non-string
- argument since the functions provide an explicit bound for this
- purpose. */
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_STRCMP:
- case BUILT_IN_STRNCMP:
- case BUILT_IN_STRNCASECMP:
- {
- /* For these, if one argument refers to one or more of a set
- of string constants or arrays of known size, determine
- the range of their known or possible lengths and use it
- conservatively as the bound for the unbounded function,
- and to adjust the range of the bound of the bounded ones. */
- for (unsigned argno = 0; argno < nargs && !*lenrng; argno ++)
- {
- tree arg = CALL_EXPR_ARG (exp, argno);
- if (!get_attr_nonstring_decl (arg))
- get_range_strlen (arg, lenrng);
- }
- }
- /* Fall through. */
-
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STRNCPY:
- {
- unsigned argno = 2;
- if (argno < nargs)
- bound = CALL_EXPR_ARG (exp, argno);
- break;
- }
-
- case BUILT_IN_STRNDUP:
- {
- unsigned argno = 1;
- if (argno < nargs)
- bound = CALL_EXPR_ARG (exp, argno);
- break;
- }
-
- default:
- break;
- }
-
- /* Determine the range of the bound argument (if specified). */
- tree bndrng[2] = { NULL_TREE, NULL_TREE };
- if (bound)
- {
- STRIP_NOPS (bound);
- get_size_range (bound, bndrng);
- }
-
- if (*lenrng)
- {
- /* Add one for the nul. */
- lenrng[0] = const_binop (PLUS_EXPR, TREE_TYPE (lenrng[0]),
- lenrng[0], size_one_node);
- lenrng[1] = const_binop (PLUS_EXPR, TREE_TYPE (lenrng[1]),
- lenrng[1], size_one_node);
-
- if (!bndrng[0])
- {
- /* Conservatively use the upper bound of the lengths for
- both the lower and the upper bound of the operation. */
- bndrng[0] = lenrng[1];
- bndrng[1] = lenrng[1];
- bound = void_type_node;
- }
- else
- {
- /* Replace the bound on the operation with the upper bound
- of the length of the string if the latter is smaller. */
- if (tree_int_cst_lt (lenrng[1], bndrng[0]))
- bndrng[0] = lenrng[1];
- else if (tree_int_cst_lt (lenrng[1], bndrng[1]))
- bndrng[1] = lenrng[1];
- }
- }
-
- /* Iterate over the built-in function's formal arguments and check
- each const char* against the actual argument. If the actual
- argument is declared attribute non-string issue a warning unless
- the argument's maximum length is bounded. */
- function_args_iterator it;
- function_args_iter_init (&it, TREE_TYPE (fndecl));
-
- for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
- {
- /* Avoid iterating past the declared argument in a call
- to function declared without a prototype. */
- if (argno >= nargs)
- break;
-
- tree argtype = function_args_iter_cond (&it);
- if (!argtype)
- break;
-
- if (TREE_CODE (argtype) != POINTER_TYPE)
- continue;
-
- argtype = TREE_TYPE (argtype);
-
- if (TREE_CODE (argtype) != INTEGER_TYPE
- || !TYPE_READONLY (argtype))
- continue;
-
- argtype = TYPE_MAIN_VARIANT (argtype);
- if (argtype != char_type_node)
- continue;
-
- tree callarg = CALL_EXPR_ARG (exp, argno);
- if (TREE_CODE (callarg) == ADDR_EXPR)
- callarg = TREE_OPERAND (callarg, 0);
-
- /* See if the destination is declared with attribute "nonstring". */
- tree decl = get_attr_nonstring_decl (callarg);
- if (!decl)
- continue;
-
- tree type = TREE_TYPE (decl);
-
- /* The maximum number of array elements accessed. */
- offset_int wibnd = 0;
- if (bndrng[0])
- wibnd = wi::to_offset (bndrng[0]);
-
- /* Size of the array. */
- offset_int asize = wibnd;
-
- /* Determine the array size. For arrays of unknown bound and
- pointers reset BOUND to trigger the appropriate warning. */
- if (TREE_CODE (type) == ARRAY_TYPE)
- {
- if (tree arrbnd = TYPE_DOMAIN (type))
- {
- if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
- asize = wi::to_offset (arrbnd) + 1;
- }
- else if (bound == void_type_node)
- bound = NULL_TREE;
- }
- else if (bound == void_type_node)
- bound = NULL_TREE;
-
- location_t loc = EXPR_LOCATION (exp);
-
- bool warned = false;
-
- if (wi::ltu_p (asize, wibnd))
- warned = warning_at (loc, OPT_Wstringop_overflow_,
- "%qD argument %i declared attribute %<nonstring%> "
- "is smaller than the specified bound %E",
- fndecl, argno + 1, bndrng[0]);
- else if (!bound)
- warned = warning_at (loc, OPT_Wstringop_overflow_,
- "%qD argument %i declared attribute %<nonstring%>",
- fndecl, argno + 1);
-
- if (warned)
- inform (DECL_SOURCE_LOCATION (decl),
- "argument %qD declared here", decl);
- }
-}
-
/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization. */
-static void
+void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
bitmap_obstack_release (NULL);
- /* Extract attribute alloc_size and if set, store the indices of
- the corresponding arguments in ALLOC_IDX, and then the actual
- argument(s) at those indices in ALLOC_ARGS. */
- int alloc_idx[2] = { -1, -1 };
- if (tree alloc_size
- = (fndecl ? lookup_attribute ("alloc_size",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
- : NULL_TREE))
- {
- tree args = TREE_VALUE (alloc_size);
- alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
- if (TREE_CHAIN (args))
- alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
- }
-
- /* Array for up to the two attribute alloc_size arguments. */
- tree alloc_args[] = { NULL_TREE, NULL_TREE };
-
/* I counts args in order (to be) pushed; ARGPOS counts in order written. */
for (argpos = 0; argpos < num_actuals; i--, argpos++)
{
tree type = TREE_TYPE (args[i].tree_value);
int unsignedp;
- machine_mode mode;
/* Replace erroneous argument with constant zero. */
if (type == error_mark_node || !COMPLETE_TYPE_P (type))
/* If TYPE is a transparent union or record, pass things the way
we would pass the first field of the union or record. We have
already verified that the modes are the same. */
- if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
- && TYPE_TRANSPARENT_AGGR (type))
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
type = TREE_TYPE (first_field (type));
/* Decide where to pass this arg.
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args))
+ function_arg_info arg (type, argpos < n_named_args);
+ if (pass_by_reference (args_so_far_pnt, arg))
{
- bool callee_copies;
- tree base = NULL_TREE;
-
- callee_copies
- = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args);
-
- /* If we're compiling a thunk, pass through invisible references
- instead of making a copy. */
- if (call_from_thunk_p
- || (callee_copies
- && !TREE_ADDRESSABLE (type)
- && (base = get_base_address (args[i].tree_value))
- && TREE_CODE (base) != SSA_NAME
- && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
+ const bool callee_copies
+ = reference_callee_copied (args_so_far_pnt, arg);
+ tree base;
+
+ /* If we're compiling a thunk, pass directly the address of an object
+ already in memory, instead of making a copy. Likewise if we want
+ to make the copy in the callee instead of the caller. */
+ if ((call_from_thunk_p || callee_copies)
+ && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
+ && ((base = get_base_address (args[i].tree_value)), true)
+ && TREE_CODE (base) != SSA_NAME
+ && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
{
/* We may have turned the parameter value into an SSA name.
Go back to the original parameter so we can take the
"argument must be passed"
" by copying");
}
+ arg.pass_by_reference = true;
}
unsignedp = TYPE_UNSIGNED (type);
- mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
- fndecl ? TREE_TYPE (fndecl) : fntype, 0);
+ arg.type = type;
+ arg.mode
+ = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
+ fndecl ? TREE_TYPE (fndecl) : fntype, 0);
args[i].unsignedp = unsignedp;
- args[i].mode = mode;
+ args[i].mode = arg.mode;
targetm.calls.warn_parameter_passing_abi (args_so_far, type);
- args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].reg = targetm.calls.function_arg (args_so_far, arg);
if (args[i].reg && CONST_INT_P (args[i].reg))
- {
- args[i].special_slot = args[i].reg;
- args[i].reg = NULL;
- }
+ args[i].reg = NULL;
/* If this is a sibling call and the machine has register windows, the
register window has to be unwinded before calling the routine, so
arguments have to go into the incoming registers. */
if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
args[i].tail_call_reg
- = targetm.calls.function_incoming_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ = targetm.calls.function_incoming_arg (args_so_far, arg);
else
args[i].tail_call_reg = args[i].reg;
if (args[i].reg)
- args[i].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
- args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
+ args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
/* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
it means that we are to pass this arg in the register(s) designated
if (args[i].reg == 0 || args[i].partial != 0
|| reg_parm_stack_space > 0
|| args[i].pass_on_stack)
- locate_and_pad_parm (mode, type,
+ locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
args[i].locate.where_pad =
- BLOCK_REG_PADDING (mode, type,
+ BLOCK_REG_PADDING (arg.mode, type,
int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif
/* Increment ARGS_SO_FAR, which has info about which arg-registers
have been used, etc. */
- targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
- type, argpos < n_named_args);
-
- /* Store argument values for functions decorated with attribute
- alloc_size. */
- if (argpos == alloc_idx[0])
- alloc_args[0] = args[i].tree_value;
- else if (argpos == alloc_idx[1])
- alloc_args[1] = args[i].tree_value;
- }
-
- if (alloc_args[0])
- {
- /* Check the arguments of functions decorated with attribute
- alloc_size. */
- maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
+ /* ??? Traditionally we've passed TYPE_MODE here, instead of the
+ promoted_mode used for function_arg above. However, the
+ corresponding handling of incoming arguments in function.c
+ does pass the promoted mode. */
+ arg.mode = TYPE_MODE (type);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
-
- /* Detect passing non-string arguments to functions expecting
- nul-terminated strings. */
- maybe_warn_nonstring_arg (fndecl, exp);
}
/* Update ARGS_SIZE to contain the total size for the argument block.
{
if (idx >= internal_arg_pointer_exp_state.cache.length ())
internal_arg_pointer_exp_state.cache
- .safe_grow_cleared (idx + 1);
+ .safe_grow_cleared (idx + 1, true);
internal_arg_pointer_exp_state.cache[idx] = val;
}
}
poly_int64 size = 0;
HOST_WIDE_INT const_size = 0;
rtx_insn *before_arg = get_last_insn ();
+ tree type = TREE_TYPE (args[i].tree_value);
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
+ type = TREE_TYPE (first_field (type));
/* Set non-negative if we must move a word at a time, even if
just one word (e.g, partial == 4 && mode == DFmode). Set
to -1 if we just use a normal move insn. This value can be
gcc_assert (partial % UNITS_PER_WORD == 0);
nregs = partial / UNITS_PER_WORD;
}
- else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
+ else if (TYPE_MODE (type) == BLKmode)
{
/* Variable-sized parameters should be described by a
PARALLEL instead. */
- const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
+ const_size = int_size_in_bytes (type);
gcc_assert (const_size >= 0);
nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
size = const_size;
if (GET_CODE (reg) == PARALLEL)
use_group_regs (call_fusage, reg);
else if (nregs == -1)
- use_reg_mode (call_fusage, reg,
- TYPE_MODE (TREE_TYPE (args[i].tree_value)));
+ use_reg_mode (call_fusage, reg, TYPE_MODE (type));
else if (nregs > 0)
use_regs (call_fusage, REGNO (reg), nregs);
}
can_implement_as_sibling_call_p (tree exp,
rtx structure_value_addr,
tree funtype,
- int reg_parm_stack_space ATTRIBUTE_UNUSED,
tree fndecl,
int flags,
tree addr,
return false;
}
-#ifdef REG_PARM_STACK_SPACE
- /* If outgoing reg parm stack space changes, we can not do sibcall. */
- if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
- != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
- || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
- {
- maybe_complain_about_tail_call (exp,
- "inconsistent size of stack space"
- " allocated for arguments which are"
- " passed in registers");
- return false;
- }
-#endif
-
/* Check whether the target is able to optimize the call
into a sibcall. */
if (!targetm.function_ok_for_sibcall (fndecl, exp))
return true;
}
+/* Update stack alignment when the parameter is passed on the stack
+   since the outgoing parameter requires extra alignment on the calling
+   function side.  */
+
+static void
+update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
+{
+  /* LOCATE->boundary is the alignment required for this argument's
+     stack slot (presumably in bits, matching the crtl fields below --
+     confirm against locate_and_pad_parm).  Raise both function-wide
+     alignment records to at least that value; never lower them.  */
+  if (crtl->stack_alignment_needed < locate->boundary)
+    crtl->stack_alignment_needed = locate->boundary;
+  if (crtl->preferred_stack_boundary < locate->boundary)
+    crtl->preferred_stack_boundary = locate->boundary;
+}
+
/* Generate all the code for a CALL_EXPR exp
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
So the entire argument block must then be preallocated (i.e., we
ignore PUSH_ROUNDING in that case). */
- int must_preallocate = !PUSH_ARGS;
+ int must_preallocate = !targetm.calls.push_argument (0);
/* Size of the stack reserved for parameter registers. */
int reg_parm_stack_space = 0;
side-effects. */
if ((flags & (ECF_CONST | ECF_PURE))
&& (!(flags & ECF_LOOPING_CONST_OR_PURE))
+ && (flags & ECF_NOTHROW)
&& (ignore || target == const0_rtx
|| TYPE_MODE (rettype) == VOIDmode))
{
#endif
if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
- && reg_parm_stack_space > 0 && PUSH_ARGS)
+ && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
must_preallocate = 1;
/* Set up a place to return a structure. */
pushed these optimizations into -O2. Don't try if we're already
expanding a call, as that means we're an argument. Don't try if
there's cleanups, as we know there's code to follow the call. */
-
if (currently_expanding_call++ != 0
- || !flag_optimize_sibling_calls
+ || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
|| args_size.var
|| dbg_cnt (tail_call) == false)
try_tail_call = 0;
+ /* Workaround buggy C/C++ wrappers around Fortran routines with
+ character(len=constant) arguments if the hidden string length arguments
+ are passed on the stack; if the callers forget to pass those arguments,
+ attempting to tail call in such routines leads to stack corruption.
+ Avoid tail calls in functions where at least one such hidden string
+ length argument is passed (partially or fully) on the stack in the
+ caller and the callee needs to pass any arguments on the stack.
+ See PR90329. */
+ if (try_tail_call && maybe_ne (args_size.constant, 0))
+ for (tree arg = DECL_ARGUMENTS (current_function_decl);
+ arg; arg = DECL_CHAIN (arg))
+ if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
+ {
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
+ if (MEM_P (*iter))
+ {
+ try_tail_call = 0;
+ break;
+ }
+ }
+
/* If the user has marked the function as requiring tail-call
optimization, attempt it. */
if (must_tail_call)
try_tail_call = can_implement_as_sibling_call_p (exp,
structure_value_addr,
funtype,
- reg_parm_stack_space,
fndecl,
flags, addr, args_size);
/* Ensure current function's preferred stack boundary is at least
what we need. Stack alignment may also increase preferred stack
boundary. */
+ for (i = 0; i < num_actuals; i++)
+ if (reg_parm_stack_space > 0
+ || args[i].reg == 0
+ || args[i].partial != 0
+ || args[i].pass_on_stack)
+ update_stack_alignment_for_call (&args[i].locate);
if (crtl->preferred_stack_boundary < preferred_stack_boundary)
crtl->preferred_stack_boundary = preferred_stack_boundary;
else
preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
+ if (flag_callgraph_info)
+ record_final_call (fndecl, EXPR_LOCATION (exp));
+
/* We want to make two insn chains; one for a sibling call, the other
for a normal call. We will select one of the two chains after
initial RTL generation is complete. */
/* Set up next argument register. For sibling calls on machines
with register windows this should be the incoming register. */
if (pass == 0)
- next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
- VOIDmode,
- void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_incoming_arg
+ (args_so_far, function_arg_info::end_marker ());
else
- next_arg_reg = targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_arg
+ (args_so_far, function_arg_info::end_marker ());
if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
{
emit_move_insn (temp, valreg);
- /* The return value from a malloc-like function can not alias
+ /* The return value from a malloc-like function cannot alias
anything else. */
last = get_last_insn ();
add_reg_note (last, REG_NOALIAS, temp);
argvec[count].mode = Pmode;
argvec[count].partial = 0;
- argvec[count].reg = targetm.calls.function_arg (args_so_far,
- Pmode, NULL_TREE, true);
- gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
- NULL_TREE, 1) == 0);
+ function_arg_info ptr_arg (Pmode, /*named=*/true);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
+ gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, ptr_arg);
count++;
}
for (unsigned int i = 0; count < nargs; i++, count++)
{
rtx val = args[i].first;
- machine_mode mode = args[i].second;
+ function_arg_info arg (args[i].second, /*named=*/true);
int unsigned_p = 0;
/* We cannot convert the arg value to the mode the library wants here;
must do it earlier where we know the signedness of the arg. */
- gcc_assert (mode != BLKmode
- && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
+ gcc_assert (arg.mode != BLKmode
+ && (GET_MODE (val) == arg.mode
+ || GET_MODE (val) == VOIDmode));
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
- && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
+ && !(CONSTANT_P (val)
+ && targetm.legitimate_constant_p (arg.mode, val)))
val = force_operand (val, NULL_RTX);
- if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far_v, arg))
{
rtx slot;
- int must_copy
- = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
+ int must_copy = !reference_callee_copied (&args_so_far_v, arg);
/* If this was a CONST function, it is now PURE since it now
reads memory. */
}
else
{
- slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
+ slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
1, 1);
emit_move_insn (slot, val);
}
slot),
call_fusage);
- mode = Pmode;
+ arg.mode = Pmode;
+ arg.pass_by_reference = true;
val = force_operand (XEXP (slot, 0), NULL_RTX);
}
- mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
- argvec[count].mode = mode;
- argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
- argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
- NULL_TREE, true);
+ arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
+ NULL_TREE, 0);
+ argvec[count].mode = arg.mode;
+ argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
+ unsigned_p);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
argvec[count].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (args_so_far, arg);
if (argvec[count].reg == 0
|| argvec[count].partial != 0
|| reg_parm_stack_space > 0)
{
- locate_and_pad_parm (mode, NULL_TREE,
+ locate_and_pad_parm (arg.mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
argvec[count].locate.where_pad =
- BLOCK_REG_PADDING (mode, NULL_TREE,
- known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
+ BLOCK_REG_PADDING (arg.mode, NULL_TREE,
+ known_le (GET_MODE_SIZE (arg.mode),
+ UNITS_PER_WORD));
#endif
- targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
+ for (int i = 0; i < nargs; i++)
+ if (reg_parm_stack_space > 0
+ || argvec[i].reg == 0
+ || argvec[i].partial != 0)
+ update_stack_alignment_for_call (&argvec[i].locate);
+
/* If this machine requires an external definition for library
functions, write one out. */
assemble_external_libcall (fun);
}
else
{
- if (!PUSH_ARGS)
+ if (!targetm.calls.push_argument (0))
argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
}
before_call = get_last_insn ();
+ if (flag_callgraph_info)
+ record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
+
/* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
will set inhibit_defer_pop to that value. */
/* The return type is needed to decide how many bytes the function pops.
original_args_size.constant, args_size.constant,
struct_value_size,
targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node, true),
+ function_arg_info::end_marker ()),
valreg,
old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
return sibcall_failure;
}
-/* Nonzero if we do not know how to pass TYPE solely in registers.  */
+/* Nonzero if we do not know how to pass ARG solely in registers.  */
bool
-must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
-			     const_tree type)
+must_pass_in_stack_var_size (const function_arg_info &arg)
{
+  /* Without type information (e.g. an argument described only by its
+     mode, as on the libcall path) nothing forces a stack slot.  */
-  if (!type)
+  if (!arg.type)
    return false;
  /* If the type has variable size...  */
-  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
    return true;
  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
-  if (TREE_ADDRESSABLE (type))
+  if (TREE_ADDRESSABLE (arg.type))
    return true;
  return false;
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */
bool
-must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
+must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
-  if (!type)
+  if (!arg.type)
    return false;
  /* If the type has variable size...  */
+  /* NOTE(review): must_pass_in_stack_var_size was changed to accept any
+     poly_int size, while this variant still insists on INTEGER_CST and
+     so forces poly-int-sized types onto the stack -- confirm the
+     asymmetry is intentional.  */
-  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
    return true;
  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
-  if (TREE_ADDRESSABLE (type))
+  if (TREE_ADDRESSABLE (arg.type))
    return true;
+  /* Empty types occupy no argument storage, so they never need a
+     stack slot.  */
-  if (TYPE_EMPTY_P (type))
+  if (TYPE_EMPTY_P (arg.type))
    return false;
  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
-  if (mode == BLKmode
-      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
-      && (targetm.calls.function_arg_padding (mode, type)
+  if (arg.mode == BLKmode
+      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
+      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;
  return false;
}
-/* Tell the garbage collector about GTY markers in this source file. */
-#include "gt-calls.h"
+/* Return true if TYPE must be passed on the stack when passed to
+   the "..." arguments of a function.  */
+
+bool
+must_pass_va_arg_in_stack (tree type)
+{
+  /* Variadic arguments are always unnamed; wrap TYPE accordingly and
+     let the target hook make the decision.  */
+  function_arg_info arg (type, /*named=*/false);
+  return targetm.calls.must_pass_in_stack (arg);
+}
+
+/* Return true if FIELD is the C++17 empty base field that should
+   be ignored for ABI calling convention decisions in order to
+   maintain ABI compatibility between C++14 and earlier, which doesn't
+   add this FIELD to classes with empty bases, and C++17 and later
+   which does.  */
+
+bool
+cxx17_empty_base_field_p (const_tree field)
+{
+  /* Only an artificial field flagged DECL_FIELD_ABI_IGNORED with a
+     record/union type qualifies; a field carrying the
+     "no_unique_address" attribute is user-written and is deliberately
+     excluded here.  */
+  return (DECL_FIELD_ABI_IGNORED (field)
+	  && DECL_ARTIFICIAL (field)
+	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
+	  && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
+}