/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989-2018 Free Software Foundation, Inc.
+ Copyright (C) 1989-2021 Free Software Foundation, Inc.
This file is part of GCC.
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
-#include "tree-chkp.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
-#include "rtl-chkp.h"
#include "intl.h"
#include "stringpool.h"
+#include "hash-map.h"
+#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
+#include "gimple-fold.h"
+#include "attr-fnspec.h"
+#include "value-query.h"
+#include "tree-pretty-print.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
/* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
form for emit_group_move. */
rtx parallel_value;
- /* If value is passed in neither reg nor stack, this field holds a number
- of a special slot to be used. */
- rtx special_slot;
- /* For pointer bounds hold an index of parm bounds are bound to. -1 if
- there is no such pointer. */
- int pointer_arg;
- /* If pointer_arg refers a structure, then pointer_offset holds an offset
- of a pointer in this structure. */
- int pointer_offset;
/* If REG was promoted from the actual mode of the argument expression,
indicates whether the promotion is sign- or zero-extended. */
int unsignedp;
static int stack_arg_under_construction;
static void precompute_register_parameters (int, struct arg_data *, int *);
-static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
It is zero if this call doesn't want a structure value.
NEXT_ARG_REG is the rtx that results from executing
- targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
+ targetm.calls.function_arg (&args_so_far,
+ function_arg_info::end_marker ());
just after all the args have had their registers assigned.
This could be whatever you like, but normally it is the first
arg-register beyond those used for args in this call,
&& MEM_EXPR (funmem) != NULL_TREE)
set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
- /* Mark instrumented calls. */
- if (call && fntree)
- CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
-
/* Put the register usage information there. */
add_function_usage_to (call_insn, call_fusage);
{
tree name_decl = DECL_NAME (fndecl);
- /* For instrumentation clones we want to derive flags
- from the original name. */
- if (cgraph_node::get (fndecl)
- && cgraph_node::get (fndecl)->instrumentation_clone)
- name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
-
- if (fndecl && name_decl
- && IDENTIFIER_LENGTH (name_decl) <= 11
- /* Exclude functions not at the file scope, or not `extern',
- since they are not the magic functions we would otherwise
- think they are.
- FIXME: this should be handled with attributes, not with this
- hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
- because you can declare fork() inside a function if you
- wish. */
- && (DECL_CONTEXT (fndecl) == NULL_TREE
- || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
- && TREE_PUBLIC (fndecl))
+ if (maybe_special_function_p (fndecl)
+ && IDENTIFIER_LENGTH (name_decl) <= 11)
{
const char *name = IDENTIFIER_POINTER (name_decl);
const char *tname = name;
return flags;
}
+/* Return the fnspec for FNDECL: the "fn spec" attribute on its type
+   if one is present, otherwise the fnspec known for a normal built-in,
+   otherwise the empty fnspec.  */
+
+static attr_fnspec
+decl_fnspec (tree fndecl)
+{
+  tree attr;
+  tree type = TREE_TYPE (fndecl);
+  if (type)
+    {
+      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
+      if (attr)
+	{
+	  /* The attribute value is a one-element tree list wrapping the
+	     fnspec string.  */
+	  return TREE_VALUE (TREE_VALUE (attr));
+	}
+    }
+  /* An explicit "fn spec" attribute (handled above) takes precedence
+     over the built-in table; only normal built-ins have entries.  */
+  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
+    return builtin_fnspec (fndecl);
+  return "";
+}
+
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
-  tree attr;
-  tree type = TREE_TYPE (fndecl);
-  if (!type)
-    return 0;
+  attr_fnspec fnspec = decl_fnspec (fndecl);
-  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
-  if (!attr)
-    return 0;
-
-  attr = TREE_VALUE (TREE_VALUE (attr));
-  if (!attr || TREE_STRING_LENGTH (attr) < 1)
-    return 0;
+  /* An fnspec of "1".."4" (see the removed switch below) names the
+     argument that is returned; ERF_RETURNS_ARG carries the zero-based
+     argument index in its low bits.  */
+  unsigned int arg;
+  if (fnspec.returns_arg (&arg))
+    return ERF_RETURNS_ARG | arg;
-  switch (TREE_STRING_POINTER (attr)[0])
-    {
-    case '1':
-    case '2':
-    case '3':
-    case '4':
-      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
-
-    case 'm':
-      return ERF_NOALIAS;
-
-    case '.':
-    default:
-      return 0;
-    }
+  /* An "m" spec (malloc-like) means the returned pointer aliases
+     nothing else.  */
+  if (fnspec.returns_noalias_p ())
+    return ERF_NOALIAS;
+  return 0;
}
/* Return nonzero when FNDECL represents a call to setjmp. */
return false;
fndecl = gimple_call_fndecl (stmt);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (fndecl))
{
CASE_BUILT_IN_ALLOCA:
- return true;
+ return gimple_call_num_args (stmt) > 0;
default:
break;
}
return flags;
}
-/* Return true if TYPE should be passed by invisible reference.  */
+/* Return true if ARG should be passed by invisible reference.  */
bool
-pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
-		   tree type, bool named_arg)
+pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
-  if (type)
+  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
	return true;
      /* GCC post 3.4 passes *all* variable sized types by reference.  */
-      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+      /* NOTE(review): poly_int_tree_p also accepts POLY_INT_CST sizes,
+	 so poly-int-sized types are no longer forced by reference here
+	 — confirm this widening is intended.  */
+      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
	return true;
      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
-	  type = TREE_TYPE (first_field (type));
-	  mode = TYPE_MODE (type);
+	  arg.type = TREE_TYPE (first_field (type));
+	  arg.mode = TYPE_MODE (arg.type);
	}
    }
-  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
-					  type, named_arg);
+  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
+}
+
+/* Return true if TYPE should be passed by reference when passed to
+   the "..." arguments of a function.  */
+
+bool
+pass_va_arg_by_reference (tree type)
+{
+  /* Variadic arguments are by definition unnamed; no CUMULATIVE_ARGS
+     state is available, hence the null CA.  NOTE(review): assumes the
+     target hook tolerates a null cumulative-args pointer — confirm.  */
+  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
+}
-/* Return true if TYPE, which is passed by reference, should be callee
+/* Decide whether ARG, which occurs in the state described by CA,
+   should be passed by reference.  Return true if so and update
+   ARG accordingly.  */
+
+bool
+apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
+{
+  if (pass_by_reference (ca, arg))
+    {
+      /* Rewrite ARG to describe what is actually passed: a pointer to
+	 the original type, in the pointer's mode, with the
+	 pass_by_reference flag set for downstream consumers.  */
+      arg.type = build_pointer_type (arg.type);
+      arg.mode = TYPE_MODE (arg.type);
+      arg.pass_by_reference = true;
+      return true;
+    }
+  return false;
+}
+
+/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */
bool
-reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
-			 tree type, bool named_arg)
+reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
+  /* TREE_ADDRESSABLE types must never be copied implicitly, so the
+     copy is always made by the caller for them.  */
-  if (type && TREE_ADDRESSABLE (type))
+  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
-  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
-				      named_arg);
+  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}
/* If the value is a non-legitimate constant, force it into a
pseudo now. TLS symbols sometimes need a call to resolve. */
if (CONSTANT_P (args[i].value)
- && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
+ && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
+ || targetm.precompute_tls_p (args[i].mode, args[i].value)))
args[i].value = force_reg (args[i].mode, args[i].value);
/* If we're going to have to load the value by parts, pull the
}
}
-/* The limit set by -Walloc-larger-than=. */
-static GTY(()) tree alloc_object_size_limit;
-
-/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
- setting if the option is specified, or to the maximum object size if it
- is not. Return the initialized value. */
-
-static tree
-alloc_max_size (void)
-{
- if (!alloc_object_size_limit)
- {
- alloc_object_size_limit = max_object_size ();
-
- if (warn_alloc_size_limit)
- {
- char *end = NULL;
- errno = 0;
- unsigned HOST_WIDE_INT unit = 1;
- unsigned HOST_WIDE_INT limit
- = strtoull (warn_alloc_size_limit, &end, 10);
-
- if (!errno)
- {
- if (end && *end)
- {
- /* Numeric option arguments are at most INT_MAX. Make it
- possible to specify a larger value by accepting common
- suffixes. */
- if (!strcmp (end, "kB"))
- unit = 1000;
- else if (!strcasecmp (end, "KiB") || strcmp (end, "KB"))
- unit = 1024;
- else if (!strcmp (end, "MB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000;
- else if (!strcasecmp (end, "MiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024;
- else if (!strcasecmp (end, "GB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
- else if (!strcasecmp (end, "GiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
- else if (!strcasecmp (end, "TB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "TiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "PB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "PiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "EB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
- * 1000;
- else if (!strcasecmp (end, "EiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
- * 1024;
- else
- unit = 0;
- }
-
- if (unit)
- {
- widest_int w = wi::mul (limit, unit);
- if (w < wi::to_widest (alloc_object_size_limit))
- alloc_object_size_limit
- = wide_int_to_tree (ptrdiff_type_node, w);
- }
- }
- }
- }
- return alloc_object_size_limit;
-}
-
-/* Return true when EXP's range can be determined and set RANGE[] to it
- after adjusting it if necessary to make EXP a represents a valid size
- of object, or a valid size argument to an allocation function declared
- with attribute alloc_size (whose argument may be signed), or to a string
- manipulation function like memset. When ALLOW_ZERO is true, allow
- returning a range of [0, 0] for a size in an anti-range [1, N] where
- N > PTRDIFF_MAX. A zero range is a (nearly) invalid argument to
- allocation functions like malloc but it is a valid argument to
- functions like memset. */
-
-bool
-get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
-{
- if (tree_fits_uhwi_p (exp))
- {
- /* EXP is a constant. */
- range[0] = range[1] = exp;
- return true;
- }
-
- tree exptype = TREE_TYPE (exp);
- bool integral = INTEGRAL_TYPE_P (exptype);
-
- wide_int min, max;
- enum value_range_type range_type;
-
- if (TREE_CODE (exp) == SSA_NAME && integral)
- range_type = get_range_info (exp, &min, &max);
- else
- range_type = VR_VARYING;
-
- if (range_type == VR_VARYING)
- {
- if (integral)
- {
- /* Use the full range of the type of the expression when
- no value range information is available. */
- range[0] = TYPE_MIN_VALUE (exptype);
- range[1] = TYPE_MAX_VALUE (exptype);
- return true;
- }
-
- range[0] = NULL_TREE;
- range[1] = NULL_TREE;
- return false;
- }
-
- unsigned expprec = TYPE_PRECISION (exptype);
-
- bool signed_p = !TYPE_UNSIGNED (exptype);
-
- if (range_type == VR_ANTI_RANGE)
- {
- if (signed_p)
- {
- if (wi::les_p (max, 0))
- {
- /* EXP is not in a strictly negative range. That means
- it must be in some (not necessarily strictly) positive
- range which includes zero. Since in signed to unsigned
- conversions negative values end up converted to large
- positive values, and otherwise they are not valid sizes,
- the resulting range is in both cases [0, TYPE_MAX]. */
- min = wi::zero (expprec);
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- else if (wi::les_p (min - 1, 0))
- {
- /* EXP is not in a negative-positive range. That means EXP
- is either negative, or greater than max. Since negative
- sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
- min = max + 1;
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- else
- {
- max = min - 1;
- min = wi::zero (expprec);
- }
- }
- else if (wi::eq_p (0, min - 1))
- {
- /* EXP is unsigned and not in the range [1, MAX]. That means
- it's either zero or greater than MAX. Even though 0 would
- normally be detected by -Walloc-zero, unless ALLOW_ZERO
- is true, set the range to [MAX, TYPE_MAX] so that when MAX
- is greater than the limit the whole range is diagnosed. */
- if (allow_zero)
- min = max = wi::zero (expprec);
- else
- {
- min = max + 1;
- max = wi::to_wide (TYPE_MAX_VALUE (exptype));
- }
- }
- else
- {
- max = min - 1;
- min = wi::zero (expprec);
- }
- }
-
- range[0] = wide_int_to_tree (exptype, min);
- range[1] = wide_int_to_tree (exptype, max);
-
- return true;
-}
-
-/* Diagnose a call EXP to function FN decorated with attribute alloc_size
- whose argument numbers given by IDX with values given by ARGS exceed
- the maximum object size or cause an unsigned oveflow (wrapping) when
- multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
- may be null for functions like malloc, and non-null for those like
- calloc that are decorated with a two-argument attribute alloc_size. */
-
-void
-maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
-{
- /* The range each of the (up to) two arguments is known to be in. */
- tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
-
- /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
- tree maxobjsize = alloc_max_size ();
-
- location_t loc = EXPR_LOCATION (exp);
-
- bool warned = false;
-
- /* Validate each argument individually. */
- for (unsigned i = 0; i != 2 && args[i]; ++i)
- {
- if (TREE_CODE (args[i]) == INTEGER_CST)
- {
- argrange[i][0] = args[i];
- argrange[i][1] = args[i];
-
- if (tree_int_cst_lt (args[i], integer_zero_node))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i value %qE is negative",
- exp, idx[i] + 1, args[i]);
- }
- else if (integer_zerop (args[i]))
- {
- /* Avoid issuing -Walloc-zero for allocation functions other
- than __builtin_alloca that are declared with attribute
- returns_nonnull because there's no portability risk. This
- avoids warning for such calls to libiberty's xmalloc and
- friends.
- Also avoid issuing the warning for calls to function named
- "alloca". */
- if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
- && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
- || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
- && !lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
- warned = warning_at (loc, OPT_Walloc_zero,
- "%Kargument %i value is zero",
- exp, idx[i] + 1);
- }
- else if (tree_int_cst_lt (maxobjsize, args[i]))
- {
- /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
- mode and with -fno-exceptions as a way to indicate array
- size overflow. There's no good way to detect C++98 here
- so avoid diagnosing these calls for all C++ modes. */
- if (i == 0
- && !args[1]
- && lang_GNU_CXX ()
- && DECL_IS_OPERATOR_NEW (fn)
- && integer_all_onesp (args[i]))
- continue;
-
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i value %qE exceeds "
- "maximum object size %E",
- exp, idx[i] + 1, args[i], maxobjsize);
- }
- }
- else if (TREE_CODE (args[i]) == SSA_NAME
- && get_size_range (args[i], argrange[i]))
- {
- /* Verify that the argument's range is not negative (including
- upper bound of zero). */
- if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
- && tree_int_cst_le (argrange[i][1], integer_zero_node))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i range [%E, %E] is negative",
- exp, idx[i] + 1,
- argrange[i][0], argrange[i][1]);
- }
- else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
- {
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kargument %i range [%E, %E] exceeds "
- "maximum object size %E",
- exp, idx[i] + 1,
- argrange[i][0], argrange[i][1],
- maxobjsize);
- }
- }
- }
-
- if (!argrange[0])
- return;
-
- /* For a two-argument alloc_size, validate the product of the two
- arguments if both of their values or ranges are known. */
- if (!warned && tree_fits_uhwi_p (argrange[0][0])
- && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
- && !integer_onep (argrange[0][0])
- && !integer_onep (argrange[1][0]))
- {
- /* Check for overflow in the product of a function decorated with
- attribute alloc_size (X, Y). */
- unsigned szprec = TYPE_PRECISION (size_type_node);
- wide_int x = wi::to_wide (argrange[0][0], szprec);
- wide_int y = wi::to_wide (argrange[1][0], szprec);
-
- bool vflow;
- wide_int prod = wi::umul (x, y, &vflow);
-
- if (vflow)
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kproduct %<%E * %E%> of arguments %i and %i "
- "exceeds %<SIZE_MAX%>",
- exp, argrange[0][0], argrange[1][0],
- idx[0] + 1, idx[1] + 1);
- else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
- warned = warning_at (loc, OPT_Walloc_size_larger_than_,
- "%Kproduct %<%E * %E%> of arguments %i and %i "
- "exceeds maximum object size %E",
- exp, argrange[0][0], argrange[1][0],
- idx[0] + 1, idx[1] + 1,
- maxobjsize);
-
- if (warned)
- {
- /* Print the full range of each of the two arguments to make
- it clear when it is, in fact, in a range and not constant. */
- if (argrange[0][0] != argrange [0][1])
- inform (loc, "argument %i in the range [%E, %E]",
- idx[0] + 1, argrange[0][0], argrange[0][1]);
- if (argrange[1][0] != argrange [1][1])
- inform (loc, "argument %i in the range [%E, %E]",
- idx[1] + 1, argrange[1][0], argrange[1][1]);
- }
- }
-
- if (warned)
- {
- location_t fnloc = DECL_SOURCE_LOCATION (fn);
-
- if (DECL_IS_BUILTIN (fn))
- inform (loc,
- "in a call to built-in allocation function %qD", fn);
- else
- inform (fnloc,
- "in a call to allocation function %qD declared here", fn);
- }
-}
-
-/* If EXPR refers to a character array or pointer declared attribute
- nonstring return a decl for that array or pointer and set *REF to
- the referenced enclosing object or pointer. Otherwise returns
- null. */
-
-tree
-get_attr_nonstring_decl (tree expr, tree *ref)
-{
- tree decl = expr;
- if (TREE_CODE (decl) == SSA_NAME)
- {
- gimple *def = SSA_NAME_DEF_STMT (decl);
-
- if (is_gimple_assign (def))
- {
- tree_code code = gimple_assign_rhs_code (def);
- if (code == ADDR_EXPR
- || code == COMPONENT_REF
- || code == VAR_DECL)
- decl = gimple_assign_rhs1 (def);
- }
- else if (tree var = SSA_NAME_VAR (decl))
- decl = var;
- }
-
- if (TREE_CODE (decl) == ADDR_EXPR)
- decl = TREE_OPERAND (decl, 0);
-
- if (ref)
- *ref = decl;
-
- if (TREE_CODE (decl) == COMPONENT_REF)
- decl = TREE_OPERAND (decl, 1);
-
- if (DECL_P (decl)
- && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
- return decl;
-
- return NULL_TREE;
-}
-
-/* Warn about passing a non-string array/pointer to a function that
- expects a nul-terminated string argument. */
-
-void
-maybe_warn_nonstring_arg (tree fndecl, tree exp)
-{
- if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
- return;
-
- bool with_bounds = CALL_WITH_BOUNDS_P (exp);
-
- /* The bound argument to a bounded string function like strncpy. */
- tree bound = NULL_TREE;
-
- /* It's safe to call "bounded" string functions with a non-string
- argument since the functions provide an explicit bound for this
- purpose. */
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STPNCPY_CHK:
- case BUILT_IN_STRNCMP:
- case BUILT_IN_STRNCASECMP:
- case BUILT_IN_STRNCPY:
- case BUILT_IN_STRNCPY_CHK:
- bound = CALL_EXPR_ARG (exp, with_bounds ? 4 : 2);
- break;
-
- case BUILT_IN_STRNDUP:
- bound = CALL_EXPR_ARG (exp, with_bounds ? 2 : 1);
- break;
-
- default:
- break;
- }
-
- /* Determine the range of the bound argument (if specified). */
- tree bndrng[2] = { NULL_TREE, NULL_TREE };
- if (bound)
- get_size_range (bound, bndrng);
-
- /* Iterate over the built-in function's formal arguments and check
- each const char* against the actual argument. If the actual
- argument is declared attribute non-string issue a warning unless
- the argument's maximum length is bounded. */
- function_args_iterator it;
- function_args_iter_init (&it, TREE_TYPE (fndecl));
-
- for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
- {
- tree argtype = function_args_iter_cond (&it);
- if (!argtype)
- break;
-
- if (TREE_CODE (argtype) != POINTER_TYPE)
- continue;
-
- argtype = TREE_TYPE (argtype);
-
- if (TREE_CODE (argtype) != INTEGER_TYPE
- || !TYPE_READONLY (argtype))
- continue;
-
- argtype = TYPE_MAIN_VARIANT (argtype);
- if (argtype != char_type_node)
- continue;
-
- tree callarg = CALL_EXPR_ARG (exp, argno);
- if (TREE_CODE (callarg) == ADDR_EXPR)
- callarg = TREE_OPERAND (callarg, 0);
-
- /* See if the destination is declared with attribute "nonstring". */
- tree decl = get_attr_nonstring_decl (callarg);
- if (!decl)
- continue;
-
- tree type = TREE_TYPE (decl);
-
- offset_int wibnd = 0;
- if (bndrng[0])
- wibnd = wi::to_offset (bndrng[0]);
-
- offset_int asize = wibnd;
-
- if (TREE_CODE (type) == ARRAY_TYPE)
- if (tree arrbnd = TYPE_DOMAIN (type))
- {
- if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
- asize = wi::to_offset (arrbnd) + 1;
- }
-
- location_t loc = EXPR_LOCATION (exp);
-
- bool warned = false;
-
- if (wi::ltu_p (asize, wibnd))
- warned = warning_at (loc, OPT_Wstringop_overflow_,
- "%qD argument %i declared attribute %<nonstring%> "
- "is smaller than the specified bound %E",
- fndecl, argno + 1, bndrng[0]);
- else if (!bound)
- warned = warning_at (loc, OPT_Wstringop_overflow_,
- "%qD argument %i declared attribute %<nonstring%>",
- fndecl, argno + 1);
-
- if (warned)
- inform (DECL_SOURCE_LOCATION (decl),
- "argument %qD declared here", decl);
- }
-}
-
/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */
-static void
+void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
i = num_actuals - 1;
{
- int j = i, ptr_arg = -1;
+ int j = i;
call_expr_arg_iterator iter;
tree arg;
bitmap slots = NULL;
{
args[j].tree_value = struct_value_addr_value;
j--;
-
- /* If we pass structure address then we need to
- create bounds for it. Since created bounds is
- a call statement, we expand it right here to avoid
- fixing all other places where it may be expanded. */
- if (CALL_WITH_BOUNDS_P (exp))
- {
- args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
- args[j].tree_value
- = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
- expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
- EXPAND_NORMAL, 0, false);
- args[j].pointer_arg = j + 1;
- j--;
- }
}
argpos = 0;
FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
{
tree argtype = TREE_TYPE (arg);
- /* Remember last param with pointer and associate it
- with following pointer bounds. */
- if (CALL_WITH_BOUNDS_P (exp)
- && chkp_type_has_pointer (argtype))
- {
- if (slots)
- BITMAP_FREE (slots);
- ptr_arg = j;
- if (!BOUNDED_TYPE_P (argtype))
- {
- slots = BITMAP_ALLOC (NULL);
- chkp_find_bound_slots (argtype, slots);
- }
- }
- else if (CALL_WITH_BOUNDS_P (exp)
- && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
- argpos < n_named_args))
- {
- if (slots)
- BITMAP_FREE (slots);
- ptr_arg = j;
- }
- else if (POINTER_BOUNDS_TYPE_P (argtype))
- {
- /* We expect bounds in instrumented calls only.
- Otherwise it is a sign we lost flag due to some optimization
- and may emit call args incorrectly. */
- gcc_assert (CALL_WITH_BOUNDS_P (exp));
-
- /* For structures look for the next available pointer. */
- if (ptr_arg != -1 && slots)
- {
- unsigned bnd_no = bitmap_first_set_bit (slots);
- args[j].pointer_offset =
- bnd_no * POINTER_SIZE / BITS_PER_UNIT;
-
- bitmap_clear_bit (slots, bnd_no);
-
- /* Check we have no more pointers in the structure. */
- if (bitmap_empty_p (slots))
- BITMAP_FREE (slots);
- }
- args[j].pointer_arg = ptr_arg;
-
- /* Check we covered all pointers in the previous
- non bounds arg. */
- if (!slots)
- ptr_arg = -1;
- }
- else
- ptr_arg = -1;
-
if (targetm.calls.split_complex_arg
&& argtype
&& TREE_CODE (argtype) == COMPLEX_TYPE
bitmap_obstack_release (NULL);
- /* Extract attribute alloc_size and if set, store the indices of
- the corresponding arguments in ALLOC_IDX, and then the actual
- argument(s) at those indices in ALLOC_ARGS. */
- int alloc_idx[2] = { -1, -1 };
- if (tree alloc_size
- = (fndecl ? lookup_attribute ("alloc_size",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
- : NULL_TREE))
- {
- tree args = TREE_VALUE (alloc_size);
- alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
- if (TREE_CHAIN (args))
- alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
- }
-
- /* Array for up to the two attribute alloc_size arguments. */
- tree alloc_args[] = { NULL_TREE, NULL_TREE };
-
/* I counts args in order (to be) pushed; ARGPOS counts in order written. */
for (argpos = 0; argpos < num_actuals; i--, argpos++)
{
tree type = TREE_TYPE (args[i].tree_value);
int unsignedp;
- machine_mode mode;
/* Replace erroneous argument with constant zero. */
if (type == error_mark_node || !COMPLETE_TYPE_P (type))
/* If TYPE is a transparent union or record, pass things the way
we would pass the first field of the union or record. We have
already verified that the modes are the same. */
- if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
- && TYPE_TRANSPARENT_AGGR (type))
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
type = TREE_TYPE (first_field (type));
/* Decide where to pass this arg.
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args))
+ function_arg_info arg (type, argpos < n_named_args);
+ if (pass_by_reference (args_so_far_pnt, arg))
{
- bool callee_copies;
- tree base = NULL_TREE;
-
- callee_copies
- = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args);
-
- /* If we're compiling a thunk, pass through invisible references
- instead of making a copy. */
- if (call_from_thunk_p
- || (callee_copies
- && !TREE_ADDRESSABLE (type)
- && (base = get_base_address (args[i].tree_value))
- && TREE_CODE (base) != SSA_NAME
- && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
+ const bool callee_copies
+ = reference_callee_copied (args_so_far_pnt, arg);
+ tree base;
+
+ /* If we're compiling a thunk, pass directly the address of an object
+ already in memory, instead of making a copy. Likewise if we want
+ to make the copy in the callee instead of the caller. */
+ if ((call_from_thunk_p || callee_copies)
+ && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
+ && ((base = get_base_address (args[i].tree_value)), true)
+ && TREE_CODE (base) != SSA_NAME
+ && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
{
/* We may have turned the parameter value into an SSA name.
Go back to the original parameter so we can take the
*may_tailcall = false;
maybe_complain_about_tail_call (exp,
"a callee-copied argument is"
- " stored in the current "
+ " stored in the current"
" function's frame");
}
"argument must be passed"
" by copying");
}
+ arg.pass_by_reference = true;
}
unsignedp = TYPE_UNSIGNED (type);
- mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
- fndecl ? TREE_TYPE (fndecl) : fntype, 0);
+ arg.type = type;
+ arg.mode
+ = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
+ fndecl ? TREE_TYPE (fndecl) : fntype, 0);
args[i].unsignedp = unsignedp;
- args[i].mode = mode;
+ args[i].mode = arg.mode;
targetm.calls.warn_parameter_passing_abi (args_so_far, type);
- args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].reg = targetm.calls.function_arg (args_so_far, arg);
if (args[i].reg && CONST_INT_P (args[i].reg))
- {
- args[i].special_slot = args[i].reg;
- args[i].reg = NULL;
- }
+ args[i].reg = NULL;
/* If this is a sibling call and the machine has register windows, the
register window has to be unwinded before calling the routine, so
arguments have to go into the incoming registers. */
if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
args[i].tail_call_reg
- = targetm.calls.function_incoming_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ = targetm.calls.function_incoming_arg (args_so_far, arg);
else
args[i].tail_call_reg = args[i].reg;
if (args[i].reg)
- args[i].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
- args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
+ args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
/* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
it means that we are to pass this arg in the register(s) designated
|| (args[i].pass_on_stack && args[i].reg != 0))
*must_preallocate = 1;
- /* No stack allocation and padding for bounds. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- ;
/* Compute the stack-size of this argument. */
- else if (args[i].reg == 0 || args[i].partial != 0
+ if (args[i].reg == 0 || args[i].partial != 0
|| reg_parm_stack_space > 0
|| args[i].pass_on_stack)
- locate_and_pad_parm (mode, type,
+ locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
args[i].locate.where_pad =
- BLOCK_REG_PADDING (mode, type,
+ BLOCK_REG_PADDING (arg.mode, type,
int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif
/* Increment ARGS_SO_FAR, which has info about which arg-registers
have been used, etc. */
- targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
- type, argpos < n_named_args);
-
- /* Store argument values for functions decorated with attribute
- alloc_size. */
- if (argpos == alloc_idx[0])
- alloc_args[0] = args[i].tree_value;
- else if (argpos == alloc_idx[1])
- alloc_args[1] = args[i].tree_value;
- }
-
- if (alloc_args[0])
- {
- /* Check the arguments of functions decorated with attribute
- alloc_size. */
- maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
+ /* ??? Traditionally we've passed TYPE_MODE here, instead of the
+ promoted_mode used for function_arg above. However, the
+ corresponding handling of incoming arguments in function.c
+ does pass the promoted mode. */
+ arg.mode = TYPE_MODE (type);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
-
- /* Detect passing non-string arguments to functions expecting
- nul-terminated strings. */
- maybe_warn_nonstring_arg (fndecl, exp);
}
/* Update ARGS_SIZE to contain the total size for the argument block.
partial_seen = 1;
else if (partial_seen && args[i].reg == 0)
must_preallocate = 1;
- /* We preallocate in case there are bounds passed
- in the bounds table to have precomputed address
- for bounds association. */
- else if (POINTER_BOUNDS_P (args[i].tree_value)
- && !args[i].reg)
- must_preallocate = 1;
if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
&& (TREE_CODE (args[i].tree_value) == CALL_EXPR
if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
continue;
- /* Pointer Bounds are never passed on the stack. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- continue;
-
addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
addr = plus_constant (Pmode, addr, arg_offset);
{
if (idx >= internal_arg_pointer_exp_state.cache.length ())
internal_arg_pointer_exp_state.cache
- .safe_grow_cleared (idx + 1);
+ .safe_grow_cleared (idx + 1, true);
internal_arg_pointer_exp_state.cache[idx] = val;
}
}
poly_int64 size = 0;
HOST_WIDE_INT const_size = 0;
rtx_insn *before_arg = get_last_insn ();
+ tree type = TREE_TYPE (args[i].tree_value);
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
+ type = TREE_TYPE (first_field (type));
/* Set non-negative if we must move a word at a time, even if
just one word (e.g, partial == 4 && mode == DFmode). Set
to -1 if we just use a normal move insn. This value can be
gcc_assert (partial % UNITS_PER_WORD == 0);
nregs = partial / UNITS_PER_WORD;
}
- else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
+ else if (TYPE_MODE (type) == BLKmode)
{
/* Variable-sized parameters should be described by a
PARALLEL instead. */
- const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
+ const_size = int_size_in_bytes (type);
gcc_assert (const_size >= 0);
nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
size = const_size;
if (GET_CODE (reg) == PARALLEL)
use_group_regs (call_fusage, reg);
else if (nregs == -1)
- use_reg_mode (call_fusage, reg,
- TYPE_MODE (TREE_TYPE (args[i].tree_value)));
+ use_reg_mode (call_fusage, reg, TYPE_MODE (type));
else if (nregs > 0)
use_regs (call_fusage, REGNO (reg), nregs);
}
can_implement_as_sibling_call_p (tree exp,
rtx structure_value_addr,
tree funtype,
- int reg_parm_stack_space ATTRIBUTE_UNUSED,
tree fndecl,
int flags,
tree addr,
return false;
}
-#ifdef REG_PARM_STACK_SPACE
- /* If outgoing reg parm stack space changes, we can not do sibcall. */
- if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
- != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
- || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
- {
- maybe_complain_about_tail_call (exp,
- "inconsistent size of stack space"
- " allocated for arguments which are"
- " passed in registers");
- return false;
- }
-#endif
-
/* Check whether the target is able to optimize the call
into a sibcall. */
if (!targetm.function_ok_for_sibcall (fndecl, exp))
return true;
}
+/* Update stack alignment when the parameter is passed on the stack
+   since the outgoing parameter requires extra alignment on the calling
+   function side.  */
+
+static void
+update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
+{
+  /* LOCATE->BOUNDARY is the alignment (in bits) required for this
+     argument's stack slot.  Raise both function-wide records to at
+     least that boundary; never lower them.  */
+  if (crtl->stack_alignment_needed < locate->boundary)
+    crtl->stack_alignment_needed = locate->boundary;
+  if (crtl->preferred_stack_boundary < locate->boundary)
+    crtl->preferred_stack_boundary = locate->boundary;
+}
+
/* Generate all the code for a CALL_EXPR exp
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
/* Register in which non-BLKmode value will be returned,
or 0 if no value or if value is BLKmode. */
rtx valreg;
- /* Register(s) in which bounds are returned. */
- rtx valbnd = NULL;
/* Address where we should return a BLKmode value;
0 if value not BLKmode. */
rtx structure_value_addr = 0;
So the entire argument block must then be preallocated (i.e., we
ignore PUSH_ROUNDING in that case). */
- int must_preallocate = !PUSH_ARGS;
+ int must_preallocate = !targetm.calls.push_argument (0);
/* Size of the stack reserved for parameter registers. */
int reg_parm_stack_space = 0;
side-effects. */
if ((flags & (ECF_CONST | ECF_PURE))
&& (!(flags & ECF_LOOPING_CONST_OR_PURE))
+ && (flags & ECF_NOTHROW)
&& (ignore || target == const0_rtx
|| TYPE_MODE (rettype) == VOIDmode))
{
#endif
if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
- && reg_parm_stack_space > 0 && PUSH_ARGS)
+ && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
must_preallocate = 1;
/* Set up a place to return a structure. */
if (CALL_EXPR_RETURN_SLOT_OPT (exp)
&& target
&& MEM_P (target)
- && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
- && targetm.slow_unaligned_access (TYPE_MODE (rettype),
- MEM_ALIGN (target))))
+ /* If rettype is addressable, we may not create a temporary.
+ If target is properly aligned at runtime and the compiler
+ just doesn't know about it, it will work fine, otherwise it
+ will be UB. */
+ && (TREE_ADDRESSABLE (rettype)
+ || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
+ && targetm.slow_unaligned_access (TYPE_MODE (rettype),
+ MEM_ALIGN (target)))))
structure_value_addr = XEXP (target, 0);
else
{
structure_value_addr_value =
make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
- structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
+ structure_value_addr_parm = 1;
}
/* Count the arguments and set NUM_ACTUALS. */
pushed these optimizations into -O2. Don't try if we're already
expanding a call, as that means we're an argument. Don't try if
there's cleanups, as we know there's code to follow the call. */
-
if (currently_expanding_call++ != 0
- || !flag_optimize_sibling_calls
+ || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
|| args_size.var
|| dbg_cnt (tail_call) == false)
try_tail_call = 0;
+ /* Workaround buggy C/C++ wrappers around Fortran routines with
+ character(len=constant) arguments if the hidden string length arguments
+ are passed on the stack; if the callers forget to pass those arguments,
+ attempting to tail call in such routines leads to stack corruption.
+ Avoid tail calls in functions where at least one such hidden string
+ length argument is passed (partially or fully) on the stack in the
+ caller and the callee needs to pass any arguments on the stack.
+ See PR90329. */
+ if (try_tail_call && maybe_ne (args_size.constant, 0))
+ for (tree arg = DECL_ARGUMENTS (current_function_decl);
+ arg; arg = DECL_CHAIN (arg))
+ if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
+ {
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
+ if (MEM_P (*iter))
+ {
+ try_tail_call = 0;
+ break;
+ }
+ }
+
/* If the user has marked the function as requiring tail-call
optimization, attempt it. */
if (must_tail_call)
try_tail_call = can_implement_as_sibling_call_p (exp,
structure_value_addr,
funtype,
- reg_parm_stack_space,
fndecl,
flags, addr, args_size);
/* Ensure current function's preferred stack boundary is at least
what we need. Stack alignment may also increase preferred stack
boundary. */
+ for (i = 0; i < num_actuals; i++)
+ if (reg_parm_stack_space > 0
+ || args[i].reg == 0
+ || args[i].partial != 0
+ || args[i].pass_on_stack)
+ update_stack_alignment_for_call (&args[i].locate);
if (crtl->preferred_stack_boundary < preferred_stack_boundary)
crtl->preferred_stack_boundary = preferred_stack_boundary;
else
preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
+ if (flag_callgraph_info)
+ record_final_call (fndecl, EXPR_LOCATION (exp));
+
/* We want to make two insn chains; one for a sibling call, the other
for a normal call. We will select one of the two chains after
initial RTL generation is complete. */
for (i = 0; i < num_actuals; i++)
{
- /* Delay bounds until all other args are stored. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- continue;
- else if (args[i].reg == 0 || args[i].pass_on_stack)
+ if (args[i].reg == 0 || args[i].pass_on_stack)
{
rtx_insn *before_arg = get_last_insn ();
/* Figure out the register where the value, if any, will come back. */
valreg = 0;
- valbnd = 0;
if (TYPE_MODE (rettype) != VOIDmode
&& ! structure_value_addr)
{
if (pcc_struct_value)
- {
- valreg = hard_function_value (build_pointer_type (rettype),
- fndecl, NULL, (pass == 0));
- if (CALL_WITH_BOUNDS_P (exp))
- valbnd = targetm.calls.
- chkp_function_value_bounds (build_pointer_type (rettype),
- fndecl, (pass == 0));
- }
+ valreg = hard_function_value (build_pointer_type (rettype),
+ fndecl, NULL, (pass == 0));
else
- {
- valreg = hard_function_value (rettype, fndecl, fntype,
- (pass == 0));
- if (CALL_WITH_BOUNDS_P (exp))
- valbnd = targetm.calls.chkp_function_value_bounds (rettype,
- fndecl,
- (pass == 0));
- }
+ valreg = hard_function_value (rettype, fndecl, fntype,
+ (pass == 0));
/* If VALREG is a PARALLEL whose first member has a zero
offset, use that. This is for targets such as m68k that
}
}
- /* Store all bounds not passed in registers. */
- for (i = 0; i < num_actuals; i++)
- {
- if (POINTER_BOUNDS_P (args[i].tree_value)
- && !args[i].reg)
- store_bounds (&args[i],
- args[i].pointer_arg == -1
- ? NULL
- : &args[args[i].pointer_arg]);
- }
-
/* If register arguments require space on the stack and stack space
was not preallocated, allocate stack space here for arguments
passed in registers. */
/* Set up next argument register. For sibling calls on machines
with register windows this should be the incoming register. */
if (pass == 0)
- next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
- VOIDmode,
- void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_incoming_arg
+ (args_so_far, function_arg_info::end_marker ());
else
- next_arg_reg = targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_arg
+ (args_so_far, function_arg_info::end_marker ());
if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
{
emit_move_insn (temp, valreg);
- /* The return value from a malloc-like function can not alias
+ /* The return value from a malloc-like function cannot alias
anything else. */
last = get_last_insn ();
add_reg_note (last, REG_NOALIAS, temp);
free (stack_usage_map_buf);
free (args);
-
- /* Join result with returned bounds so caller may use them if needed. */
- target = chkp_join_splitted_slot (target, valbnd);
-
return target;
}
argvec[count].mode = Pmode;
argvec[count].partial = 0;
- argvec[count].reg = targetm.calls.function_arg (args_so_far,
- Pmode, NULL_TREE, true);
- gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
- NULL_TREE, 1) == 0);
+ function_arg_info ptr_arg (Pmode, /*named=*/true);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
+ gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, ptr_arg);
count++;
}
for (unsigned int i = 0; count < nargs; i++, count++)
{
rtx val = args[i].first;
- machine_mode mode = args[i].second;
+ function_arg_info arg (args[i].second, /*named=*/true);
int unsigned_p = 0;
/* We cannot convert the arg value to the mode the library wants here;
must do it earlier where we know the signedness of the arg. */
- gcc_assert (mode != BLKmode
- && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
+ gcc_assert (arg.mode != BLKmode
+ && (GET_MODE (val) == arg.mode
+ || GET_MODE (val) == VOIDmode));
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
- && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
+ && !(CONSTANT_P (val)
+ && targetm.legitimate_constant_p (arg.mode, val)))
val = force_operand (val, NULL_RTX);
- if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far_v, arg))
{
rtx slot;
- int must_copy
- = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
+ int must_copy = !reference_callee_copied (&args_so_far_v, arg);
/* If this was a CONST function, it is now PURE since it now
reads memory. */
}
else
{
- slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
+ slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
1, 1);
emit_move_insn (slot, val);
}
slot),
call_fusage);
- mode = Pmode;
+ arg.mode = Pmode;
+ arg.pass_by_reference = true;
val = force_operand (XEXP (slot, 0), NULL_RTX);
}
- mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
- argvec[count].mode = mode;
- argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
- argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
- NULL_TREE, true);
+ arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
+ NULL_TREE, 0);
+ argvec[count].mode = arg.mode;
+ argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
+ unsigned_p);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
argvec[count].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (args_so_far, arg);
if (argvec[count].reg == 0
|| argvec[count].partial != 0
|| reg_parm_stack_space > 0)
{
- locate_and_pad_parm (mode, NULL_TREE,
+ locate_and_pad_parm (arg.mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
argvec[count].locate.where_pad =
- BLOCK_REG_PADDING (mode, NULL_TREE,
- known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
+ BLOCK_REG_PADDING (arg.mode, NULL_TREE,
+ known_le (GET_MODE_SIZE (arg.mode),
+ UNITS_PER_WORD));
#endif
- targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
+ for (int i = 0; i < nargs; i++)
+ if (reg_parm_stack_space > 0
+ || argvec[i].reg == 0
+ || argvec[i].partial != 0)
+ update_stack_alignment_for_call (&argvec[i].locate);
+
/* If this machine requires an external definition for library
functions, write one out. */
assemble_external_libcall (fun);
}
else
{
- if (!PUSH_ARGS)
+ if (!targetm.calls.push_argument (0))
argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
}
before_call = get_last_insn ();
+ if (flag_callgraph_info)
+ record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
+
/* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
will set inhibit_defer_pop to that value. */
/* The return type is needed to decide how many bytes the function pops.
original_args_size.constant, args_size.constant,
struct_value_size,
targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node, true),
+ function_arg_info::end_marker ()),
valreg,
old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
}
\f
-/* Store pointer bounds argument ARG into Bounds Table entry
- associated with PARM. */
-static void
-store_bounds (struct arg_data *arg, struct arg_data *parm)
-{
- rtx slot = NULL, ptr = NULL, addr = NULL;
-
- /* We may pass bounds not associated with any pointer. */
- if (!parm)
- {
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
- ptr = const0_rtx;
- }
- /* Find pointer associated with bounds and where it is
- passed. */
- else
- {
- if (!parm->reg)
- {
- gcc_assert (!arg->special_slot);
-
- addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
- }
- else if (REG_P (parm->reg))
- {
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
-
- if (MEM_P (parm->value))
- addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
- else if (REG_P (parm->value))
- ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
- else
- {
- gcc_assert (!arg->pointer_offset);
- ptr = parm->value;
- }
- }
- else
- {
- gcc_assert (GET_CODE (parm->reg) == PARALLEL);
-
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
-
- if (parm->parallel_value)
- ptr = chkp_get_value_with_offs (parm->parallel_value,
- GEN_INT (arg->pointer_offset));
- else
- gcc_unreachable ();
- }
- }
-
- /* Expand bounds. */
- if (!arg->value)
- arg->value = expand_normal (arg->tree_value);
-
- targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
-}
-
/* Store a single argument for a function call
into the register or memory area where it must be passed.
*ARG describes the argument value and where to pass it.
rtx x = arg->value;
poly_int64 i = 0;
- if (XEXP (x, 0) == crtl->args.internal_arg_pointer
- || (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- crtl->args.internal_arg_pointer
- && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
+ if (strip_offset (XEXP (x, 0), &i)
+ == crtl->args.internal_arg_pointer)
{
- if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
- i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
-
/* arg.locate doesn't contain the pretend_args_size offset,
it's part of argblock. Ensure we don't count it in I. */
if (STACK_GROWS_DOWNWARD)
return sibcall_failure;
}
-/* Nonzero if we do not know how to pass TYPE solely in registers.  */
+/* Nonzero if we do not know how to pass ARG solely in registers.  */
 bool
-must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
-			     const_tree type)
+must_pass_in_stack_var_size (const function_arg_info &arg)
 {
-  if (!type)
+  if (!arg.type)
     return false;
 /* If the type has variable size...  */
-  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+  /* A poly_int constant size (not just a plain INTEGER_CST) still
+     counts as a known size here, so e.g. scalable-vector types are
+     not treated as variable-sized.  */
+  if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
     return true;
 /* If the type is marked as addressable (it is required
    to be constructed into the stack)...  */
-  if (TREE_ADDRESSABLE (type))
+  if (TREE_ADDRESSABLE (arg.type))
     return true;
 return false;
 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */
 bool
-must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
+must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
 {
-  if (!type)
+  if (!arg.type)
     return false;
 /* If the type has variable size...  */
-  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+  if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
     return true;
 /* If the type is marked as addressable (it is required
    to be constructed into the stack)...  */
-  if (TREE_ADDRESSABLE (type))
+  if (TREE_ADDRESSABLE (arg.type))
     return true;
+  /* ABI-empty types occupy no argument space at all, so they never
+     need to be passed on the stack.  */
-  if (TYPE_EMPTY_P (type))
+  if (TYPE_EMPTY_P (arg.type))
     return false;
 /* If the padding and mode of the type is such that a copy into
    a register would put it into the wrong part of the register.  */
-  if (mode == BLKmode
-      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
-      && (targetm.calls.function_arg_padding (mode, type)
+  if (arg.mode == BLKmode
+      && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
+      && (targetm.calls.function_arg_padding (arg.mode, arg.type)
 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
     return true;
 return false;
 }
-/* Tell the garbage collector about GTY markers in this source file. */
-#include "gt-calls.h"
+/* Return true if TYPE must be passed on the stack when passed to
+   the "..." arguments of a function.  */
+
+bool
+must_pass_va_arg_in_stack (tree type)
+{
+  /* Arguments matching "..." are by definition unnamed.  */
+  function_arg_info arg (type, /*named=*/false);
+  return targetm.calls.must_pass_in_stack (arg);
+}
+
+/* Return true if FIELD is the C++17 empty base field that should
+   be ignored for ABI calling convention decisions in order to
+   maintain ABI compatibility between C++14 and earlier, which doesn't
+   add this FIELD to classes with empty bases, and C++17 and later
+   which does.  */
+
+bool
+cxx17_empty_base_field_p (const_tree field)
+{
+  /* The field must be the compiler-generated, ABI-ignored marker for
+     an empty base (a record/union type), and not a member the user
+     explicitly marked [[no_unique_address]], which keeps its own ABI
+     significance.  */
+  return (DECL_FIELD_ABI_IGNORED (field)
+	  && DECL_ARTIFICIAL (field)
+	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
+	  && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
+}