/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "tree-ssa-strlen.h"
#include "intl.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-fold.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
			   unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (upper_bound.is_constant (&const_upper))
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}

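/* Editorial aside (not part of the original file): a minimal sketch of how
   the two-level scheme above behaves, with hypothetical sizes.  Exact
   per-byte tracking is used while both bounds are compile-time constants;
   once a variable-sized (poly_int) region is marked, everything from its
   constant lower bound upward is conservatively treated as used via
   STACK_USAGE_WATERMARK.  */
#if 0
  /* Assume a 32-byte outgoing area and no reserved register-parm space.  */
  highest_outgoing_arg_in_use = 32;
  stack_usage_map = XCNEWVEC (char, 32);

  mark_stack_region_used (0, 16);	      /* Bytes [0, 16) are now used.  */
  stack_region_maybe_used_p (8, 24, 0);	      /* True: overlaps [0, 16).  */
  stack_region_maybe_used_p (16, 32, 0);      /* False: disjoint region.  */
#endif
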
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
	 runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
	{
	  const int bit_val = targetm.calls.custom_function_descriptors;
	  rtx call_lab = gen_label_rtx ();

	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
	  fndecl_or_type
	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			  fndecl_or_type);
	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

	  if (GET_MODE (funexp) != Pmode)
	    funexp = convert_memory_address (Pmode, funexp);

	  /* Avoid long live ranges around function calls.  */
	  funexp = copy_to_mode_reg (Pmode, funexp);

	  if (REG_P (chain))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

	  /* Emit the runtime identification pattern.  */
	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
				   call_lab);

	  /* Statically predict the branch to very likely taken.  */
	  rtx_insn *insn = get_last_insn ();
	  if (JUMP_P (insn))
	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

	  /* Load the descriptor.  */
	  rtx mem = gen_rtx_MEM (ptr_mode,
				 plus_constant (Pmode, funexp, - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (chain, mem);

	  mem = gen_rtx_MEM (ptr_mode,
			     plus_constant (Pmode, funexp,
					    POINTER_SIZE / BITS_PER_UNIT
					    - bit_val));
	  MEM_NOTRAP_P (mem) = 1;
	  mem = convert_memory_address (Pmode, mem);
	  emit_move_insn (funexp, mem);

	  emit_label (call_lab);

	  if (REG_P (chain))
	    {
	      use_reg (call_fusage, chain);
	      STATIC_CHAIN_REG_P (chain) = 1;
	    }

	  /* Make sure we're not going to be overwritten below.  */
	  gcc_assert (!static_chain_value);
	}

      /* If we are using registers for parameters, force the
	 function address into a register now.  */
      funexp = ((reg_parm_seen
		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
		? force_not_mem (memory_address (FUNCTION_MODE, funexp))
		: memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer
	 of ptr_mode.  In this case, it should be converted into address
	 mode to be a valid address for the memory rtx pattern.  See
	 PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
	funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
	{
	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
	    funexp = force_reg (Pmode, funexp);
	}
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
	  || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	{
	  use_reg (call_fusage, chain);
	  STATIC_CHAIN_REG_P (chain) = 1;
	}
    }

  return funexp;
}

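/* Editorial aside (not part of the original file): the descriptor test
   emitted above, re-sketched as plain C.  BIT_VAL stands for the target's
   targetm.calls.custom_function_descriptors value; a descriptor is a pair
   (static chain, code address), and a pointer to it is tagged by adding
   BIT_VAL.  Types and names here are hypothetical, illustration only.  */
#if 0
struct fdesc { void *chain; void *code; };

static void *
resolve_call_target (void *funexp, void **chain_out, int bit_val)
{
  if (((uintptr_t) funexp & bit_val) != 0)
    {
      /* Tagged: FUNEXP points BIT_VAL bytes past a descriptor.  */
      struct fdesc *d = (struct fdesc *) ((char *) funexp - bit_val);
      *chain_out = d->chain;
      return d->code;
    }
  return funexp;	/* Ordinary code address; chain left untouched.  */
}
#endif
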
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     poly_int64 stack_size ATTRIBUTE_UNUSED,
	     poly_int64 rounded_stack_size,
	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  int already_popped = 0;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations:
	 one compilation may garbage-collect an entry and then add a
	 different (but equivalent) one, while the other doesn't run
	 the garbage collector at the same spot and keeps sharing the
	 mem_attr with the original equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
	pat = targetm.gen_sibcall_value (valreg, funmem,
					 rounded_stack_size_rtx,
					 next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
				   next_arg_reg,
				   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
	   || !(valreg
		? targetm.have_call_value ()
		: targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = targetm.gen_call_value_pop (valreg, funmem,
					  rounded_stack_size_rtx,
					  next_arg_reg, n_pop);
      else
	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
				    next_arg_reg, n_pop);

      already_popped = 1;
    }
  else
    {
      if (valreg)
	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
				      next_arg_reg, NULL_RTX);
      else
	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
				gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

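/* Editorial aside (not part of the original file): illustration of the
   N_POPPED bookkeeping above for a callee-pops ABI (e.g. a 32-bit
   stdcall-like convention), where targetm.calls.return_pops_args returns
   the full argument size.  */
#if 0
  n_popped = stack_size;		/* Callee pops all of its arguments,  */
  rounded_stack_size -= n_popped;	/* so nothing is left for the caller  */
  stack_pointer_delta -= n_popped;	/* to pop; SP is restored on return.  */
#endif
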
/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 11
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

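/* Editorial aside (not part of the original file): file-scope declarations
   the name heuristic above would flag (signatures simplified).  */
#if 0
extern int setjmp (void *);	/* -> ECF_RETURNS_TWICE ("setjmp")  */
extern int _setjmp (void *);	/* -> ECF_RETURNS_TWICE (prefix "_" stripped)  */
extern int vfork (void);	/* -> ECF_RETURNS_TWICE (exact match)  */
extern void *alloca (long);	/* -> ECF_MAY_BE_ALLOCA (name "alloca")  */
#endif
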
/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

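/* Editorial aside (not part of the original file): a plain-C mirror of the
   internal "fn spec" decoding above, for illustration only.  */
#if 0
static int
decode_fn_spec_first_char (char c)
{
  switch (c)
    {
    case '1': case '2': case '3': case '4':
      return ERF_RETURNS_ARG | (c - '1');	/* Returns argument C - '0'.  */
    case 'm':
      return ERF_NOALIAS;			/* Result aliases nothing.  */
    default:
      return 0;					/* Nothing known.  */
    }
}
#endif
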
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return gimple_call_num_args (stmt) > 0;
      default:
	break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	return true;
      default:
	break;
      }

  return false;
}

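/* Editorial aside (not part of the original file): calls as the predicates
   above would classify them, assuming the usual setup where "alloca" is
   mapped to the builtin.  Names are hypothetical.  */
#if 0
  p = __builtin_alloca (n);	/* gimple_alloca_call_p: true.  */
  q = alloca (n);		/* True too, once mapped to the builtin.  */
  r = my_alloca (n);		/* Neither: not a builtin, wrong name.  */
  /* A plain extern function named "alloca" is caught only by the
     name-based gimple_maybe_alloca_call_p.  */
#endif
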
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

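/* Editorial aside (not part of the original file): source-level attributes
   and the ECF_* bits flags_from_decl_or_type derives from them.  */
#if 0
__attribute__ ((malloc)) void *xm (int);	/* ECF_MALLOC  */
__attribute__ ((const)) int xc (int);		/* ECF_CONST (TREE_READONLY)  */
__attribute__ ((pure)) int xp (int);		/* ECF_PURE (DECL_PURE_P)  */
__attribute__ ((noreturn)) void xn (void);	/* ECF_NORETURN
						   (TREE_THIS_VOLATILE)  */
__attribute__ ((leaf)) int xl (int);		/* ECF_LEAF  */
/* const/pure combined with noreturn marks a deliberate infinite loop:
   ECF_LOOPING_CONST_OR_PURE.  */
#endif
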
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
	flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
	flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}

/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
		   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  type = TREE_TYPE (first_field (type));
	  mode = TYPE_MODE (type);
	}
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
					  type, named_arg);
}

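/* Editorial aside (not part of the original file): two cases the code above
   forces by reference before even asking the target hook.  */
#if 0
void
example (int n)
{
  typedef int vm_type[n];	/* TYPE_SIZE is not an INTEGER_CST: any
				   variable-sized type goes by reference.  */
  extern void use (vm_type *);
}
/* Likewise any TREE_ADDRESSABLE type (e.g. a C++ class with a non-trivial
   copy constructor) must not be copied by the middle end.  */
#endif
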
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
			 tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
				      named_arg);
}


/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

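/* Editorial aside (not part of the original file): the promotion branch
   above triggers for sub-word arguments on targets that promote them
   (hypothetical target with SImode argument registers).  */
#if 0
  extern void callee (short x);	/* TYPE_MODE is HImode ...  */
  callee (s);			/* ... but args[i].mode is SImode, so the
				   value is extended via convert_modes
				   according to args[i].unsignedp.  */
#endif
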
#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

6a0e6138 1120/* If any elements in ARGS refer to parameters that are to be passed in
1121 registers, but not in memory, and whose alignment does not permit a
1122 direct copy into registers. Copy the values into a group of pseudos
c87678e4 1123 which we will later copy into the appropriate hard registers.
6d801f27 1124
1125 Pseudos for each unaligned argument will be stored into the array
1126 args[argnum].aligned_regs. The caller is responsible for deallocating
1127 the aligned_regs array if it is nonzero. */
1128
6a0e6138 1129static void
4c9e08a4 1130store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
6a0e6138 1131{
1132 int i, j;
c87678e4 1133
6a0e6138 1134 for (i = 0; i < num_actuals; i++)
1135 if (args[i].reg != 0 && ! args[i].pass_on_stack
33eb84dc 1136 && GET_CODE (args[i].reg) != PARALLEL
6a0e6138 1137 && args[i].mode == BLKmode
77f1b1bb 1138 && MEM_P (args[i].value)
1139 && (MEM_ALIGN (args[i].value)
6a0e6138 1140 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1141 {
1142 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
5f4cd670 1143 int endian_correction = 0;
6a0e6138 1144
f054eb3c 1145 if (args[i].partial)
1146 {
1147 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1148 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1149 }
1150 else
1151 {
1152 args[i].n_aligned_regs
1153 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1154 }
1155
4c36ffe6 1156 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
6a0e6138 1157
5f4cd670 1158 /* Structures smaller than a word are normally aligned to the
1159 least significant byte. On a BYTES_BIG_ENDIAN machine,
6a0e6138 1160 this means we must skip the empty high order bytes when
1161 calculating the bit offset. */
5f4cd670 1162 if (bytes < UNITS_PER_WORD
1163#ifdef BLOCK_REG_PADDING
1164 && (BLOCK_REG_PADDING (args[i].mode,
1165 TREE_TYPE (args[i].tree_value), 1)
d7ab0e3d 1166 == PAD_DOWNWARD)
5f4cd670 1167#else
1168 && BYTES_BIG_ENDIAN
1169#endif
1170 )
1171 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
6a0e6138 1172
1173 for (j = 0; j < args[i].n_aligned_regs; j++)
1174 {
1175 rtx reg = gen_reg_rtx (word_mode);
1176 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1177 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
6a0e6138 1178
1179 args[i].aligned_regs[j] = reg;
3f71db40 1180 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
5d77cce2 1181 word_mode, word_mode, false, NULL);
6a0e6138 1182
1183 /* There is no need to restrict this code to loading items
1184 in TYPE_ALIGN sized hunks. The bitfield instructions can
1185 load up entire word sized registers efficiently.
1186
1187 ??? This may not be needed anymore.
1188 We use to emit a clobber here but that doesn't let later
1189 passes optimize the instructions we emit. By storing 0 into
1190 the register later passes know the first AND to zero out the
1191 bitfield being set in the register is unnecessary. The store
1192 of 0 will be deleted as will at least the first AND. */
1193
1194 emit_move_insn (reg, const0_rtx);
1195
1196 bytes -= bitsize / BITS_PER_UNIT;
4bb60ec7 1197 store_bit_field (reg, bitsize, endian_correction, 0, 0,
292237f3 1198 word_mode, word, false);
6a0e6138 1199 }
1200 }
1201}
1202
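/* Editorial aside (not part of the original file): a case that can reach
   the bitfield path above on a strict-alignment target with 32-bit words.  */
#if 0
struct __attribute__ ((packed)) s6 { char c[6]; };  /* BLKmode, byte-aligned  */
/* Passed in registers, n_aligned_regs == 2: each word is assembled with
   extract_bit_field/store_bit_field rather than a direct word load from
   the under-aligned memory.  */
#endif
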
/* The limit set by -Walloc-larger-than=.  */
static GTY(()) tree alloc_object_size_limit;

/* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
   setting if the option is specified, or to the maximum object size if it
   is not.  Return the initialized value.  */

static tree
alloc_max_size (void)
{
  if (alloc_object_size_limit)
    return alloc_object_size_limit;

  HOST_WIDE_INT limit = warn_alloc_size_limit;
  if (limit == HOST_WIDE_INT_MAX)
    limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));

  alloc_object_size_limit = build_int_cst (size_type_node, limit);

  return alloc_object_size_limit;
}

/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function
   declared with attribute alloc_size (whose argument may be signed), or
   to a string manipulation function like memset.  When ALLOW_ZERO is true,
   allow returning a range of [0, 0] for a size in an anti-range [1, N]
   where N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
   allocation functions like malloc but it is a valid argument to
   functions like memset.  */

bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}

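/* Editorial aside (not part of the original file): worked examples for the
   anti-range cases above.
     signed int EXP in ~[-5, 10] (spans zero)
       -> negatives are invalid sizes, so RANGE = [11, INT_MAX];
     signed int EXP in ~[-10, -2] (max <= 0)
       -> EXP may still be any non-negative value, so RANGE = [0, INT_MAX];
     unsigned EXP in ~[1, 100] (EXP == 0 || EXP > 100)
       -> RANGE = [101, UINT_MAX], or [0, 0] when ALLOW_ZERO.  */
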
/* Diagnose a call EXP to function FN decorated with attribute alloc_size
   whose argument numbers given by IDX with values given by ARGS exceed
   the maximum object size or cause an unsigned overflow (wrapping) when
   multiplied.  FN is null when EXP is a call via a function pointer.
   When ARGS[0] is null the function does nothing.  ARGS[1] may be null
   for functions like malloc, and non-null for those like calloc that
   are decorated with a two-argument attribute alloc_size.  */

void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
{
  /* The range each of the (up to) two arguments is known to be in.  */
  tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };

  /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
  tree maxobjsize = alloc_max_size ();

  location_t loc = EXPR_LOCATION (exp);

  tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
  bool warned = false;

  /* Validate each argument individually.  */
  for (unsigned i = 0; i != 2 && args[i]; ++i)
    {
      if (TREE_CODE (args[i]) == INTEGER_CST)
	{
	  argrange[i][0] = args[i];
	  argrange[i][1] = args[i];

	  if (tree_int_cst_lt (args[i], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE is negative",
				   exp, idx[i] + 1, args[i]);
	    }
	  else if (integer_zerop (args[i]))
	    {
	      /* Avoid issuing -Walloc-zero for allocation functions other
		 than __builtin_alloca that are declared with attribute
		 returns_nonnull because there's no portability risk.  This
		 avoids warning for such calls to libiberty's xmalloc and
		 friends.
		 Also avoid issuing the warning for calls to function named
		 "alloca".  */
	      if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
		  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
		  : !lookup_attribute ("returns_nonnull",
				       TYPE_ATTRIBUTES (fntype)))
		warned = warning_at (loc, OPT_Walloc_zero,
				     "%Kargument %i value is zero",
				     exp, idx[i] + 1);
	    }
	  else if (tree_int_cst_lt (maxobjsize, args[i]))
	    {
	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
		 mode and with -fno-exceptions as a way to indicate array
		 size overflow.  There's no good way to detect C++98 here
		 so avoid diagnosing these calls for all C++ modes.  */
	      if (i == 0
		  && fn
		  && !args[1]
		  && lang_GNU_CXX ()
		  && DECL_IS_OPERATOR_NEW_P (fn)
		  && integer_all_onesp (args[i]))
		continue;

	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i value %qE exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1, args[i], maxobjsize);
	    }
	}
      else if (TREE_CODE (args[i]) == SSA_NAME
	       && get_size_range (args[i], argrange[i]))
	{
	  /* Verify that the argument's range is not negative (including
	     upper bound of zero).  */
	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] is negative",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1]);
	    }
	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
	    {
	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
				   "%Kargument %i range [%E, %E] exceeds "
				   "maximum object size %E",
				   exp, idx[i] + 1,
				   argrange[i][0], argrange[i][1],
				   maxobjsize);
	    }
	}
    }

  if (!argrange[0])
    return;

  /* For a two-argument alloc_size, validate the product of the two
     arguments if both of their values or ranges are known.  */
  if (!warned && tree_fits_uhwi_p (argrange[0][0])
      && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
      && !integer_onep (argrange[0][0])
      && !integer_onep (argrange[1][0]))
    {
      /* Check for overflow in the product of a function decorated with
	 attribute alloc_size (X, Y).  */
      unsigned szprec = TYPE_PRECISION (size_type_node);
      wide_int x = wi::to_wide (argrange[0][0], szprec);
      wide_int y = wi::to_wide (argrange[1][0], szprec);

      wi::overflow_type vflow;
      wide_int prod = wi::umul (x, y, &vflow);

      if (vflow)
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds %<SIZE_MAX%>",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1);
      else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
			     "%Kproduct %<%E * %E%> of arguments %i and %i "
			     "exceeds maximum object size %E",
			     exp, argrange[0][0], argrange[1][0],
			     idx[0] + 1, idx[1] + 1,
			     maxobjsize);

      if (warned)
	{
	  /* Print the full range of each of the two arguments to make
	     it clear when it is, in fact, in a range and not constant.  */
	  if (argrange[0][0] != argrange [0][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[0] + 1, argrange[0][0], argrange[0][1]);
	  if (argrange[1][0] != argrange [1][1])
	    inform (loc, "argument %i in the range [%E, %E]",
		    idx[1] + 1, argrange[1][0], argrange[1][1]);
	}
    }

  if (warned && fn)
    {
      location_t fnloc = DECL_SOURCE_LOCATION (fn);

      if (DECL_IS_BUILTIN (fn))
	inform (loc,
		"in a call to built-in allocation function %qD", fn);
      else
	inform (fnloc,
		"in a call to allocation function %qD declared here", fn);
    }
}

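/* Editorial aside (not part of the original file): declarations this checker
   applies to, using the documented alloc_size attribute; names are
   hypothetical.  */
#if 0
__attribute__ ((alloc_size (1))) void *my_malloc (__SIZE_TYPE__);
__attribute__ ((alloc_size (1, 2))) void *my_calloc (__SIZE_TYPE__,
						     __SIZE_TYPE__);
/* my_calloc (n, m) is diagnosed when the product n * m can wrap around
   SIZE_MAX or exceed the -Walloc-size-larger-than= limit.  */
#endif
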
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF to
   the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}

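/* Editorial aside (not part of the original file): the GCC "nonstring"
   attribute this helper looks for, and the kind of call it feeds into the
   warning below.  */
#if 0
__attribute__ ((nonstring)) char name[8];  /* May lack a terminating nul.  */
n = strlen (name);		/* Diagnosed: needs a nul-terminated string.  */
n = strnlen (name, sizeof name);	/* OK: explicitly bounded.  */
#endif
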
/* Warn about passing a non-string array/pointer to a function that
   expects a nul-terminated string argument.  */

void
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
  if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return;

  if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
    return;

  /* Avoid clearly invalid calls (more checking done below).  */
  unsigned nargs = call_expr_nargs (exp);
  if (!nargs)
    return;

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* The longest known or possible string argument to one of the comparison
     functions.  If the length is less than the bound it is used instead.
     Since the length is only used for warning and not for code generation
     disable strict mode in the calls to get_range_strlen below.  */
  tree maxlen = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  The exception is strncat where the bound may refer to
     either the destination or the source.  */
  int fncode = DECL_FUNCTION_CODE (fndecl);
  switch (fncode)
    {
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      {
	/* For these, if one argument refers to one or more of a set
	   of string constants or arrays of known size, determine
	   the range of their known or possible lengths and use it
	   conservatively as the bound for the unbounded function,
	   and to adjust the range of the bound of the bounded ones.  */
	for (unsigned argno = 0;
	     argno < MIN (nargs, 2)
	     && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
	  {
	    tree arg = CALL_EXPR_ARG (exp, argno);
	    if (!get_attr_nonstring_decl (arg))
	      {
		c_strlen_data lendata = { };
		get_range_strlen (arg, &lendata, /* eltsize = */ 1);
		maxlen = lendata.maxbound;
	      }
	  }
      }
      /* Fall through.  */

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      if (nargs > 2)
	bound = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNDUP:
      if (nargs > 1)
	bound = CALL_EXPR_ARG (exp, 1);
      break;

    case BUILT_IN_STRNLEN:
      {
	tree arg = CALL_EXPR_ARG (exp, 0);
	if (!get_attr_nonstring_decl (arg))
	  {
	    c_strlen_data lendata = { };
	    get_range_strlen (arg, &lendata, /* eltsize = */ 1);
	    maxlen = lendata.maxbound;
	  }
	if (nargs > 1)
	  bound = CALL_EXPR_ARG (exp, 1);
	break;
      }

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    {
      STRIP_NOPS (bound);
      get_size_range (bound, bndrng);
    }

  location_t loc = EXPR_LOCATION (exp);

  if (bndrng[0])
    {
      /* Diagnose excessive bound prior to the adjustment below and
	 regardless of attribute nonstring.  */
      tree maxobjsize = max_object_size ();
      if (tree_int_cst_lt (maxobjsize, bndrng[0]))
	{
	  if (tree_int_cst_equal (bndrng[0], bndrng[1]))
	    warning_at (loc, OPT_Wstringop_overflow_,
			"%K%qD specified bound %E "
			"exceeds maximum object size %E",
			exp, fndecl, bndrng[0], maxobjsize);
	  else
	    warning_at (loc, OPT_Wstringop_overflow_,
			"%K%qD specified bound [%E, %E] "
			"exceeds maximum object size %E",
			exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
	  return;
	}
    }

  if (maxlen && !integer_all_onesp (maxlen))
    {
      /* Add one for the nul.  */
      maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
			    size_one_node);

      if (!bndrng[0])
	{
	  /* Conservatively use the upper bound of the lengths for
	     both the lower and the upper bound of the operation.  */
	  bndrng[0] = maxlen;
	  bndrng[1] = maxlen;
	  bound = void_type_node;
	}
      else if (maxlen)
	{
	  /* Replace the bound on the operation with the upper bound
	     of the length of the string if the latter is smaller.  */
	  if (tree_int_cst_lt (maxlen, bndrng[0]))
1681 bndrng[0] = maxlen;
1682 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1683 bndrng[1] = maxlen;
974404bd 1684 }
1685 }
1686
0c45740b 1687 /* Iterate over the built-in function's formal arguments and check
1688 each const char* against the actual argument. If the actual
 1689     argument is declared with attribute nonstring, issue a warning unless
1690 the argument's maximum length is bounded. */
1691 function_args_iterator it;
1692 function_args_iter_init (&it, TREE_TYPE (fndecl));
1693
1694 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1695 {
625c5395 1696      /* Avoid iterating past the declared arguments in a call
 1697	  to a function declared without a prototype. */
1698 if (argno >= nargs)
1699 break;
1700
0c45740b 1701 tree argtype = function_args_iter_cond (&it);
1702 if (!argtype)
1703 break;
1704
1705 if (TREE_CODE (argtype) != POINTER_TYPE)
1706 continue;
1707
1708 argtype = TREE_TYPE (argtype);
1709
1710 if (TREE_CODE (argtype) != INTEGER_TYPE
1711 || !TYPE_READONLY (argtype))
1712 continue;
1713
1714 argtype = TYPE_MAIN_VARIANT (argtype);
1715 if (argtype != char_type_node)
1716 continue;
1717
1718 tree callarg = CALL_EXPR_ARG (exp, argno);
1719 if (TREE_CODE (callarg) == ADDR_EXPR)
1720 callarg = TREE_OPERAND (callarg, 0);
1721
1722 /* See if the destination is declared with attribute "nonstring". */
1723 tree decl = get_attr_nonstring_decl (callarg);
1724 if (!decl)
1725 continue;
1726
974404bd 1727 /* The maximum number of array elements accessed. */
0c45740b 1728 offset_int wibnd = 0;
0eff2551 1729
1730 if (argno && fncode == BUILT_IN_STRNCAT)
1731 {
1732 /* See if the bound in strncat is derived from the length
1733 of the strlen of the destination (as it's expected to be).
1734 If so, reset BOUND and FNCODE to trigger a warning. */
1735 tree dstarg = CALL_EXPR_ARG (exp, 0);
1736 if (is_strlen_related_p (dstarg, bound))
1737 {
1738 /* The bound applies to the destination, not to the source,
1739 so reset these to trigger a warning without mentioning
1740 the bound. */
1741 bound = NULL;
1742 fncode = 0;
1743 }
1744 else if (bndrng[1])
1745 /* Use the upper bound of the range for strncat. */
1746 wibnd = wi::to_offset (bndrng[1]);
1747 }
1748 else if (bndrng[0])
1749 /* Use the lower bound of the range for functions other than
1750 strncat. */
0c45740b 1751 wibnd = wi::to_offset (bndrng[0]);
1752
0eff2551 1753 /* Determine the size of the argument array if it is one. */
0c45740b 1754 offset_int asize = wibnd;
0eff2551 1755 bool known_size = false;
1756 tree type = TREE_TYPE (decl);
0c45740b 1757
974404bd 1758 /* Determine the array size. For arrays of unknown bound and
1759 pointers reset BOUND to trigger the appropriate warning. */
0c45740b 1760 if (TREE_CODE (type) == ARRAY_TYPE)
974404bd 1761 {
1762 if (tree arrbnd = TYPE_DOMAIN (type))
1763 {
1764 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
0eff2551 1765 {
1766 asize = wi::to_offset (arrbnd) + 1;
1767 known_size = true;
1768 }
974404bd 1769 }
1770 else if (bound == void_type_node)
1771 bound = NULL_TREE;
1772 }
1773 else if (bound == void_type_node)
1774 bound = NULL_TREE;
0c45740b 1775
0eff2551 1776 /* In a call to strncat with a bound in a range whose lower but
1777 not upper bound is less than the array size, reset ASIZE to
1778 be the same as the bound and the other variable to trigger
 1779	 the appropriate warning below. */
1780 if (fncode == BUILT_IN_STRNCAT
1781 && bndrng[0] != bndrng[1]
1782 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1783 && (!known_size
1784 || wi::ltu_p (asize, wibnd)))
1785 {
1786 asize = wibnd;
1787 bound = NULL_TREE;
1788 fncode = 0;
1789 }
1790
0c45740b 1791 bool warned = false;
1792
bc35ef65 1793 auto_diagnostic_group d;
0c45740b 1794 if (wi::ltu_p (asize, wibnd))
0eff2551 1795 {
1796 if (bndrng[0] == bndrng[1])
1797 warned = warning_at (loc, OPT_Wstringop_overflow_,
1798 "%qD argument %i declared attribute "
1799 "%<nonstring%> is smaller than the specified "
1800 "bound %wu",
1801 fndecl, argno + 1, wibnd.to_uhwi ());
1802 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1803 warned = warning_at (loc, OPT_Wstringop_overflow_,
1804 "%qD argument %i declared attribute "
1805 "%<nonstring%> is smaller than "
1806 "the specified bound [%E, %E]",
1807 fndecl, argno + 1, bndrng[0], bndrng[1]);
1808 else
1809 warned = warning_at (loc, OPT_Wstringop_overflow_,
1810 "%qD argument %i declared attribute "
1811 "%<nonstring%> may be smaller than "
1812 "the specified bound [%E, %E]",
1813 fndecl, argno + 1, bndrng[0], bndrng[1]);
1814 }
1815 else if (fncode == BUILT_IN_STRNCAT)
1816 ; /* Avoid warning for calls to strncat() when the bound
1817 is equal to the size of the non-string argument. */
0c45740b 1818 else if (!bound)
1819 warned = warning_at (loc, OPT_Wstringop_overflow_,
1820 "%qD argument %i declared attribute %<nonstring%>",
1821 fndecl, argno + 1);
1822
1823 if (warned)
1824 inform (DECL_SOURCE_LOCATION (decl),
1825 "argument %qD declared here", decl);
1826 }
1827}
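(Illustrative only, reusing the hypothetical SERIAL declaration from the earlier sketch: calls the warning above does and does not cover.)

   size_t n = strlen (serial);                  /* warned: unbounded use of an
                                                   argument declared nonstring */
   size_t m = strnlen (serial, sizeof serial);  /* not warned: the explicit
                                                   bound covers the array */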
1828
b4a61e77 1829/* Issue an error if CALL_EXPR was flagged as requiring
 1830   tail-call optimization. */
1831
1832static void
1833maybe_complain_about_tail_call (tree call_expr, const char *reason)
1834{
1835 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1836 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1837 return;
1838
1839 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1840}
1841
cb543c54 1842/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
48e1416a 1843 CALL_EXPR EXP.
cb543c54 1844
1845 NUM_ACTUALS is the total number of parameters.
1846
1847 N_NAMED_ARGS is the total number of named arguments.
1848
cd46caee 1849 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1850 value, or null.
1851
cb543c54 1852 FNDECL is the tree code for the target of this call (if known)
1853
1854 ARGS_SO_FAR holds state needed by the target to know where to place
1855 the next argument.
1856
1857 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1858 for arguments which are passed in registers.
1859
 1860   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1861 and may be modified by this routine.
1862
dfe08167 1863 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
47ae02b7 1864 flags which may be modified by this routine.
eaa112a0 1865
4ee9c684 1866 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1867 that requires allocation of stack space.
1868
eaa112a0 1869 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1870 the thunked-to function. */
cb543c54 1871
1872static void
4c9e08a4 1873initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1874 struct arg_data *args,
1875 struct args_size *args_size,
1876 int n_named_args ATTRIBUTE_UNUSED,
cd46caee 1877 tree exp, tree struct_value_addr_value,
d8b9c828 1878 tree fndecl, tree fntype,
39cba157 1879 cumulative_args_t args_so_far,
4c9e08a4 1880 int reg_parm_stack_space,
e0deb08c 1881 rtx *old_stack_level,
1882 poly_int64_pod *old_pending_adj,
eaa112a0 1883 int *must_preallocate, int *ecf_flags,
4ee9c684 1884 bool *may_tailcall, bool call_from_thunk_p)
cb543c54 1885{
39cba157 1886 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
389dd41b 1887 location_t loc = EXPR_LOCATION (exp);
cb543c54 1888
1889 /* Count arg position in order args appear. */
1890 int argpos;
1891
1892 int i;
c87678e4 1893
cb543c54 1894 args_size->constant = 0;
1895 args_size->var = 0;
1896
058a1b7a 1897 bitmap_obstack_initialize (NULL);
1898
cb543c54 1899 /* In this loop, we consider args in the order they are written.
bf29c577 1900 We fill up ARGS from the back. */
cb543c54 1901
bf29c577 1902 i = num_actuals - 1;
cd46caee 1903 {
1e42d5c6 1904 int j = i;
cd46caee 1905 call_expr_arg_iterator iter;
1906 tree arg;
058a1b7a 1907 bitmap slots = NULL;
cd46caee 1908
1909 if (struct_value_addr_value)
1910 {
1911 args[j].tree_value = struct_value_addr_value;
bf29c577 1912 j--;
cd46caee 1913 }
e66d763a 1914 argpos = 0;
cd46caee 1915 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1916 {
1917 tree argtype = TREE_TYPE (arg);
058a1b7a 1918
cd46caee 1919 if (targetm.calls.split_complex_arg
1920 && argtype
1921 && TREE_CODE (argtype) == COMPLEX_TYPE
1922 && targetm.calls.split_complex_arg (argtype))
1923 {
1924 tree subtype = TREE_TYPE (argtype);
cd46caee 1925 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
bf29c577 1926 j--;
cd46caee 1927 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1928 }
1929 else
1930 args[j].tree_value = arg;
bf29c577 1931 j--;
e66d763a 1932 argpos++;
cd46caee 1933 }
058a1b7a 1934
1935 if (slots)
1936 BITMAP_FREE (slots);
cd46caee 1937 }
1938
058a1b7a 1939 bitmap_obstack_release (NULL);
1940
0f15b7f6 1941 /* Extract attribute alloc_size from the type of the called expression
1942 (which could be a function or a function pointer) and if set, store
1943 the indices of the corresponding arguments in ALLOC_IDX, and then
1944 the actual argument(s) at those indices in ALLOC_ARGS. */
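   /* For instance (illustrative only), given a hypothetical declaration
	void *my_alloc (size_t n, size_t sz)
	  __attribute__ ((alloc_size (1, 2)));
      ALLOC_IDX becomes { 0, 1 } and ALLOC_ARGS is later filled in with
      the actual arguments found at those positions.  */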
370e45b9 1945 int alloc_idx[2] = { -1, -1 };
0f15b7f6 1946 if (tree alloc_size = lookup_attribute ("alloc_size",
1947 TYPE_ATTRIBUTES (fntype)))
370e45b9 1948 {
1949 tree args = TREE_VALUE (alloc_size);
1950 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1951 if (TREE_CHAIN (args))
1952 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1953 }
1954
1955 /* Array for up to the two attribute alloc_size arguments. */
1956 tree alloc_args[] = { NULL_TREE, NULL_TREE };
1957
cb543c54 1958 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
bf29c577 1959 for (argpos = 0; argpos < num_actuals; i--, argpos++)
cb543c54 1960 {
cd46caee 1961 tree type = TREE_TYPE (args[i].tree_value);
cb543c54 1962 int unsignedp;
3754d046 1963 machine_mode mode;
cb543c54 1964
cb543c54 1965 /* Replace erroneous argument with constant zero. */
4b72716d 1966 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
cb543c54 1967 args[i].tree_value = integer_zero_node, type = integer_type_node;
1968
8df5a43d 1969 /* If TYPE is a transparent union or record, pass things the way
1970 we would pass the first field of the union or record. We have
1971 already verified that the modes are the same. */
1972 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1973 && TYPE_TRANSPARENT_AGGR (type))
1974 type = TREE_TYPE (first_field (type));
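	 /* Illustrative only: given a hypothetical
	      union uptr { int *ip; const int *cip; }
		__attribute__ ((transparent_union));
	    an argument of type union uptr is passed exactly as its first
	    field, an int *, would be.  */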
cb543c54 1975
1976 /* Decide where to pass this arg.
1977
1978 args[i].reg is nonzero if all or part is passed in registers.
1979
1980 args[i].partial is nonzero if part but not all is passed in registers,
f054eb3c 1981 and the exact value says how many bytes are passed in registers.
cb543c54 1982
1983 args[i].pass_on_stack is nonzero if the argument must at least be
1984 computed on the stack. It may then be loaded back into registers
1985 if args[i].reg is nonzero.
1986
1987 These decisions are driven by the FUNCTION_... macros and must agree
1988 with those made by function.c. */
1989
1990 /* See if this argument should be passed by invisible reference. */
39cba157 1991 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
cc9b8628 1992 type, argpos < n_named_args))
cb543c54 1993 {
41dc12b4 1994 bool callee_copies;
bc4577c4 1995 tree base = NULL_TREE;
41dc12b4 1996
1997 callee_copies
39cba157 1998 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
13f08ee7 1999 type, argpos < n_named_args);
41dc12b4 2000
2001 /* If we're compiling a thunk, pass through invisible references
2002 instead of making a copy. */
eaa112a0 2003 if (call_from_thunk_p
41dc12b4 2004 || (callee_copies
2005 && !TREE_ADDRESSABLE (type)
2006 && (base = get_base_address (args[i].tree_value))
d6230243 2007 && TREE_CODE (base) != SSA_NAME
41dc12b4 2008 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
cb543c54 2009 {
6b7d03d8 2010 /* We may have turned the parameter value into an SSA name.
2011 Go back to the original parameter so we can take the
2012 address. */
2013 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2014 {
2015 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2016 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2017 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2018 }
9502706b 2019 /* Argument setup code may have copied the value to register. We
2020 revert that optimization now because the tail call code must
2021 use the original location. */
2022 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2023 && !MEM_P (DECL_RTL (args[i].tree_value))
2024 && DECL_INCOMING_RTL (args[i].tree_value)
2025 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2026 set_decl_rtl (args[i].tree_value,
2027 DECL_INCOMING_RTL (args[i].tree_value));
2028
006e2d5a 2029 mark_addressable (args[i].tree_value);
2030
41dc12b4 2031 /* We can't use sibcalls if a callee-copied argument is
2032 stored in the current function's frame. */
2033 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
b4a61e77 2034 {
2035 *may_tailcall = false;
2036 maybe_complain_about_tail_call (exp,
2037 "a callee-copied argument is"
dc4ed9fd 2038 " stored in the current"
b4a61e77 2039 " function's frame");
2040 }
c71e72dd 2041
389dd41b 2042 args[i].tree_value = build_fold_addr_expr_loc (loc,
2043 args[i].tree_value);
41dc12b4 2044 type = TREE_TYPE (args[i].tree_value);
2045
9c2a0c05 2046 if (*ecf_flags & ECF_CONST)
2047 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
ce95a955 2048 }
cb543c54 2049 else
2050 {
2051 /* We make a copy of the object and pass the address to the
2052 function being called. */
2053 rtx copy;
2054
4b72716d 2055 if (!COMPLETE_TYPE_P (type)
4852b829 2056 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2057 || (flag_stack_check == GENERIC_STACK_CHECK
2058 && compare_tree_int (TYPE_SIZE_UNIT (type),
2059 STACK_CHECK_MAX_VAR_SIZE) > 0))
cb543c54 2060 {
2061 /* This is a variable-sized object. Make space on the stack
2062 for it. */
cd46caee 2063 rtx size_rtx = expr_size (args[i].tree_value);
cb543c54 2064
2065 if (*old_stack_level == 0)
2066 {
e9c97615 2067 emit_stack_save (SAVE_BLOCK, old_stack_level);
cb543c54 2068 *old_pending_adj = pending_stack_adjust;
2069 pending_stack_adjust = 0;
2070 }
2071
990495a7 2072 /* We can pass TRUE as the 4th argument because we just
2073 saved the stack pointer and will restore it right after
2074 the call. */
5be42b39 2075 copy = allocate_dynamic_stack_space (size_rtx,
2076 TYPE_ALIGN (type),
2077 TYPE_ALIGN (type),
2b34677f 2078 max_int_size_in_bytes
2079 (type),
5be42b39 2080 true);
2081 copy = gen_rtx_MEM (BLKmode, copy);
f7c44134 2082 set_mem_attributes (copy, type, 1);
cb543c54 2083 }
2084 else
0ab48139 2085 copy = assign_temp (type, 1, 0);
cb543c54 2086
292237f3 2087 store_expr (args[i].tree_value, copy, 0, false, false);
cb543c54 2088
9c2a0c05 2089 /* Just change the const function to pure and then let
2090 the next test clear the pure based on
2091 callee_copies. */
2092 if (*ecf_flags & ECF_CONST)
2093 {
2094 *ecf_flags &= ~ECF_CONST;
2095 *ecf_flags |= ECF_PURE;
2096 }
2097
2098 if (!callee_copies && *ecf_flags & ECF_PURE)
2099 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
41dc12b4 2100
2101 args[i].tree_value
389dd41b 2102 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
41dc12b4 2103 type = TREE_TYPE (args[i].tree_value);
4ee9c684 2104 *may_tailcall = false;
b4a61e77 2105 maybe_complain_about_tail_call (exp,
2106 "argument must be passed"
2107 " by copying");
cb543c54 2108 }
2109 }
2110
78a8ed03 2111 unsignedp = TYPE_UNSIGNED (type);
3b2411a8 2112 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2113 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
cb543c54 2114
2115 args[i].unsignedp = unsignedp;
2116 args[i].mode = mode;
7a8d641b 2117
532d84ff 2118 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2119
f387af4f 2120 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2121 argpos < n_named_args);
2122
058a1b7a 2123 if (args[i].reg && CONST_INT_P (args[i].reg))
bb78a1de 2124 args[i].reg = NULL;
058a1b7a 2125
7a8d641b 2126 /* If this is a sibling call and the machine has register windows, the
 2127   register window has to be unwound before calling the routine, so
2128 arguments have to go into the incoming registers. */
f387af4f 2129 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2130 args[i].tail_call_reg
2131 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2132 argpos < n_named_args);
2133 else
2134 args[i].tail_call_reg = args[i].reg;
7a8d641b 2135
cb543c54 2136 if (args[i].reg)
2137 args[i].partial
f054eb3c 2138 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2139 argpos < n_named_args);
cb543c54 2140
0336f0f0 2141 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
cb543c54 2142
2143 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2144 it means that we are to pass this arg in the register(s) designated
2145 by the PARALLEL, but also to pass it in the stack. */
2146 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2147 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2148 args[i].pass_on_stack = 1;
2149
2150 /* If this is an addressable type, we must preallocate the stack
2151 since we must evaluate the object into its final location.
2152
2153 If this is to be passed in both registers and the stack, it is simpler
2154 to preallocate. */
2155 if (TREE_ADDRESSABLE (type)
2156 || (args[i].pass_on_stack && args[i].reg != 0))
2157 *must_preallocate = 1;
2158
cb543c54 2159 /* Compute the stack-size of this argument. */
1e42d5c6 2160 if (args[i].reg == 0 || args[i].partial != 0
058a1b7a 2161 || reg_parm_stack_space > 0
2162 || args[i].pass_on_stack)
cb543c54 2163 locate_and_pad_parm (mode, type,
2164#ifdef STACK_PARMS_IN_REG_PARM_AREA
2165 1,
2166#else
2167 args[i].reg != 0,
2168#endif
2e090bf6 2169 reg_parm_stack_space,
241399f6 2170 args[i].pass_on_stack ? 0 : args[i].partial,
2171 fndecl, args_size, &args[i].locate);
0fee47f4 2172#ifdef BLOCK_REG_PADDING
2173 else
2174 /* The argument is passed entirely in registers. See at which
2175 end it should be padded. */
2176 args[i].locate.where_pad =
2177 BLOCK_REG_PADDING (mode, type,
2178 int_size_in_bytes (type) <= UNITS_PER_WORD);
2179#endif
c87678e4 2180
cb543c54 2181 /* Update ARGS_SIZE, the total stack space for args so far. */
2182
241399f6 2183 args_size->constant += args[i].locate.size.constant;
2184 if (args[i].locate.size.var)
2185 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
cb543c54 2186
2187 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2188 have been used, etc. */
2189
f387af4f 2190 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2191 type, argpos < n_named_args);
370e45b9 2192
2193 /* Store argument values for functions decorated with attribute
2194 alloc_size. */
2195 if (argpos == alloc_idx[0])
2196 alloc_args[0] = args[i].tree_value;
2197 else if (argpos == alloc_idx[1])
2198 alloc_args[1] = args[i].tree_value;
2199 }
2200
2201 if (alloc_args[0])
2202 {
2203 /* Check the arguments of functions decorated with attribute
2204 alloc_size. */
2205 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
cb543c54 2206 }
0c45740b 2207
2208 /* Detect passing non-string arguments to functions expecting
2209 nul-terminated strings. */
2210 maybe_warn_nonstring_arg (fndecl, exp);
cb543c54 2211}
2212
cc45e5e8 2213/* Update ARGS_SIZE to contain the total size for the argument block.
2214 Return the original constant component of the argument block's size.
2215
2216 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2217 for arguments passed in registers. */
2218
e0deb08c 2219static poly_int64
4c9e08a4 2220compute_argument_block_size (int reg_parm_stack_space,
2221 struct args_size *args_size,
60e2260d 2222 tree fndecl ATTRIBUTE_UNUSED,
fa20f865 2223 tree fntype ATTRIBUTE_UNUSED,
4c9e08a4 2224 int preferred_stack_boundary ATTRIBUTE_UNUSED)
cc45e5e8 2225{
e0deb08c 2226 poly_int64 unadjusted_args_size = args_size->constant;
cc45e5e8 2227
4448f543 2228 /* For accumulate outgoing args mode we don't need to align, since the frame
 2229     will already be aligned. Align to STACK_BOUNDARY in order to prevent
35a3065a 2230 backends from generating misaligned frame sizes. */
4448f543 2231 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2232 preferred_stack_boundary = STACK_BOUNDARY;
4448f543 2233
cc45e5e8 2234 /* Compute the actual size of the argument block required. The variable
2235 and constant sizes must be combined, the size may have to be rounded,
2236 and there may be a minimum required size. */
2237
2238 if (args_size->var)
2239 {
2240 args_size->var = ARGS_SIZE_TREE (*args_size);
2241 args_size->constant = 0;
2242
d0285dd8 2243 preferred_stack_boundary /= BITS_PER_UNIT;
2244 if (preferred_stack_boundary > 1)
91b70175 2245 {
2246 /* We don't handle this case yet. To handle it correctly we have
35a3065a 2247 to add the delta, round and subtract the delta.
91b70175 2248 Currently no machine description requires this support. */
e0deb08c 2249 gcc_assert (multiple_p (stack_pointer_delta,
2250 preferred_stack_boundary));
91b70175 2251 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2252 }
cc45e5e8 2253
2254 if (reg_parm_stack_space > 0)
2255 {
2256 args_size->var
2257 = size_binop (MAX_EXPR, args_size->var,
902de8ed 2258 ssize_int (reg_parm_stack_space));
cc45e5e8 2259
cc45e5e8 2260 /* The area corresponding to register parameters is not to count in
2261 the size of the block we need. So make the adjustment. */
fa20f865 2262 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2263 args_size->var
2264 = size_binop (MINUS_EXPR, args_size->var,
2265 ssize_int (reg_parm_stack_space));
cc45e5e8 2266 }
2267 }
2268 else
2269 {
d0285dd8 2270 preferred_stack_boundary /= BITS_PER_UNIT;
60ecc450 2271 if (preferred_stack_boundary < 1)
2272 preferred_stack_boundary = 1;
e0deb08c 2273 args_size->constant = (aligned_upper_bound (args_size->constant
2274 + stack_pointer_delta,
2275 preferred_stack_boundary)
91b70175 2276 - stack_pointer_delta);
cc45e5e8 2277
e0deb08c 2278 args_size->constant = upper_bound (args_size->constant,
2279 reg_parm_stack_space);
cc45e5e8 2280
fa20f865 2281 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 2282 args_size->constant -= reg_parm_stack_space;
cc45e5e8 2283 }
2284 return unadjusted_args_size;
2285}
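(A worked instance of the constant case above, with illustrative numbers; assumes stack_pointer_delta == 0, a 16-byte preferred boundary, reg_parm_stack_space == 0 and no OUTGOING_REG_PARM_STACK_SPACE.)

   /* args_size->constant == 20 on entry.
      aligned_upper_bound (20 + 0, 16) - 0 == 32, so 12 bytes of padding
      are included in the argument block, and the function returns 20,
      the original unadjusted size.  */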
2286
caa1595a 2287/* Precompute parameters as needed for a function call.
04707f1c 2288
dfe08167 2289 FLAGS is mask of ECF_* constants.
04707f1c 2290
04707f1c 2291 NUM_ACTUALS is the number of arguments.
2292
c87678e4 2293 ARGS is an array containing information for each argument; this
2294 routine fills in the INITIAL_VALUE and VALUE fields for each
2295 precomputed argument. */
04707f1c 2296
2297static void
2dd6f9ed 2298precompute_arguments (int num_actuals, struct arg_data *args)
04707f1c 2299{
2300 int i;
2301
8c78c14b 2302 /* If this is a libcall, then precompute all arguments so that we do not
67c155cb 2303 get extraneous instructions emitted as part of the libcall sequence. */
c5dc094f 2304
2305 /* If we preallocated the stack space, and some arguments must be passed
2306 on the stack, then we must precompute any parameter which contains a
2307 function call which will store arguments on the stack.
2308 Otherwise, evaluating the parameter may clobber previous parameters
2309 which have already been stored into the stack. (we have code to avoid
 2310     such a case by saving the outgoing stack arguments, but it results in
2311 worse code) */
2dd6f9ed 2312 if (!ACCUMULATE_OUTGOING_ARGS)
67c155cb 2313 return;
0d568ddf 2314
04707f1c 2315 for (i = 0; i < num_actuals; i++)
67c155cb 2316 {
3b2411a8 2317 tree type;
3754d046 2318 machine_mode mode;
701e46d0 2319
2dd6f9ed 2320 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
c5dc094f 2321 continue;
2322
67c155cb 2323 /* If this is an addressable type, we cannot pre-evaluate it. */
3b2411a8 2324 type = TREE_TYPE (args[i].tree_value);
2325 gcc_assert (!TREE_ADDRESSABLE (type));
04707f1c 2326
67c155cb 2327 args[i].initial_value = args[i].value
8ec3c5c2 2328 = expand_normal (args[i].tree_value);
04707f1c 2329
3b2411a8 2330 mode = TYPE_MODE (type);
67c155cb 2331 if (mode != args[i].mode)
2332 {
3b2411a8 2333 int unsignedp = args[i].unsignedp;
67c155cb 2334 args[i].value
2335 = convert_modes (args[i].mode, mode,
2336 args[i].value, args[i].unsignedp);
3b2411a8 2337
67c155cb 2338 /* CSE will replace this only if it contains args[i].value
2339 pseudo, so convert it down to the declared mode using
2340 a SUBREG. */
2341 if (REG_P (args[i].value)
3b2411a8 2342 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2343 && promote_mode (type, mode, &unsignedp) != args[i].mode)
67c155cb 2344 {
2345 args[i].initial_value
2346 = gen_lowpart_SUBREG (mode, args[i].value);
2347 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
5a9ccd1b 2348 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
67c155cb 2349 }
67c155cb 2350 }
2351 }
04707f1c 2352}
2353
e717ffc2 2354/* Given the current state of MUST_PREALLOCATE and information about
2355 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2356 compute and return the final value for MUST_PREALLOCATE. */
2357
2358static int
48e1416a 2359finalize_must_preallocate (int must_preallocate, int num_actuals,
c2f47e15 2360 struct arg_data *args, struct args_size *args_size)
e717ffc2 2361{
2362 /* See if we have or want to preallocate stack space.
2363
2364 If we would have to push a partially-in-regs parm
2365 before other stack parms, preallocate stack space instead.
2366
2367 If the size of some parm is not a multiple of the required stack
2368 alignment, we must preallocate.
2369
2370 If the total size of arguments that would otherwise create a copy in
2371 a temporary (such as a CALL) is more than half the total argument list
2372 size, preallocation is faster.
2373
2374 Another reason to preallocate is if we have a machine (like the m88k)
2375 where stack alignment is required to be maintained between every
2376 pair of insns, not just when the call is made. However, we assume here
2377 that such machines either do not have push insns (and hence preallocation
2378 would occur anyway) or the problem is taken care of with
2379 PUSH_ROUNDING. */
2380
2381 if (! must_preallocate)
2382 {
2383 int partial_seen = 0;
e0deb08c 2384 poly_int64 copy_to_evaluate_size = 0;
e717ffc2 2385 int i;
2386
2387 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2388 {
2389 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2390 partial_seen = 1;
2391 else if (partial_seen && args[i].reg == 0)
2392 must_preallocate = 1;
2393
2394 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2395 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2396 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2397 || TREE_CODE (args[i].tree_value) == COND_EXPR
2398 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2399 copy_to_evaluate_size
2400 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2401 }
2402
e0deb08c 2403 if (maybe_ne (args_size->constant, 0)
2404 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
e717ffc2 2405 must_preallocate = 1;
2406 }
2407 return must_preallocate;
2408}
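(The size heuristic above in numbers; illustrative only.)

   /* If args_size->constant is 64 bytes and 40 of them come from
      BLKmode arguments that would otherwise be evaluated into a
      temporary (e.g. CALL_EXPRs), then 40 * 2 >= 64 and
      MUST_PREALLOCATE is set.  */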
cc45e5e8 2409
f3012854 2410/* If we preallocated stack space, compute the address of each argument
2411 and store it into the ARGS array.
2412
c87678e4 2413 We need not ensure it is a valid memory address here; it will be
f3012854 2414 validized when it is used.
2415
2416 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2417
2418static void
4c9e08a4 2419compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
f3012854 2420{
2421 if (argblock)
2422 {
2423 rtx arg_reg = argblock;
e0deb08c 2424 int i;
2425 poly_int64 arg_offset = 0;
f3012854 2426
2427 if (GET_CODE (argblock) == PLUS)
e0deb08c 2428 {
2429 arg_reg = XEXP (argblock, 0);
2430 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2431 }
f3012854 2432
2433 for (i = 0; i < num_actuals; i++)
2434 {
241399f6 2435 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2436 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
f3012854 2437 rtx addr;
c5dc0c32 2438 unsigned int align, boundary;
e0deb08c 2439 poly_uint64 units_on_stack = 0;
3754d046 2440 machine_mode partial_mode = VOIDmode;
f3012854 2441
2442 /* Skip this parm if it will not be passed on the stack. */
c2ca1bab 2443 if (! args[i].pass_on_stack
2444 && args[i].reg != 0
2445 && args[i].partial == 0)
f3012854 2446 continue;
2447
aed50899 2448 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2449 continue;
2450
53fdf12a 2451 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
29c05e22 2452 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 2453
2454 if (args[i].partial != 0)
2455 {
2456 /* Only part of the parameter is being passed on the stack.
2457 Generate a simple memory reference of the correct size. */
2458 units_on_stack = args[i].locate.size.constant;
e0deb08c 2459 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
517be012 2460 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
c2ca1bab 2461 args[i].stack = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 2462 set_mem_size (args[i].stack, units_on_stack);
c2ca1bab 2463 }
2464 else
2465 {
2466 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2467 set_mem_attributes (args[i].stack,
2468 TREE_TYPE (args[i].tree_value), 1);
2469 }
c5dc0c32 2470 align = BITS_PER_UNIT;
2471 boundary = args[i].locate.boundary;
e0deb08c 2472 poly_int64 offset_val;
d7ab0e3d 2473 if (args[i].locate.where_pad != PAD_DOWNWARD)
c5dc0c32 2474 align = boundary;
e0deb08c 2475 else if (poly_int_rtx_p (offset, &offset_val))
c5dc0c32 2476 {
e0deb08c 2477 align = least_bit_hwi (boundary);
2478 unsigned int offset_align
2479 = known_alignment (offset_val) * BITS_PER_UNIT;
2480 if (offset_align != 0)
2481 align = MIN (align, offset_align);
c5dc0c32 2482 }
2483 set_mem_align (args[i].stack, align);
f3012854 2484
53fdf12a 2485 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
29c05e22 2486 addr = plus_constant (Pmode, addr, arg_offset);
c2ca1bab 2487
2488 if (args[i].partial != 0)
2489 {
2490 /* Only part of the parameter is being passed on the stack.
 2491		 Generate a simple memory reference of the correct size. */
2493 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
5b2a69fa 2494 set_mem_size (args[i].stack_slot, units_on_stack);
c2ca1bab 2495 }
2496 else
2497 {
2498 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2499 set_mem_attributes (args[i].stack_slot,
2500 TREE_TYPE (args[i].tree_value), 1);
2501 }
c5dc0c32 2502 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
a9f2963b 2503
2504 /* Function incoming arguments may overlap with sibling call
2505 outgoing arguments and we cannot allow reordering of reads
2506 from function arguments with stores to outgoing arguments
2507 of sibling calls. */
ab6ab77e 2508 set_mem_alias_set (args[i].stack, 0);
2509 set_mem_alias_set (args[i].stack_slot, 0);
f3012854 2510 }
2511 }
2512}
c87678e4 2513
f3012854 2514/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2515 in a call instruction.
2516
2517 FNDECL is the tree node for the target function. For an indirect call
2518 FNDECL will be NULL_TREE.
2519
95672afe 2520 ADDR is the operand 0 of CALL_EXPR for this call. */
f3012854 2521
2522static rtx
4c9e08a4 2523rtx_for_function_call (tree fndecl, tree addr)
f3012854 2524{
2525 rtx funexp;
2526
2527 /* Get the function to call, in the form of RTL. */
2528 if (fndecl)
2529 {
3d053e06 2530 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
ea259bbe 2531 TREE_USED (fndecl) = 1;
f3012854 2532
2533 /* Get a SYMBOL_REF rtx for the function address. */
2534 funexp = XEXP (DECL_RTL (fndecl), 0);
2535 }
2536 else
2537 /* Generate an rtx (probably a pseudo-register) for the address. */
2538 {
2539 push_temp_slots ();
8ec3c5c2 2540 funexp = expand_normal (addr);
c87678e4 2541 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
f3012854 2542 }
2543 return funexp;
2544}
2545
3c56e0c1 2546/* Return the static chain for this function, if any. */
2547
2548rtx
2549rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2550{
2551 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2552 return NULL;
2553
2554 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2555}
2556
74c02416 2557/* Internal state for internal_arg_pointer_based_exp and its helpers. */
2558static struct
2559{
2560 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2561 or NULL_RTX if none has been scanned yet. */
3663becd 2562 rtx_insn *scan_start;
74c02416 2563 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2564 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2565 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
 2566     with a fixed offset, or PC if the offset is variable or unknown. */
f1f41a6c 2567 vec<rtx> cache;
74c02416 2568} internal_arg_pointer_exp_state;
2569
474ce66a 2570static rtx internal_arg_pointer_based_exp (const_rtx, bool);
74c02416 2571
2572/* Helper function for internal_arg_pointer_based_exp. Scan insns in
2573 the tail call sequence, starting with first insn that hasn't been
2574 scanned yet, and note for each pseudo on the LHS whether it is based
 2575    on crtl->args.internal_arg_pointer or not, and what offset from
2576 that pointer it has. */
2577
2578static void
2579internal_arg_pointer_based_exp_scan (void)
2580{
3663becd 2581 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
74c02416 2582
2583 if (scan_start == NULL_RTX)
2584 insn = get_insns ();
2585 else
2586 insn = NEXT_INSN (scan_start);
2587
2588 while (insn)
2589 {
2590 rtx set = single_set (insn);
2591 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2592 {
2593 rtx val = NULL_RTX;
2594 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2595 /* Punt on pseudos set multiple times. */
f1f41a6c 2596 if (idx < internal_arg_pointer_exp_state.cache.length ()
2597 && (internal_arg_pointer_exp_state.cache[idx]
74c02416 2598 != NULL_RTX))
2599 val = pc_rtx;
2600 else
2601 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2602 if (val != NULL_RTX)
2603 {
f1f41a6c 2604 if (idx >= internal_arg_pointer_exp_state.cache.length ())
9af5ce0c 2605 internal_arg_pointer_exp_state.cache
2606 .safe_grow_cleared (idx + 1);
f1f41a6c 2607 internal_arg_pointer_exp_state.cache[idx] = val;
74c02416 2608 }
2609 }
2610 if (NEXT_INSN (insn) == NULL_RTX)
2611 scan_start = insn;
2612 insn = NEXT_INSN (insn);
2613 }
2614
2615 internal_arg_pointer_exp_state.scan_start = scan_start;
2616}
2617
74c02416 2618/* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
2619 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
 2620    it with a fixed offset, or PC if the offset is variable or unknown.
2621 TOPLEVEL is true if the function is invoked at the topmost level. */
2622
2623static rtx
474ce66a 2624internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
74c02416 2625{
2626 if (CONSTANT_P (rtl))
2627 return NULL_RTX;
2628
2629 if (rtl == crtl->args.internal_arg_pointer)
2630 return const0_rtx;
2631
2632 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2633 return NULL_RTX;
2634
e0deb08c 2635 poly_int64 offset;
2636 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
74c02416 2637 {
2638 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2639 if (val == NULL_RTX || val == pc_rtx)
2640 return val;
e0deb08c 2641 return plus_constant (Pmode, val, offset);
74c02416 2642 }
2643
2644 /* When called at the topmost level, scan pseudo assignments in between the
2645 last scanned instruction in the tail call sequence and the latest insn
2646 in that sequence. */
2647 if (toplevel)
2648 internal_arg_pointer_based_exp_scan ();
2649
2650 if (REG_P (rtl))
2651 {
2652 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
f1f41a6c 2653 if (idx < internal_arg_pointer_exp_state.cache.length ())
2654 return internal_arg_pointer_exp_state.cache[idx];
74c02416 2655
2656 return NULL_RTX;
2657 }
2658
474ce66a 2659 subrtx_iterator::array_type array;
2660 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2661 {
2662 const_rtx x = *iter;
2663 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2664 return pc_rtx;
2665 if (MEM_P (x))
2666 iter.skip_subrtxes ();
2667 }
74c02416 2668
2669 return NULL_RTX;
2670}
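(A sketch of the mapping implemented above; the RTL forms are illustrative.)

   /* crtl->args.internal_arg_pointer		     -> (const_int 0)
      (plus internal_arg_pointer (const_int 16))     -> (const_int 16)
      a pseudo never based on the arg pointer	     -> NULL_RTX
      a pseudo based on it at a variable or unknown
      offset					     -> pc_rtx  */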
2671
e0deb08c 2672/* Return true if SIZE bytes starting from address ADDR might overlap an
2673 already-clobbered argument area. This function is used to determine
2674 if we should give up a sibcall. */
ff6c0ab2 2675
2676static bool
e0deb08c 2677mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
ff6c0ab2 2678{
e0deb08c 2679 poly_int64 i;
2680 unsigned HOST_WIDE_INT start, end;
74c02416 2681 rtx val;
ff6c0ab2 2682
e0deb08c 2683 if (bitmap_empty_p (stored_args_map)
2684 && stored_args_watermark == HOST_WIDE_INT_M1U)
9ddeff7e 2685 return false;
74c02416 2686 val = internal_arg_pointer_based_exp (addr, true);
2687 if (val == NULL_RTX)
2688 return false;
e0deb08c 2689 else if (!poly_int_rtx_p (val, &i))
cc0595c0 2690 return true;
e0deb08c 2691
2692 if (known_eq (size, 0U))
2693 return false;
a8b58ffb 2694
2695 if (STACK_GROWS_DOWNWARD)
2696 i -= crtl->args.pretend_args_size;
2697 else
2698 i += crtl->args.pretend_args_size;
2699
ccccd62c 2700 if (ARGS_GROW_DOWNWARD)
2701 i = -i - size;
2702
e0deb08c 2703 /* We can ignore any references to the function's pretend args,
2704 which at this point would manifest as negative values of I. */
2705 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2706 return false;
ff6c0ab2 2707
e0deb08c 2708 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2709 if (!(i + size).is_constant (&end))
2710 end = HOST_WIDE_INT_M1U;
2711
2712 if (end > stored_args_watermark)
2713 return true;
2714
2715 end = MIN (end, SBITMAP_SIZE (stored_args_map));
2716 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2717 if (bitmap_bit_p (stored_args_map, k))
2718 return true;
ff6c0ab2 2719
2720 return false;
2721}
2722
cde25025 2723/* Do the register loads required for any wholly-register parms or any
2724 parms which are passed both on the stack and in a register. Their
c87678e4 2725 expressions were already evaluated.
cde25025 2726
2727 Mark all register-parms as living through the call, putting these USE
4c9e08a4 2728 insns in the CALL_INSN_FUNCTION_USAGE field.
2729
dc537795 2730 When IS_SIBCALL, perform the check_sibcall_argument_overlap
42b11544 2731 checking, setting *SIBCALL_FAILURE if appropriate. */
cde25025 2732
2733static void
4c9e08a4 2734load_register_parameters (struct arg_data *args, int num_actuals,
2735 rtx *call_fusage, int flags, int is_sibcall,
2736 int *sibcall_failure)
cde25025 2737{
2738 int i, j;
2739
cde25025 2740 for (i = 0; i < num_actuals; i++)
cde25025 2741 {
0e0be288 2742 rtx reg = ((flags & ECF_SIBCALL)
2743 ? args[i].tail_call_reg : args[i].reg);
cde25025 2744 if (reg)
2745 {
5f4cd670 2746 int partial = args[i].partial;
2747 int nregs;
8e2882f4 2748 poly_int64 size = 0;
2749 HOST_WIDE_INT const_size = 0;
3663becd 2750 rtx_insn *before_arg = get_last_insn ();
83272ab4 2751 /* Set non-negative if we must move a word at a time, even if
 2752	    just one word (e.g., partial == 4 && mode == DFmode). Set
2753 to -1 if we just use a normal move insn. This value can be
2754 zero if the argument is a zero size structure. */
5f4cd670 2755 nregs = -1;
f054eb3c 2756 if (GET_CODE (reg) == PARALLEL)
2757 ;
2758 else if (partial)
2759 {
2760 gcc_assert (partial % UNITS_PER_WORD == 0);
2761 nregs = partial / UNITS_PER_WORD;
2762 }
5f4cd670 2763 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2764 {
8e2882f4 2765 /* Variable-sized parameters should be described by a
2766 PARALLEL instead. */
2767 const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2768 gcc_assert (const_size >= 0);
2769 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2770 size = const_size;
5f4cd670 2771 }
2772 else
2773 size = GET_MODE_SIZE (args[i].mode);
cde25025 2774
2775 /* Handle calls that pass values in multiple non-contiguous
2776 locations. The Irix 6 ABI has examples of this. */
2777
2778 if (GET_CODE (reg) == PARALLEL)
b600a907 2779 emit_group_move (reg, args[i].parallel_value);
cde25025 2780
2781 /* If simple case, just do move. If normal partial, store_one_arg
2782 has already loaded the register for us. In all other cases,
2783 load the register(s) from memory. */
2784
8e67abab 2785 else if (nregs == -1)
2786 {
2787 emit_move_insn (reg, args[i].value);
5f4cd670 2788#ifdef BLOCK_REG_PADDING
8e67abab 2789 /* Handle case where we have a value that needs shifting
2790 up to the msb. eg. a QImode value and we're padding
2791 upward on a BYTES_BIG_ENDIAN machine. */
8e2882f4 2792 if (args[i].locate.where_pad
2793 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
8e67abab 2794 {
8e2882f4 2795 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2796 if (maybe_lt (size, UNITS_PER_WORD))
2797 {
2798 rtx x;
2799 poly_int64 shift
2800 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2801
2802 /* Assigning REG here rather than a temp makes
2803 CALL_FUSAGE report the whole reg as used.
2804 Strictly speaking, the call only uses SIZE
2805 bytes at the msb end, but it doesn't seem worth
2806 generating rtl to say that. */
2807 reg = gen_rtx_REG (word_mode, REGNO (reg));
2808 x = expand_shift (LSHIFT_EXPR, word_mode,
2809 reg, shift, reg, 1);
2810 if (x != reg)
2811 emit_move_insn (reg, x);
2812 }
8e67abab 2813 }
5f4cd670 2814#endif
8e67abab 2815 }
cde25025 2816
2817 /* If we have pre-computed the values to put in the registers in
2818 the case of non-aligned structures, copy them in now. */
2819
2820 else if (args[i].n_aligned_regs != 0)
2821 for (j = 0; j < args[i].n_aligned_regs; j++)
2822 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2823 args[i].aligned_regs[j]);
2824
e2e0ef92 2825 else if (partial == 0 || args[i].pass_on_stack)
5f4cd670 2826 {
8e2882f4 2827 /* SIZE and CONST_SIZE are 0 for partial arguments and
2828 the size of a BLKmode type otherwise. */
2829 gcc_checking_assert (known_eq (size, const_size));
d2b9158b 2830 rtx mem = validize_mem (copy_rtx (args[i].value));
5f4cd670 2831
e2e0ef92 2832 /* Check for overlap with already clobbered argument area,
 2833	       provided that this has non-zero size. */
ff6c0ab2 2834 if (is_sibcall
8e2882f4 2835 && const_size != 0
e0deb08c 2836 && (mem_might_overlap_already_clobbered_arg_p
8e2882f4 2837 (XEXP (args[i].value, 0), const_size)))
ff6c0ab2 2838 *sibcall_failure = 1;
2839
8e2882f4 2840 if (const_size % UNITS_PER_WORD == 0
72f2d6cc 2841 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2842 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2843 else
2844 {
2845 if (nregs > 1)
2846 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2847 args[i].mode);
2848 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2849 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
8e2882f4 2850 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
292237f3 2851 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
5d77cce2 2852 word_mode, word_mode, false,
2853 NULL);
72f2d6cc 2854 if (BYTES_BIG_ENDIAN)
2855 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2856 BITS_PER_WORD - bitsize, dest, 1);
2857 if (x != dest)
2858 emit_move_insn (dest, x);
2859 }
2860
5f4cd670 2861 /* Handle a BLKmode that needs shifting. */
8e2882f4 2862 if (nregs == 1 && const_size < UNITS_PER_WORD
2c267f1a 2863#ifdef BLOCK_REG_PADDING
d7ab0e3d 2864 && args[i].locate.where_pad == PAD_DOWNWARD
2c267f1a 2865#else
2866 && BYTES_BIG_ENDIAN
2867#endif
72f2d6cc 2868 )
5f4cd670 2869 {
72f2d6cc 2870 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
8e2882f4 2871 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
72f2d6cc 2872 enum tree_code dir = (BYTES_BIG_ENDIAN
2873 ? RSHIFT_EXPR : LSHIFT_EXPR);
2874 rtx x;
5f4cd670 2875
72f2d6cc 2876 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2877 if (x != dest)
2878 emit_move_insn (dest, x);
5f4cd670 2879 }
5f4cd670 2880 }
cde25025 2881
42b11544 2882 /* When a parameter is a block, and perhaps in other cases, it is
2883 possible that it did a load from an argument slot that was
6a8fa8e2 2884 already clobbered. */
42b11544 2885 if (is_sibcall
2886 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2887 *sibcall_failure = 1;
2888
cde25025 2889 /* Handle calls that pass values in multiple non-contiguous
2890 locations. The Irix 6 ABI has examples of this. */
2891 if (GET_CODE (reg) == PARALLEL)
2892 use_group_regs (call_fusage, reg);
2893 else if (nregs == -1)
b4eeceb9 2894 use_reg_mode (call_fusage, reg,
2895 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
c75d013c 2896 else if (nregs > 0)
2897 use_regs (call_fusage, REGNO (reg), nregs);
cde25025 2898 }
2899 }
2900}
2901
92e1ef5b 2902/* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2903 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2904 bytes, then we would need to push some additional bytes to pad the
e0deb08c 2905   arguments. So, we try to adjust the stack pointer by an
481feae3 2906 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
 2907   bytes. Then, when the arguments are pushed, the stack will be perfectly
e0deb08c 2908 aligned.
92e1ef5b 2909
e0deb08c 2910 Return true if this optimization is possible, storing the adjustment
2911 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2912 bytes that should be popped after the call. */
2913
2914static bool
2915combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2916 poly_int64 unadjusted_args_size,
4c9e08a4 2917 struct args_size *args_size,
38413c80 2918 unsigned int preferred_unit_stack_boundary)
92e1ef5b 2919{
2920 /* The number of bytes to pop so that the stack will be
2921 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
e0deb08c 2922 poly_int64 adjustment;
92e1ef5b 2923 /* The alignment of the stack after the arguments are pushed, if we
 2924      just pushed the arguments without adjusting the stack here. */
38413c80 2925 unsigned HOST_WIDE_INT unadjusted_alignment;
92e1ef5b 2926
e0deb08c 2927 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2928 preferred_unit_stack_boundary,
2929 &unadjusted_alignment))
2930 return false;
92e1ef5b 2931
2932 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2933 as possible -- leaving just enough left to cancel out the
2934 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2935 PENDING_STACK_ADJUST is non-negative, and congruent to
2936 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2937
2938 /* Begin by trying to pop all the bytes. */
e0deb08c 2939 unsigned HOST_WIDE_INT tmp_misalignment;
2940 if (!known_misalignment (pending_stack_adjust,
2941 preferred_unit_stack_boundary,
2942 &tmp_misalignment))
2943 return false;
2944 unadjusted_alignment -= tmp_misalignment;
92e1ef5b 2945 adjustment = pending_stack_adjust;
2946 /* Push enough additional bytes that the stack will be aligned
2947 after the arguments are pushed. */
b47bf174 2948 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2949 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
c87678e4 2950
e0deb08c 2951 /* We need to know whether the adjusted argument size
2952 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2953 or a deallocation. */
2954 if (!ordered_p (adjustment, unadjusted_args_size))
2955 return false;
2956
92e1ef5b 2957  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2958 bytes after the call. The right number is the entire
2959 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2960 by the arguments in the first place. */
c87678e4 2961 args_size->constant
92e1ef5b 2962 = pending_stack_adjust - adjustment + unadjusted_args_size;
2963
e0deb08c 2964 *adjustment_out = adjustment;
2965 return true;
92e1ef5b 2966}
2967
7ecc63d3 2968/* Scan expression X to see whether it dereferences any argument slots
 2969   we have already clobbered with tail-call arguments (as noted in the
 2970   stored_args_map bitmap).
d10cfa8d 2971   Return nonzero if X dereferences such an argument slot,
 2972   zero otherwise. */
2973
2974static int
4c9e08a4 2975check_sibcall_argument_overlap_1 (rtx x)
7ecc63d3 2976{
2977 RTX_CODE code;
2978 int i, j;
7ecc63d3 2979 const char *fmt;
2980
2981 if (x == NULL_RTX)
2982 return 0;
2983
2984 code = GET_CODE (x);
2985
cc0595c0 2986 /* We need not check the operands of the CALL expression itself. */
2987 if (code == CALL)
2988 return 0;
2989
7ecc63d3 2990 if (code == MEM)
e0deb08c 2991 return (mem_might_overlap_already_clobbered_arg_p
2992 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
7ecc63d3 2993
c87678e4 2994 /* Scan all subexpressions. */
7ecc63d3 2995 fmt = GET_RTX_FORMAT (code);
2996 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2997 {
2998 if (*fmt == 'e')
c87678e4 2999 {
3000 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3001 return 1;
3002 }
7ecc63d3 3003 else if (*fmt == 'E')
c87678e4 3004 {
3005 for (j = 0; j < XVECLEN (x, i); j++)
3006 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3007 return 1;
3008 }
7ecc63d3 3009 }
3010 return 0;
7ecc63d3 3011}
3012
 3013/* Scan the sequence after INSN to see whether it dereferences any argument
 3014   slots we have already clobbered with tail-call arguments (as noted in the
42b11544 3015   stored_args_map bitmap). If MARK_STORED_ARGS_MAP, afterwards add the
 3016   stack slots of ARG to the stored_args_map bitmap (when ARG is a register,
 3017   MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after
 3018   INSN dereferences such an argument slot, zero otherwise. */
7ecc63d3 3019
3020static int
3663becd 3021check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3022 int mark_stored_args_map)
c87678e4 3023{
e0deb08c 3024 poly_uint64 low, high;
3025 unsigned HOST_WIDE_INT const_low, const_high;
7ecc63d3 3026
3027 if (insn == NULL_RTX)
3028 insn = get_insns ();
3029 else
3030 insn = NEXT_INSN (insn);
3031
3032 for (; insn; insn = NEXT_INSN (insn))
c87678e4 3033 if (INSN_P (insn)
3034 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
7ecc63d3 3035 break;
3036
42b11544 3037 if (mark_stored_args_map)
3038 {
ccccd62c 3039 if (ARGS_GROW_DOWNWARD)
3040 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3041 else
3042 low = arg->locate.slot_offset.constant;
e0deb08c 3043 high = low + arg->locate.size.constant;
db10eec8 3044
e0deb08c 3045 const_low = constant_lower_bound (low);
3046 if (high.is_constant (&const_high))
3047 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3048 bitmap_set_bit (stored_args_map, i);
3049 else
3050 stored_args_watermark = MIN (stored_args_watermark, const_low);
42b11544 3051 }
7ecc63d3 3052 return insn != NULL_RTX;
3053}
3054
05d18e8b 3055/* Given that a function returns a value of mode MODE at the most
3056 significant end of hard register VALUE, shift VALUE left or right
3057 as specified by LEFT_P. Return true if some action was needed. */
2c8ff1ed 3058
05d18e8b 3059bool
3754d046 3060shift_return_value (machine_mode mode, bool left_p, rtx value)
2c8ff1ed 3061{
05d18e8b 3062 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
bd39703a 3063 machine_mode value_mode = GET_MODE (value);
eafbcd13 3064 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3065
3066 if (known_eq (shift, 0))
05d18e8b 3067 return false;
3068
3069 /* Use ashr rather than lshr for right shifts. This is for the benefit
3070 of the MIPS port, which requires SImode values to be sign-extended
3071 when stored in 64-bit registers. */
bd39703a 3072 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3073 value, gen_int_shift_amount (value_mode, shift),
3074 value, 1, OPTAB_WIDEN))
05d18e8b 3075 gcc_unreachable ();
3076 return true;
2c8ff1ed 3077}
3078
90af1361 3079/* If X is a likely-spilled register value, copy it to a pseudo
3080 register and return that register. Return X otherwise. */
3081
3082static rtx
3083avoid_likely_spilled_reg (rtx x)
3084{
f4e36c33 3085 rtx new_rtx;
90af1361 3086
3087 if (REG_P (x)
3088 && HARD_REGISTER_P (x)
24dd0668 3089 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
90af1361 3090 {
3091 /* Make sure that we generate a REG rather than a CONCAT.
3092 Moves into CONCATs can need nontrivial instructions,
3093 and the whole point of this function is to avoid
3094 using the hard register directly in such a situation. */
3095 generating_concat_p = 0;
f4e36c33 3096 new_rtx = gen_reg_rtx (GET_MODE (x));
90af1361 3097 generating_concat_p = 1;
f4e36c33 3098 emit_move_insn (new_rtx, x);
3099 return new_rtx;
90af1361 3100 }
3101 return x;
3102}
3103
80e11038 3104/* Helper function for expand_call.
 3105   Return false if EXP is not implementable as a sibling call. */
3106
3107static bool
3108can_implement_as_sibling_call_p (tree exp,
3109 rtx structure_value_addr,
3110 tree funtype,
869bb2b6 3111 int reg_parm_stack_space ATTRIBUTE_UNUSED,
80e11038 3112 tree fndecl,
3113 int flags,
3114 tree addr,
3115 const args_size &args_size)
3116{
3117 if (!targetm.have_sibcall_epilogue ())
b4a61e77 3118 {
3119 maybe_complain_about_tail_call
3120 (exp,
3121 "machine description does not have"
3122 " a sibcall_epilogue instruction pattern");
3123 return false;
3124 }
80e11038 3125
3126 /* Doing sibling call optimization needs some work, since
3127 structure_value_addr can be allocated on the stack.
3128 It does not seem worth the effort since few optimizable
3129 sibling calls will return a structure. */
3130 if (structure_value_addr != NULL_RTX)
b4a61e77 3131 {
3132 maybe_complain_about_tail_call (exp, "callee returns a structure");
3133 return false;
3134 }
80e11038 3135
3136#ifdef REG_PARM_STACK_SPACE
f4d3c071 3137 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
80e11038 3138 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3139 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3140 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
b4a61e77 3141 {
3142 maybe_complain_about_tail_call (exp,
3143 "inconsistent size of stack space"
3144 " allocated for arguments which are"
3145 " passed in registers");
3146 return false;
3147 }
80e11038 3148#endif
3149
3150 /* Check whether the target is able to optimize the call
3151 into a sibcall. */
3152 if (!targetm.function_ok_for_sibcall (fndecl, exp))
b4a61e77 3153 {
3154 maybe_complain_about_tail_call (exp,
3155 "target is not able to optimize the"
3156 " call into a sibling call");
3157 return false;
3158 }
80e11038 3159
3160 /* Functions that do not return exactly once may not be sibcall
3161 optimized. */
b4a61e77 3162 if (flags & ECF_RETURNS_TWICE)
3163 {
3164 maybe_complain_about_tail_call (exp, "callee returns twice");
3165 return false;
3166 }
3167 if (flags & ECF_NORETURN)
3168 {
3169 maybe_complain_about_tail_call (exp, "callee does not return");
3170 return false;
3171 }
80e11038 3172
3173 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
b4a61e77 3174 {
3175 maybe_complain_about_tail_call (exp, "volatile function type");
3176 return false;
3177 }
80e11038 3178
3179 /* If the called function is nested in the current one, it might access
3180 some of the caller's arguments, but could clobber them beforehand if
3181 the argument areas are shared. */
3182 if (fndecl && decl_function_context (fndecl) == current_function_decl)
b4a61e77 3183 {
3184 maybe_complain_about_tail_call (exp, "nested function");
3185 return false;
3186 }
80e11038 3187
3188 /* If this function requires more stack slots than the current
3189 function, we cannot change it into a sibling call.
3190 crtl->args.pretend_args_size is not part of the
3191 stack allocated by our caller. */
e0deb08c 3192 if (maybe_gt (args_size.constant,
3193 crtl->args.size - crtl->args.pretend_args_size))
b4a61e77 3194 {
3195 maybe_complain_about_tail_call (exp,
3196 "callee required more stack slots"
3197 " than the caller");
3198 return false;
3199 }
80e11038 3200
3201 /* If the callee pops its own arguments, then it must pop exactly
3202 the same number of arguments as the current function. */
e0deb08c 3203 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3204 args_size.constant),
3205 targetm.calls.return_pops_args (current_function_decl,
3206 TREE_TYPE
3207 (current_function_decl),
3208 crtl->args.size)))
b4a61e77 3209 {
3210 maybe_complain_about_tail_call (exp,
3211 "inconsistent number of"
3212 " popped arguments");
3213 return false;
3214 }
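
 /* Concrete instance (illustrative, i386-style): a stdcall callee with
    8 bytes of arguments pops those 8 bytes itself, while a cdecl caller
    pops none of its own incoming arguments; return_pops_args then
    yields 8 versus 0, the maybe_ne test above fires, and the sibcall
    is rejected because the two functions would leave the stack pointer
    in different places on return.  */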
80e11038 3215
3216 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
b4a61e77 3217 {
3218 maybe_complain_about_tail_call (exp, "frontend does not support"
3219 " sibling call");
3220 return false;
3221 }
80e11038 3222
3223 /* All checks passed. */
3224 return true;
3225}
3226
2e24a52c 3227/* Update stack alignment when the parameter is passed on the stack
3228 since the outgoing parameter requires extra alignment on the calling
3229 function side. */
3230
3231static void
3232update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3233{
3234 if (crtl->stack_alignment_needed < locate->boundary)
3235 crtl->stack_alignment_needed = locate->boundary;
3236 if (crtl->preferred_stack_boundary < locate->boundary)
3237 crtl->preferred_stack_boundary = locate->boundary;
3238}
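
/* Example (illustrative): an argument passed on the stack with
   locate->boundary == 256 (say, a 32-byte vector on a target whose
   default stack boundary is 128 bits) raises both
   crtl->stack_alignment_needed and crtl->preferred_stack_boundary to
   256 bits, so the caller sets up a sufficiently aligned outgoing
   argument area.  */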
3239
c2f47e15 3240/* Generate all the code for a CALL_EXPR exp
66d433c7 3241 and return an rtx for its value.
3242 Store the value in TARGET (specified as an rtx) if convenient.
3243 If the value is stored in TARGET then TARGET is returned.
3244 If IGNORE is nonzero, then we ignore the value of the function call. */
3245
3246rtx
4c9e08a4 3247expand_call (tree exp, rtx target, int ignore)
66d433c7 3248{
60ecc450 3249 /* Nonzero if we are currently expanding a call. */
3250 static int currently_expanding_call = 0;
3251
66d433c7 3252 /* RTX for the function to be called. */
3253 rtx funexp;
60ecc450 3254 /* Sequence of insns to perform a normal "call". */
3663becd 3255 rtx_insn *normal_call_insns = NULL;
4ee9c684 3256 /* Sequence of insns to perform a tail "call". */
3663becd 3257 rtx_insn *tail_call_insns = NULL;
66d433c7 3258 /* Data type of the function. */
3259 tree funtype;
915e81b8 3260 tree type_arg_types;
16c9337c 3261 tree rettype;
66d433c7 3262 /* Declaration of the function being called,
3263 or 0 if the function is computed (not known by name). */
3264 tree fndecl = 0;
e100aadc 3265 /* The type of the function being called. */
3266 tree fntype;
4ee9c684 3267 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
b4a61e77 3268 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
60ecc450 3269 int pass;
66d433c7 3270
3271 /* Register in which non-BLKmode value will be returned,
3272 or 0 if no value or if value is BLKmode. */
3273 rtx valreg;
3274 /* Address where we should return a BLKmode value;
3275 0 if value not BLKmode. */
3276 rtx structure_value_addr = 0;
3277 /* Nonzero if that address is being passed by treating it as
3278 an extra, implicit first parameter. Otherwise,
3279 it is passed by being copied directly into struct_value_rtx. */
3280 int structure_value_addr_parm = 0;
cd46caee 3281 /* Holds the value of the implicit argument for the struct value. */
3282 tree structure_value_addr_value = NULL_TREE;
66d433c7 3283 /* Size of aggregate value wanted, or zero if none wanted
3284 or if we are using the non-reentrant PCC calling convention
3285 or expecting the value in registers. */
e967c3ed 3286 poly_int64 struct_value_size = 0;
66d433c7 3287 /* Nonzero if called function returns an aggregate in memory PCC style,
3288 by returning the address of where to find it. */
3289 int pcc_struct_value = 0;
45550790 3290 rtx struct_value = 0;
66d433c7 3291
3292 /* Number of actual parameters in this call, including struct value addr. */
3293 int num_actuals;
3294 /* Number of named args. Args after this are anonymous ones
3295 and they must all go on the stack. */
3296 int n_named_args;
cd46caee 3297 /* Number of complex actual arguments that need to be split. */
3298 int num_complex_actuals = 0;
66d433c7 3299
3300 /* Vector of information about each argument.
3301 Arguments are numbered in the order they will be pushed,
3302 not the order they are written. */
3303 struct arg_data *args;
3304
3305 /* Total size in bytes of all the stack-parms scanned so far. */
3306 struct args_size args_size;
0e0be288 3307 struct args_size adjusted_args_size;
66d433c7 3308 /* Size of arguments before any adjustments (such as rounding). */
e0deb08c 3309 poly_int64 unadjusted_args_size;
66d433c7 3310 /* Data on reg parms scanned so far. */
39cba157 3311 CUMULATIVE_ARGS args_so_far_v;
3312 cumulative_args_t args_so_far;
66d433c7 3313 /* Nonzero if a reg parm has been scanned. */
3314 int reg_parm_seen;
a50ca374 3315 /* Nonzero if this is an indirect function call. */
66d433c7 3316
c87678e4 3317 /* Nonzero if we must avoid push-insns in the args for this call.
66d433c7 3318 If stack space is allocated for register parameters, but not by the
3319 caller, then it is preallocated in the fixed part of the stack frame.
3320 So the entire argument block must then be preallocated (i.e., we
3321 ignore PUSH_ROUNDING in that case). */
3322
4448f543 3323 int must_preallocate = !PUSH_ARGS;
66d433c7 3324
eb2f80f3 3325 /* Size of the stack reserved for parameter registers. */
2d7187c2 3326 int reg_parm_stack_space = 0;
3327
66d433c7 3328 /* Address of space preallocated for stack parms
3329 (on machines that lack push insns), or 0 if space not preallocated. */
3330 rtx argblock = 0;
3331
c8010b80 3332 /* Mask of ECF_ and ERF_ flags. */
dfe08167 3333 int flags = 0;
c8010b80 3334 int return_flags = 0;
4448f543 3335#ifdef REG_PARM_STACK_SPACE
66d433c7 3336 /* Define the boundary of the register parm stack space that needs to be
6e96b626 3337 saved, if any. */
3338 int low_to_save, high_to_save;
66d433c7 3339 rtx save_area = 0; /* Place that it is saved */
3340#endif
3341
e0deb08c 3342 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
66d433c7 3343 char *initial_stack_usage_map = stack_usage_map;
e0deb08c 3344 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
a331ea1b 3345 char *stack_usage_map_buf = NULL;
66d433c7 3346
e0deb08c 3347 poly_int64 old_stack_allocated;
9069face 3348
3349 /* State variables to track stack modifications. */
66d433c7 3350 rtx old_stack_level = 0;
9069face 3351 int old_stack_arg_under_construction = 0;
e0deb08c 3352 poly_int64 old_pending_adj = 0;
66d433c7 3353 int old_inhibit_defer_pop = inhibit_defer_pop;
9069face 3354
3355 /* Some stack pointer alterations we make are performed via
3356 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3357 which we then also need to save/restore along the way. */
e0deb08c 3358 poly_int64 old_stack_pointer_delta = 0;
9069face 3359
60ecc450 3360 rtx call_fusage;
c2f47e15 3361 tree addr = CALL_EXPR_FN (exp);
19cb6b50 3362 int i;
92e1ef5b 3363 /* The alignment of the stack, in bits. */
38413c80 3364 unsigned HOST_WIDE_INT preferred_stack_boundary;
92e1ef5b 3365 /* The alignment of the stack, in bytes. */
38413c80 3366 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
4ee9c684 3367 /* The static chain value to use for this call. */
3368 rtx static_chain_value;
dfe08167 3369 /* See if this is "nothrow" function call. */
3370 if (TREE_NOTHROW (exp))
3371 flags |= ECF_NOTHROW;
3372
4ee9c684 3373 /* See if we can find a DECL-node for the actual function, and get the
3374 function attributes (flags) from the function decl or type node. */
97a1590b 3375 fndecl = get_callee_fndecl (exp);
3376 if (fndecl)
66d433c7 3377 {
e100aadc 3378 fntype = TREE_TYPE (fndecl);
97a1590b 3379 flags |= flags_from_decl_or_type (fndecl);
c8010b80 3380 return_flags |= decl_return_flags (fndecl);
66d433c7 3381 }
97a1590b 3382 else
8a8cdb8d 3383 {
16c9337c 3384 fntype = TREE_TYPE (TREE_TYPE (addr));
e100aadc 3385 flags |= flags_from_decl_or_type (fntype);
a27e3913 3386 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3387 flags |= ECF_BY_DESCRIPTOR;
8a8cdb8d 3388 }
16c9337c 3389 rettype = TREE_TYPE (exp);
d490e2f2 3390
e100aadc 3391 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
45550790 3392
4a081ddd 3393 /* Warn if this value is an aggregate type,
3394 regardless of which calling convention we are using for it. */
16c9337c 3395 if (AGGREGATE_TYPE_P (rettype))
efb9d9ee 3396 warning (OPT_Waggregate_return, "function call has aggregate value");
4a081ddd 3397
9c2a0c05 3398 /* If the result of a non-looping pure or const function call is
3399 ignored (or void), and none of its arguments are volatile, we can
3400 avoid expanding the call and just evaluate the arguments for
3401 side-effects. */
4a081ddd 3402 if ((flags & (ECF_CONST | ECF_PURE))
9c2a0c05 3403 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
4a081ddd 3404 && (ignore || target == const0_rtx
16c9337c 3405 || TYPE_MODE (rettype) == VOIDmode))
4a081ddd 3406 {
3407 bool volatilep = false;
3408 tree arg;
cd46caee 3409 call_expr_arg_iterator iter;
4a081ddd 3410
cd46caee 3411 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3412 if (TREE_THIS_VOLATILE (arg))
4a081ddd 3413 {
3414 volatilep = true;
3415 break;
3416 }
3417
3418 if (! volatilep)
3419 {
cd46caee 3420 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3421 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
4a081ddd 3422 return const0_rtx;
3423 }
3424 }
3425
2d7187c2 3426#ifdef REG_PARM_STACK_SPACE
fa20f865 3427 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2d7187c2 3428#endif
2d7187c2 3429
fa20f865 3430 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 3431 && reg_parm_stack_space > 0 && PUSH_ARGS)
997d68fe 3432 must_preallocate = 1;
997d68fe 3433
66d433c7 3434 /* Set up a place to return a structure. */
3435
3436 /* Cater to broken compilers. */
4cd5bb61 3437 if (aggregate_value_p (exp, fntype))
66d433c7 3438 {
3439 /* This call returns a big structure. */
2dd6f9ed 3440 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
66d433c7 3441
3442#ifdef PCC_STATIC_STRUCT_RETURN
f49c64ba 3443 {
3444 pcc_struct_value = 1;
f49c64ba 3445 }
3446#else /* not PCC_STATIC_STRUCT_RETURN */
3447 {
e967c3ed 3448 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3449 struct_value_size = -1;
66d433c7 3450
e012cdc7 3451 /* Even if it is semantically safe to use the target as the return
3452 slot, it may not be sufficiently aligned for the return type. */
3453 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3454 && target
3455 && MEM_P (target)
8fb8d942 3456 /* If rettype is addressable, we may not create a temporary.
3457 If target is properly aligned at runtime and the compiler
3458 just doesn't know about it, it will work fine, otherwise it
3459 will be UB. */
3460 && (TREE_ADDRESSABLE (rettype)
3461 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3462 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3463 MEM_ALIGN (target)))))
f49c64ba 3464 structure_value_addr = XEXP (target, 0);
3465 else
3466 {
f49c64ba 3467 /* For variable-sized objects, we must be called with a target
3468 specified. If we were to allocate space on the stack here,
3469 we would have no way of knowing when to free it. */
0ab48139 3470 rtx d = assign_temp (rettype, 1, 1);
930f0e87 3471 structure_value_addr = XEXP (d, 0);
f49c64ba 3472 target = 0;
3473 }
3474 }
3475#endif /* not PCC_STATIC_STRUCT_RETURN */
66d433c7 3476 }
3477
0e0be288 3478 /* Figure out the amount to which the stack should be aligned. */
0e0be288 3479 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
28992b23 3480 if (fndecl)
3481 {
35ee1c66 3482 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
9a27561f 3483 /* Without automatic stack alignment, we can't increase preferred
3484 stack boundary. With automatic stack alignment, it is
3485 unnecessary, since unless we can guarantee that all callers will
3486 align the outgoing stack properly, the callee has to align its
3487 stack anyway. */
3488 if (i
3489 && i->preferred_incoming_stack_boundary
3490 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
28992b23 3491 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3492 }
0e0be288 3493
3494 /* Operand 0 is a pointer-to-function; get the type of the function. */
95672afe 3495 funtype = TREE_TYPE (addr);
231bd014 3496 gcc_assert (POINTER_TYPE_P (funtype));
0e0be288 3497 funtype = TREE_TYPE (funtype);
3498
cd46caee 3499 /* Count whether there are actual complex arguments that need to be split
3500 into their real and imaginary parts. Munge the type_arg_types
3501 appropriately here as well. */
92d40bc4 3502 if (targetm.calls.split_complex_arg)
915e81b8 3503 {
cd46caee 3504 call_expr_arg_iterator iter;
3505 tree arg;
3506 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3507 {
3508 tree type = TREE_TYPE (arg);
3509 if (type && TREE_CODE (type) == COMPLEX_TYPE
3510 && targetm.calls.split_complex_arg (type))
3511 num_complex_actuals++;
3512 }
915e81b8 3513 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
915e81b8 3514 }
3515 else
3516 type_arg_types = TYPE_ARG_TYPES (funtype);
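
 /* For illustration: with a callee such as

      double cabs2 (_Complex double z);

    on a target whose split_complex_arg hook accepts COMPLEX_TYPE,
    num_complex_actuals becomes 1 and split_complex_types rewrites the
    type list so that Z is passed as two separate double arguments,
    the real part followed by the imaginary part.  */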
3517
0e0be288 3518 if (flags & ECF_MAY_BE_ALLOCA)
18d50ae6 3519 cfun->calls_alloca = 1;
0e0be288 3520
3521 /* If struct_value_rtx is 0, it means pass the address
cd46caee 3522 as if it were an extra parameter. Put the argument expression
3523 in structure_value_addr_value. */
45550790 3524 if (structure_value_addr && struct_value == 0)
0e0be288 3525 {
3526 /* If structure_value_addr is a REG other than
3527 virtual_outgoing_args_rtx, we can always use it. If it
3528 is not a REG, we must always copy it into a register.
3529 If it is virtual_outgoing_args_rtx, we must copy it to another
3530 register in some cases. */
8ad4c111 3531 rtx temp = (!REG_P (structure_value_addr)
0e0be288 3532 || (ACCUMULATE_OUTGOING_ARGS
3533 && stack_arg_under_construction
3534 && structure_value_addr == virtual_outgoing_args_rtx)
0d568ddf 3535 ? copy_addr_to_reg (convert_memory_address
e100aadc 3536 (Pmode, structure_value_addr))
0e0be288 3537 : structure_value_addr);
3538
cd46caee 3539 structure_value_addr_value =
3540 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
1e42d5c6 3541 structure_value_addr_parm = 1;
0e0be288 3542 }
3543
3544 /* Count the arguments and set NUM_ACTUALS. */
cd46caee 3545 num_actuals =
3546 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
0e0be288 3547
3548 /* Compute number of named args.
30a10006 3549 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3550
3551 if (type_arg_types != 0)
3552 n_named_args
3553 = (list_length (type_arg_types)
3554 /* Count the struct value address, if it is passed as a parm. */
3555 + structure_value_addr_parm);
3556 else
3557 /* If we know nothing, treat all args as named. */
3558 n_named_args = num_actuals;
3559
3560 /* Start updating where the next arg would go.
3561
3562 On some machines (such as the PA) indirect calls have a different
3563 calling convention than normal calls. The fourth argument in
3564 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3565 or not. */
39cba157 3566 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3567 args_so_far = pack_cumulative_args (&args_so_far_v);
30a10006 3568
3569 /* Now possibly adjust the number of named args.
0e0be288 3570 Normally, don't include the last named arg if anonymous args follow.
8bdddbd1 3571 We do include the last named arg if
3572 targetm.calls.strict_argument_naming() returns nonzero.
0e0be288 3573 (If no anonymous args follow, the result of list_length is actually
3574 one too large. This is harmless.)
3575
a107cd89 3576 If targetm.calls.pretend_outgoing_varargs_named() returns
8bdddbd1 3577 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3578 this machine will be able to place unnamed args that were passed
3579 in registers into the stack. So treat all args as named. This
3580 allows the insns emitted for a specific argument list to be
3581 independent of the function declaration.
a107cd89 3582
3583 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3584 we do not have any reliable way to pass unnamed args in
3585 registers, so we must force them into memory. */
0e0be288 3586
30a10006 3587 if (type_arg_types != 0
39cba157 3588 && targetm.calls.strict_argument_naming (args_so_far))
30a10006 3589 ;
3590 else if (type_arg_types != 0
39cba157 3591 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
30a10006 3592 /* Don't include the last named arg. */
3593 --n_named_args;
0e0be288 3594 else
30a10006 3595 /* Treat all args as named. */
0e0be288 3596 n_named_args = num_actuals;
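
 /* Illustrative numbers: for printf ("%d %d", i, j) declared as
    int printf (const char *fmt, ...), the raw count gives
    n_named_args == 1 (just FMT). The adjustment just above then
    either keeps that count (strict argument naming), also drops the
    last named arg so n_named_args becomes 0 (a target that cannot
    pretend unnamed args were named), or treats all three actuals as
    named (n_named_args == num_actuals == 3).  */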
3597
0e0be288 3598 /* Make a vector to hold all the information about each arg. */
1f303606 3599 args = XCNEWVEC (struct arg_data, num_actuals);
0e0be288 3600
00dddcf2 3601 /* Build up entries in the ARGS array, compute the size of the
3602 arguments into ARGS_SIZE, etc. */
0e0be288 3603 initialize_argument_information (num_actuals, args, &args_size,
cd46caee 3604 n_named_args, exp,
d8b9c828 3605 structure_value_addr_value, fndecl, fntype,
39cba157 3606 args_so_far, reg_parm_stack_space,
0e0be288 3607 &old_stack_level, &old_pending_adj,
eaa112a0 3608 &must_preallocate, &flags,
4ee9c684 3609 &try_tail_call, CALL_FROM_THUNK_P (exp));
0e0be288 3610
3611 if (args_size.var)
2dd6f9ed 3612 must_preallocate = 1;
0e0be288 3613
3614 /* Now make final decision about preallocating stack space. */
3615 must_preallocate = finalize_must_preallocate (must_preallocate,
3616 num_actuals, args,
3617 &args_size);
3618
3619 /* If the structure value address will reference the stack pointer, we
3620 must stabilize it. We don't need to do this if we know that we are
3621 not going to adjust the stack pointer in processing this call. */
3622
3623 if (structure_value_addr
3624 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3625 || reg_mentioned_p (virtual_outgoing_args_rtx,
3626 structure_value_addr))
3627 && (args_size.var
e0deb08c 3628 || (!ACCUMULATE_OUTGOING_ARGS
3629 && maybe_ne (args_size.constant, 0))))
0e0be288 3630 structure_value_addr = copy_to_reg (structure_value_addr);
60ecc450 3631
0d568ddf 3632 /* Tail calls can make things harder to debug, and we've traditionally
4f8af819 3633 pushed these optimizations into -O2. Don't try if we're already
fdf2b689 3634 expanding a call, as that means we're an argument. Don't try if
011e6b51 3635 there are cleanups, as we know there's code to follow the call. */
0e0be288 3636 if (currently_expanding_call++ != 0
4c7db812 3637 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4ee9c684 3638 || args_size.var
3072d30e 3639 || dbg_cnt (tail_call) == false)
4ee9c684 3640 try_tail_call = 0;
0e0be288 3641
1fc5e56f 3642 /* Workaround buggy C/C++ wrappers around Fortran routines with
3643 character(len=constant) arguments if the hidden string length arguments
3644 are passed on the stack; if the callers forget to pass those arguments,
3645 attempting to tail call in such routines leads to stack corruption.
3646 Avoid tail calls in functions where at least one such hidden string
3647 length argument is passed (partially or fully) on the stack in the
3648 caller and the callee needs to pass any arguments on the stack.
3649 See PR90329. */
3650 if (try_tail_call && maybe_ne (args_size.constant, 0))
3651 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3652 arg; arg = DECL_CHAIN (arg))
3653 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3654 {
3655 subrtx_iterator::array_type array;
3656 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3657 if (MEM_P (*iter))
3658 {
3659 try_tail_call = 0;
3660 break;
3661 }
3662 }
3663
b4a61e77 3664 /* If the user has marked the function as requiring tail-call
3665 optimization, attempt it. */
3666 if (must_tail_call)
3667 try_tail_call = 1;
3668
0e0be288 3669 /* Check the remaining reasons for tail call optimization to fail. */
80e11038 3670 if (try_tail_call)
b4a61e77 3671 try_tail_call = can_implement_as_sibling_call_p (exp,
3672 structure_value_addr,
3673 funtype,
3674 reg_parm_stack_space,
3675 fndecl,
80e11038 3676 flags, addr, args_size);
4b066641 3677
4681dd41 3678 /* Check if caller and callee disagree in promotion of function
3679 return value. */
3680 if (try_tail_call)
3681 {
3754d046 3682 machine_mode caller_mode, caller_promoted_mode;
3683 machine_mode callee_mode, callee_promoted_mode;
4681dd41 3684 int caller_unsignedp, callee_unsignedp;
3685 tree caller_res = DECL_RESULT (current_function_decl);
3686
3687 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3b2411a8 3688 caller_mode = DECL_MODE (caller_res);
4681dd41 3689 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3b2411a8 3690 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3691 caller_promoted_mode
3692 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3693 &caller_unsignedp,
3694 TREE_TYPE (current_function_decl), 1);
3695 callee_promoted_mode
c879dbcf 3696 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
3b2411a8 3697 &callee_unsignedp,
c879dbcf 3698 funtype, 1);
4681dd41 3699 if (caller_mode != VOIDmode
3700 && (caller_promoted_mode != callee_promoted_mode
3701 || ((caller_mode != caller_promoted_mode
3702 || callee_mode != callee_promoted_mode)
3703 && (caller_unsignedp != callee_unsignedp
974534ab 3704 || partial_subreg_p (caller_mode, callee_mode)))))
b4a61e77 3705 {
3706 try_tail_call = 0;
3707 maybe_complain_about_tail_call (exp,
3708 "caller and callee disagree in"
3709 " promotion of function"
3710 " return value");
3711 }
4681dd41 3712 }
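
 /* Hypothetical mismatch caught here: the caller returns signed short
    while the callee's declared return type is unsigned short, and both
    promote to SImode. The promoted modes agree, but the modes were
    actually promoted and caller_unsignedp != callee_unsignedp, so the
    sibcall is refused: the value left in the return register could be
    extended the wrong way for our own caller.  */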
3713
755ece1f 3714 /* Ensure current function's preferred stack boundary is at least
3715 what we need. Stack alignment may also increase preferred stack
3716 boundary. */
2e24a52c 3717 for (i = 0; i < num_actuals; i++)
3718 if (reg_parm_stack_space > 0
3719 || args[i].reg == 0
3720 || args[i].partial != 0
3721 || args[i].pass_on_stack)
3722 update_stack_alignment_for_call (&args[i].locate);
54d759e3 3723 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
edb7afe8 3724 crtl->preferred_stack_boundary = preferred_stack_boundary;
755ece1f 3725 else
3726 preferred_stack_boundary = crtl->preferred_stack_boundary;
d0285dd8 3727
0e0be288 3728 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4b066641 3729
60ecc450 3730 /* We want to make two insn chains; one for a sibling call, the other
3731 for a normal call. We will select one of the two chains after
3732 initial RTL generation is complete. */
6e96b626 3733 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
60ecc450 3734 {
3735 int sibcall_failure = 0;
35a3065a 3736 /* We want to emit any pending stack adjustments before the tail
60ecc450 3737 recursion "call". That way we know any adjustment after the tail
0d568ddf 3738 recursion call can be ignored if we indeed use the tail
60ecc450 3739 call expansion. */
b6d206a2 3740 saved_pending_stack_adjust save;
3663becd 3741 rtx_insn *insns, *before_call, *after_args;
3742 rtx next_arg_reg;
1e2b2ab3 3743
60ecc450 3744 if (pass == 0)
3745 {
60ecc450 3746 /* State variables we need to save and restore between
3747 iterations. */
b6d206a2 3748 save_pending_stack_adjust (&save);
60ecc450 3749 }
dfe08167 3750 if (pass)
3751 flags &= ~ECF_SIBCALL;
3752 else
3753 flags |= ECF_SIBCALL;
66d433c7 3754
60ecc450 3755 /* Other state variables that we must reinitialize each time
dfe08167 3756 through the loop (that are not initialized by the loop itself). */
60ecc450 3757 argblock = 0;
3758 call_fusage = 0;
2f921ec9 3759
c87678e4 3760 /* Start a new sequence for the normal call case.
66d433c7 3761
60ecc450 3762 From this point on, if the sibling call fails, we want to set
3763 sibcall_failure instead of continuing the loop. */
3764 start_sequence ();
412321ce 3765
60ecc450 3766 /* Don't let pending stack adjusts add up to too much.
3767 Also, do all pending adjustments now if there is any chance
3768 this might be a call to alloca or if we are expanding a sibling
ff3ae375 3769 call sequence.
82e95be3 3770 Also do the adjustments before a throwing call, otherwise
3771 exception handling can fail; PR 19225. */
e0deb08c 3772 if (maybe_ge (pending_stack_adjust, 32)
3773 || (maybe_ne (pending_stack_adjust, 0)
ff3ae375 3774 && (flags & ECF_MAY_BE_ALLOCA))
e0deb08c 3775 || (maybe_ne (pending_stack_adjust, 0)
82e95be3 3776 && flag_exceptions && !(flags & ECF_NOTHROW))
60ecc450 3777 || pass == 0)
3778 do_pending_stack_adjust ();
66d433c7 3779
60ecc450 3780 /* Precompute any arguments as needed. */
02510658 3781 if (pass)
2dd6f9ed 3782 precompute_arguments (num_actuals, args);
66d433c7 3783
60ecc450 3784 /* Now we are about to start emitting insns that can be deleted
3785 if a libcall is deleted. */
2dd6f9ed 3786 if (pass && (flags & ECF_MALLOC))
60ecc450 3787 start_sequence ();
66d433c7 3788
783f362b 3789 if (pass == 0
3790 && crtl->stack_protect_guard
3791 && targetm.stack_protect_runtime_enabled_p ())
71d89928 3792 stack_protect_epilogue ();
3793
0e0be288 3794 adjusted_args_size = args_size;
481feae3 3795 /* Compute the actual size of the argument block required. The variable
3796 and constant sizes must be combined, the size may have to be rounded,
3797 and there may be a minimum required size. When generating a sibcall
3798 pattern, do not round up, since we'll be re-using whatever space our
3799 caller provided. */
3800 unadjusted_args_size
c87678e4 3801 = compute_argument_block_size (reg_parm_stack_space,
3802 &adjusted_args_size,
fa20f865 3803 fndecl, fntype,
481feae3 3804 (pass == 0 ? 0
3805 : preferred_stack_boundary));
3806
c87678e4 3807 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
481feae3 3808
02510658 3809 /* The argument block when performing a sibling call is the
a0c938f0 3810 incoming argument block. */
02510658 3811 if (pass == 0)
7ecc63d3 3812 {
27a7a23a 3813 argblock = crtl->args.internal_arg_pointer;
a8b58ffb 3814 if (STACK_GROWS_DOWNWARD)
3815 argblock
3816 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3817 else
3818 argblock
3819 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3820
e0deb08c 3821 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3822 stored_args_map = sbitmap_alloc (map_size);
53c5d9d4 3823 bitmap_clear (stored_args_map);
e0deb08c 3824 stored_args_watermark = HOST_WIDE_INT_M1U;
7ecc63d3 3825 }
481feae3 3826
60ecc450 3827 /* If we have no actual push instructions, or shouldn't use them,
3828 make space for all args right now. */
0e0be288 3829 else if (adjusted_args_size.var != 0)
66d433c7 3830 {
60ecc450 3831 if (old_stack_level == 0)
3832 {
e9c97615 3833 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 3834 old_stack_pointer_delta = stack_pointer_delta;
60ecc450 3835 old_pending_adj = pending_stack_adjust;
3836 pending_stack_adjust = 0;
60ecc450 3837 /* stack_arg_under_construction says whether a stack arg is
3838 being constructed at the old stack level. Pushing the stack
3839 gets a clean outgoing argument block. */
3840 old_stack_arg_under_construction = stack_arg_under_construction;
3841 stack_arg_under_construction = 0;
60ecc450 3842 }
0e0be288 3843 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
8c0dd614 3844 if (flag_stack_usage_info)
990495a7 3845 current_function_has_unbounded_dynamic_stack_size = 1;
66d433c7 3846 }
60ecc450 3847 else
3848 {
3849 /* Note that we must go through the motions of allocating an argument
3850 block even if the size is zero because we may be storing args
3851 in the area reserved for register arguments, which may be part of
3852 the stack frame. */
7221f864 3853
e0deb08c 3854 poly_int64 needed = adjusted_args_size.constant;
66d433c7 3855
60ecc450 3856 /* Store the maximum argument space used. It will be pushed by
3857 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3858 checking). */
66d433c7 3859
e0deb08c 3860 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3861 needed);
66d433c7 3862
60ecc450 3863 if (must_preallocate)
3864 {
4448f543 3865 if (ACCUMULATE_OUTGOING_ARGS)
3866 {
02510658 3867 /* Since the stack pointer will never be pushed, it is
3868 possible for the evaluation of a parm to clobber
3869 something we have already written to the stack.
3870 Since most function calls on RISC machines do not use
3871 the stack, this is uncommon, but must work correctly.
7221f864 3872
4448f543 3873 Therefore, we save any area of the stack that was already
02510658 3874 written and that we are using. Here we set up to do this
3875 by making a new stack usage map from the old one. The
c87678e4 3876 actual save will be done by store_one_arg.
7221f864 3877
4448f543 3878 Another approach might be to try to reorder the argument
3879 evaluations to avoid this conflicting stack usage. */
7221f864 3880
02510658 3881 /* Since we will be writing into the entire argument area,
3882 the map must be allocated for its entire size, not just
3883 the part that is the responsibility of the caller. */
fa20f865 3884 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 3885 needed += reg_parm_stack_space;
66d433c7 3886
e0deb08c 3887 poly_int64 limit = needed;
ccccd62c 3888 if (ARGS_GROW_DOWNWARD)
e0deb08c 3889 limit += 1;
3890
3891 /* For polynomial sizes, this is the maximum possible
3892 size needed for arguments with a constant size
3893 and offset. */
3894 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3895 highest_outgoing_arg_in_use
3896 = MAX (initial_highest_arg_in_use, const_limit);
ccccd62c 3897
dd045aee 3898 free (stack_usage_map_buf);
4c36ffe6 3899 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 3900 stack_usage_map = stack_usage_map_buf;
66d433c7 3901
4448f543 3902 if (initial_highest_arg_in_use)
8e547276 3903 memcpy (stack_usage_map, initial_stack_usage_map,
3904 initial_highest_arg_in_use);
d1b03b62 3905
4448f543 3906 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 3907 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 3908 (highest_outgoing_arg_in_use
3909 - initial_highest_arg_in_use));
3910 needed = 0;
d1b03b62 3911
02510658 3912 /* The address of the outgoing argument list must not be
3913 copied to a register here, because argblock would be left
3914 pointing to the wrong place after the call to
c87678e4 3915 allocate_dynamic_stack_space below. */
d1b03b62 3916
4448f543 3917 argblock = virtual_outgoing_args_rtx;
c87678e4 3918 }
4448f543 3919 else
7221f864 3920 {
e0deb08c 3921 /* Try to reuse some or all of the pending_stack_adjust
3922 to get this space. */
3923 if (inhibit_defer_pop == 0
3924 && (combine_pending_stack_adjustment_and_call
3925 (&needed,
3926 unadjusted_args_size,
3927 &adjusted_args_size,
3928 preferred_unit_stack_boundary)))
60ecc450 3929 {
481feae3 3930 /* combine_pending_stack_adjustment_and_call computes
3931 an adjustment before the arguments are allocated.
3932 Account for them and see whether or not the stack
3933 needs to go up or down. */
3934 needed = unadjusted_args_size - needed;
3935
e0deb08c 3936 /* Checked by
3937 combine_pending_stack_adjustment_and_call. */
3938 gcc_checking_assert (ordered_p (needed, 0));
3939 if (maybe_lt (needed, 0))
4448f543 3940 {
481feae3 3941 /* We're releasing stack space. */
3942 /* ??? We can avoid any adjustment at all if we're
3943 already aligned. FIXME. */
3944 pending_stack_adjust = -needed;
3945 do_pending_stack_adjust ();
4448f543 3946 needed = 0;
3947 }
c87678e4 3948 else
481feae3 3949 /* We need to allocate space. We'll do that in
3950 push_block below. */
3951 pending_stack_adjust = 0;
60ecc450 3952 }
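
 /* Rough illustration (hypothetical sizes): with 16 bytes of
    pending_stack_adjust waiting to be popped and 24 bytes of outgoing
    arguments needed, folding the two here leaves only the 8-byte
    difference to allocate below, rather than popping 16 bytes and
    then pushing 24.  */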
481feae3 3953
3954 /* Special case this because overhead of `push_block' in
3955 this case is non-trivial. */
e0deb08c 3956 if (known_eq (needed, 0))
4448f543 3957 argblock = virtual_outgoing_args_rtx;
60ecc450 3958 else
ad3b56f3 3959 {
e0deb08c 3960 rtx needed_rtx = gen_int_mode (needed, Pmode);
3961 argblock = push_block (needed_rtx, 0, 0);
ccccd62c 3962 if (ARGS_GROW_DOWNWARD)
3963 argblock = plus_constant (Pmode, argblock, needed);
ad3b56f3 3964 }
4448f543 3965
02510658 3966 /* We only really need to call `copy_to_reg' in the case
3967 where push insns are going to be used to pass ARGBLOCK
3968 to a function call in ARGS. In that case, the stack
3969 pointer changes value from the allocation point to the
3970 call point, and hence the value of
3971 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3972 as well always do it. */
4448f543 3973 argblock = copy_to_reg (argblock);
9069face 3974 }
3975 }
3976 }
60ecc450 3977
9069face 3978 if (ACCUMULATE_OUTGOING_ARGS)
3979 {
3980 /* The save/restore code in store_one_arg handles all
3981 cases except one: a constructor call (including a C
3982 function returning a BLKmode struct) to initialize
3983 an argument. */
3984 if (stack_arg_under_construction)
3985 {
63c68695 3986 rtx push_size
e0deb08c 3987 = (gen_int_mode
3988 (adjusted_args_size.constant
3989 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3990 : TREE_TYPE (fndecl))
3991 ? 0 : reg_parm_stack_space), Pmode));
9069face 3992 if (old_stack_level == 0)
3993 {
e9c97615 3994 emit_stack_save (SAVE_BLOCK, &old_stack_level);
9069face 3995 old_stack_pointer_delta = stack_pointer_delta;
3996 old_pending_adj = pending_stack_adjust;
3997 pending_stack_adjust = 0;
3998 /* stack_arg_under_construction says whether a stack
3999 arg is being constructed at the old stack level.
4000 Pushing the stack gets a clean outgoing argument
4001 block. */
4002 old_stack_arg_under_construction
4003 = stack_arg_under_construction;
4004 stack_arg_under_construction = 0;
4005 /* Make a new map for the new argument list. */
dd045aee 4006 free (stack_usage_map_buf);
43959b95 4007 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 4008 stack_usage_map = stack_usage_map_buf;
9069face 4009 highest_outgoing_arg_in_use = 0;
e0deb08c 4010 stack_usage_watermark = HOST_WIDE_INT_M1U;
4448f543 4011 }
990495a7 4012 /* We can pass TRUE as the 4th argument because we just
4013 saved the stack pointer and will restore it right after
4014 the call. */
2b34677f 4015 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4016 -1, true);
60ecc450 4017 }
a3585b90 4018
9069face 4019 /* If argument evaluation might modify the stack pointer,
4020 copy the address of the argument list to a register. */
4021 for (i = 0; i < num_actuals; i++)
4022 if (args[i].pass_on_stack)
4023 {
4024 argblock = copy_addr_to_reg (argblock);
4025 break;
4026 }
4027 }
4c9e08a4 4028
60ecc450 4029 compute_argument_addresses (args, argblock, num_actuals);
a3585b90 4030
2d298c93 4031 /* Stack is properly aligned, pops can't safely be deferred during
4032 the evaluation of the arguments. */
4033 NO_DEFER_POP;
4034
3a12804f 4035 /* Precompute all register parameters. It isn't safe to compute
4036 anything once we have started filling any specific hard regs.
4037 TLS symbols sometimes need a call to resolve. Precompute
4038 register parameters before any stack pointer manipulation
4039 to avoid unaligned stack in the called function. */
4040 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4041
2d298c93 4042 OK_DEFER_POP;
4043
bf29c577 4044 /* Perform stack alignment before the first push (the last arg). */
4045 if (argblock == 0
e0deb08c 4046 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4047 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
ff92623c 4048 {
60ecc450 4049 /* When the stack adjustment is pending, we get better code
4050 by combining the adjustments. */
e0deb08c 4051 if (maybe_ne (pending_stack_adjust, 0)
4052 && ! inhibit_defer_pop
4053 && (combine_pending_stack_adjustment_and_call
4054 (&pending_stack_adjust,
4055 unadjusted_args_size,
4056 &adjusted_args_size,
4057 preferred_unit_stack_boundary)))
4058 do_pending_stack_adjust ();
60ecc450 4059 else if (argblock == 0)
e0deb08c 4060 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4061 - unadjusted_args_size,
4062 Pmode));
60ecc450 4063 }
fa4f1f09 4064 /* Now that the stack is properly aligned, pops can't safely
4065 be deferred during the evaluation of the arguments. */
4066 NO_DEFER_POP;
66d433c7 4067
990495a7 4068 /* Record the maximum pushed stack space size. We need to delay
4069 doing it this far to take into account the optimization done
4070 by combine_pending_stack_adjustment_and_call. */
8c0dd614 4071 if (flag_stack_usage_info
990495a7 4072 && !ACCUMULATE_OUTGOING_ARGS
4073 && pass
4074 && adjusted_args_size.var == 0)
4075 {
e0deb08c 4076 poly_int64 pushed = (adjusted_args_size.constant
4077 + pending_stack_adjust);
4078 current_function_pushed_stack_size
4079 = upper_bound (current_function_pushed_stack_size, pushed);
990495a7 4080 }
4081
95672afe 4082 funexp = rtx_for_function_call (fndecl, addr);
66d433c7 4083
c2f47e15 4084 if (CALL_EXPR_STATIC_CHAIN (exp))
4085 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4ee9c684 4086 else
4087 static_chain_value = 0;
4088
4448f543 4089#ifdef REG_PARM_STACK_SPACE
60ecc450 4090 /* Save the fixed argument area if it's part of the caller's frame and
4091 is clobbered by argument setup for this call. */
02510658 4092 if (ACCUMULATE_OUTGOING_ARGS && pass)
4448f543 4093 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4094 &low_to_save, &high_to_save);
41332f48 4095#endif
66d433c7 4096
60ecc450 4097 /* Now store (and compute if necessary) all non-register parms.
4098 These come before register parms, since they can require block-moves,
4099 which could clobber the registers used for register parms.
4100 Parms which have partial registers are not stored here,
4101 but we do preallocate space here if they want that. */
66d433c7 4102
60ecc450 4103 for (i = 0; i < num_actuals; i++)
eb940a48 4104 {
1e42d5c6 4105 if (args[i].reg == 0 || args[i].pass_on_stack)
eb940a48 4106 {
3663becd 4107 rtx_insn *before_arg = get_last_insn ();
eb940a48 4108
ba83222c 4109 /* We don't allow passing huge (> 2^30 B) arguments
4110 by value. It would cause an overflow later on. */
e0deb08c 4111 if (constant_lower_bound (adjusted_args_size.constant)
ba83222c 4112 >= (1 << (HOST_BITS_PER_INT - 2)))
4113 {
4114 sorry ("passing too large argument on stack");
4115 continue;
4116 }
4117
eb940a48 4118 if (store_one_arg (&args[i], argblock, flags,
4119 adjusted_args_size.var != 0,
4120 reg_parm_stack_space)
4121 || (pass == 0
4122 && check_sibcall_argument_overlap (before_arg,
4123 &args[i], 1)))
4124 sibcall_failure = 1;
4125 }
4126
4143d08b 4127 if (args[i].stack)
b4eeceb9 4128 call_fusage
4129 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4130 gen_rtx_USE (VOIDmode, args[i].stack),
4131 call_fusage);
eb940a48 4132 }
60ecc450 4133
4134 /* If we have a parm that is passed in registers but not in memory
4135 and whose alignment does not permit a direct copy into registers,
4136 make a group of pseudos that correspond to each register that we
4137 will later fill. */
4138 if (STRICT_ALIGNMENT)
4139 store_unaligned_arguments_into_pseudos (args, num_actuals);
4140
4141 /* Now store any partially-in-registers parm.
4142 This is the last place a block-move can happen. */
4143 if (reg_parm_seen)
4144 for (i = 0; i < num_actuals; i++)
4145 if (args[i].partial != 0 && ! args[i].pass_on_stack)
7ecc63d3 4146 {
3663becd 4147 rtx_insn *before_arg = get_last_insn ();
7ecc63d3 4148
a95e5776 4149 /* On targets with weird calling conventions (e.g. PA) it's
4150 hard to ensure that all cases of argument overlap between
4151 stack and registers work. Play it safe and bail out. */
4152 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4153 {
4154 sibcall_failure = 1;
4155 break;
4156 }
4157
57679d39 4158 if (store_one_arg (&args[i], argblock, flags,
4159 adjusted_args_size.var != 0,
4160 reg_parm_stack_space)
4161 || (pass == 0
4162 && check_sibcall_argument_overlap (before_arg,
42b11544 4163 &args[i], 1)))
7ecc63d3 4164 sibcall_failure = 1;
4165 }
66d433c7 4166
53597a55 4167 bool any_regs = false;
4168 for (i = 0; i < num_actuals; i++)
4169 if (args[i].reg != NULL_RTX)
4170 {
4171 any_regs = true;
4172 targetm.calls.call_args (args[i].reg, funtype);
4173 }
4174 if (!any_regs)
4175 targetm.calls.call_args (pc_rtx, funtype);
4176
4177 /* Figure out the register where the value, if any, will come back. */
4178 valreg = 0;
53597a55 4179 if (TYPE_MODE (rettype) != VOIDmode
4180 && ! structure_value_addr)
4181 {
4182 if (pcc_struct_value)
1e42d5c6 4183 valreg = hard_function_value (build_pointer_type (rettype),
4184 fndecl, NULL, (pass == 0));
53597a55 4185 else
1e42d5c6 4186 valreg = hard_function_value (rettype, fndecl, fntype,
4187 (pass == 0));
53597a55 4188
4189 /* If VALREG is a PARALLEL whose first member has a zero
4190 offset, use that. This is for targets such as m68k that
4191 return the same value in multiple places. */
4192 if (GET_CODE (valreg) == PARALLEL)
4193 {
4194 rtx elem = XVECEXP (valreg, 0, 0);
4195 rtx where = XEXP (elem, 0);
4196 rtx offset = XEXP (elem, 1);
4197 if (offset == const0_rtx
4198 && GET_MODE (where) == GET_MODE (valreg))
4199 valreg = where;
4200 }
4201 }
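
 /* Shape of the PARALLEL case (illustrative, m68k-style): a pointer
    returned in both %d0 and %a0 arrives as something like

      (parallel [(expr_list (reg:SI %d0) (const_int 0))
                 (expr_list (reg:SI %a0) (const_int 0))])

    Because the first member sits at offset 0 and matches the overall
    mode, VALREG collapses to (reg:SI %d0) and the ordinary
    single-register paths below apply.  */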
4202
60ecc450 4203 /* If register arguments require space on the stack and stack space
4204 was not preallocated, allocate stack space here for arguments
4205 passed in registers. */
fa20f865 4206 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
22c61100 4207 && !ACCUMULATE_OUTGOING_ARGS
c87678e4 4208 && must_preallocate == 0 && reg_parm_stack_space > 0)
60ecc450 4209 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
985adbca 4210
60ecc450 4211 /* Pass the function the address in which to return a
4212 structure value. */
4213 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4214 {
0d568ddf 4215 structure_value_addr
85d654dd 4216 = convert_memory_address (Pmode, structure_value_addr);
45550790 4217 emit_move_insn (struct_value,
60ecc450 4218 force_reg (Pmode,
4219 force_operand (structure_value_addr,
4220 NULL_RTX)));
4221
8ad4c111 4222 if (REG_P (struct_value))
45550790 4223 use_reg (&call_fusage, struct_value);
60ecc450 4224 }
02c736f4 4225
c0e7e9f7 4226 after_args = get_last_insn ();
88f80691 4227 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4228 static_chain_value, &call_fusage,
4229 reg_parm_seen, flags);
e726704c 4230
42b11544 4231 load_register_parameters (args, num_actuals, &call_fusage, flags,
4232 pass == 0, &sibcall_failure);
c87678e4 4233
60ecc450 4234 /* Save a pointer to the last insn before the call, so that we can
4235 later safely search backwards to find the CALL_INSN. */
4236 before_call = get_last_insn ();
66d433c7 4237
7a8d641b 4238 /* Set up next argument register. For sibling calls on machines
4239 with register windows this should be the incoming register. */
7a8d641b 4240 if (pass == 0)
39cba157 4241 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
f387af4f 4242 VOIDmode,
4243 void_type_node,
4244 true);
7a8d641b 4245 else
39cba157 4246 next_arg_reg = targetm.calls.function_arg (args_so_far,
f387af4f 4247 VOIDmode, void_type_node,
4248 true);
7a8d641b 4249
c8010b80 4250 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4251 {
4252 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
bf29c577 4253 arg_nr = num_actuals - arg_nr - 1;
3d38d682 4254 if (arg_nr >= 0
4255 && arg_nr < num_actuals
4256 && args[arg_nr].reg
c8010b80 4257 && valreg
4258 && REG_P (valreg)
4259 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4260 call_fusage
4261 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
d1f9b275 4262 gen_rtx_SET (valreg, args[arg_nr].reg),
c8010b80 4263 call_fusage);
4264 }
60ecc450 4265 /* All arguments and registers used for the call must be set up by
4266 now! */
4267
481feae3 4268 /* Stack must be properly aligned now. */
231bd014 4269 gcc_assert (!pass
e0deb08c 4270 || multiple_p (stack_pointer_delta,
4271 preferred_unit_stack_boundary));
fa4f1f09 4272
60ecc450 4273 /* Generate the actual call instruction. */
4ee9c684 4274 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
0e0be288 4275 adjusted_args_size.constant, struct_value_size,
7a8d641b 4276 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
39cba157 4277 flags, args_so_far);
60ecc450 4278
fcf56aaf 4279 if (flag_ipa_ra)
2e3b0d0f 4280 {
3663becd 4281 rtx_call_insn *last;
4282 rtx datum = NULL_RTX;
2e3b0d0f 4283 if (fndecl != NULL_TREE)
4284 {
4285 datum = XEXP (DECL_RTL (fndecl), 0);
4286 gcc_assert (datum != NULL_RTX
4287 && GET_CODE (datum) == SYMBOL_REF);
4288 }
4289 last = last_call_insn ();
4290 add_reg_note (last, REG_CALL_DECL, datum);
4291 }
4292
c0e7e9f7 4293 /* If the call setup or the call itself overlaps with anything
4294 of the argument setup we probably clobbered our call address.
4295 In that case we can't do sibcalls. */
4296 if (pass == 0
4297 && check_sibcall_argument_overlap (after_args, 0, 0))
4298 sibcall_failure = 1;
4299
05d18e8b 4300 /* If a non-BLKmode value is returned at the most significant end
4301 of a register, shift the register right by the appropriate amount
4302 and update VALREG accordingly. BLKmode values are handled by the
4303 group load/store machinery below. */
4304 if (!structure_value_addr
4305 && !pcc_struct_value
d8ef55fc 4306 && TYPE_MODE (rettype) != VOIDmode
16c9337c 4307 && TYPE_MODE (rettype) != BLKmode
d8ef55fc 4308 && REG_P (valreg)
16c9337c 4309 && targetm.calls.return_in_msb (rettype))
05d18e8b 4310 {
16c9337c 4311 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
05d18e8b 4312 sibcall_failure = 1;
16c9337c 4313 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
05d18e8b 4314 }
4315
2dd6f9ed 4316 if (pass && (flags & ECF_MALLOC))
60ecc450 4317 {
4318 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3663becd 4319 rtx_insn *last, *insns;
60ecc450 4320
c87678e4 4321 /* The return value from a malloc-like function is a pointer. */
16c9337c 4322 if (TREE_CODE (rettype) == POINTER_TYPE)
10836fcc 4323 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
60ecc450 4324
4325 emit_move_insn (temp, valreg);
4326
f4d3c071 4327 /* The return value from a malloc-like function cannot alias
60ecc450 4328 anything else. */
4329 last = get_last_insn ();
a1ddb869 4330 add_reg_note (last, REG_NOALIAS, temp);
60ecc450 4331
4332 /* Write out the sequence. */
4333 insns = get_insns ();
4334 end_sequence ();
31d3e01c 4335 emit_insn (insns);
60ecc450 4336 valreg = temp;
4337 }
66d433c7 4338
3072d30e 4339 /* For calls to `setjmp', etc., inform
4340 function.c:setjmp_warnings that it should complain if
4341 nonvolatile values are live. For functions that cannot
4342 return, inform flow that control does not fall through. */
66d433c7 4343
4fec1d6c 4344 if ((flags & ECF_NORETURN) || pass == 0)
02c736f4 4345 {
9239aee6 4346 /* The barrier must be emitted
60ecc450 4347 immediately after the CALL_INSN. Some ports emit more
4348 than just a CALL_INSN above, so we must search for it here. */
66d433c7 4349
3663becd 4350 rtx_insn *last = get_last_insn ();
6d7dc5b9 4351 while (!CALL_P (last))
60ecc450 4352 {
4353 last = PREV_INSN (last);
4354 /* There was no CALL_INSN? */
231bd014 4355 gcc_assert (last != before_call);
60ecc450 4356 }
66d433c7 4357
9239aee6 4358 emit_barrier_after (last);
20f5f6d0 4359
b494d193 4360 /* Stack adjustments after a noreturn call are dead code.
4361 However when NO_DEFER_POP is in effect, we must preserve
4362 stack_pointer_delta. */
4363 if (inhibit_defer_pop == 0)
4364 {
4365 stack_pointer_delta = old_stack_allocated;
4366 pending_stack_adjust = 0;
4367 }
60ecc450 4368 }
66d433c7 4369
60ecc450 4370 /* If value type not void, return an rtx for the value. */
66d433c7 4371
16c9337c 4372 if (TYPE_MODE (rettype) == VOIDmode
60ecc450 4373 || ignore)
5edaabad 4374 target = const0_rtx;
60ecc450 4375 else if (structure_value_addr)
4376 {
e16ceb8e 4377 if (target == 0 || !MEM_P (target))
60ecc450 4378 {
f7c44134 4379 target
16c9337c 4380 = gen_rtx_MEM (TYPE_MODE (rettype),
4381 memory_address (TYPE_MODE (rettype),
f7c44134 4382 structure_value_addr));
16c9337c 4383 set_mem_attributes (target, rettype, 1);
60ecc450 4384 }
4385 }
4386 else if (pcc_struct_value)
566d850a 4387 {
60ecc450 4388 /* This is the special C++ case where we need to
4389 know what the true target was. We take care to
4390 never use this value more than once in one expression. */
16c9337c 4391 target = gen_rtx_MEM (TYPE_MODE (rettype),
60ecc450 4392 copy_to_reg (valreg));
16c9337c 4393 set_mem_attributes (target, rettype, 1);
566d850a 4394 }
60ecc450 4395 /* Handle calls that return values in multiple non-contiguous locations.
4396 The Irix 6 ABI has examples of this. */
4397 else if (GET_CODE (valreg) == PARALLEL)
4398 {
4ee9c684 4399 if (target == 0)
2d0fd66d 4400 target = emit_group_move_into_temps (valreg);
5bd5c1c2 4401 else if (rtx_equal_p (target, valreg))
4402 ;
4403 else if (GET_CODE (target) == PARALLEL)
4404 /* Handle the result of an emit_group_move_into_temps
4405 call in the previous pass. */
4406 emit_group_move (target, valreg);
4407 else
16c9337c 4408 emit_group_store (target, valreg, rettype,
4409 int_size_in_bytes (rettype));
60ecc450 4410 }
4411 else if (target
16c9337c 4412 && GET_MODE (target) == TYPE_MODE (rettype)
60ecc450 4413 && GET_MODE (target) == GET_MODE (valreg))
4414 {
aadbaa40 4415 bool may_overlap = false;
4416
360738f1 4417 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4418 reg to a plain register. */
90af1361 4419 if (!REG_P (target) || HARD_REGISTER_P (target))
4420 valreg = avoid_likely_spilled_reg (valreg);
360738f1 4421
aadbaa40 4422 /* If TARGET is a MEM in the argument area, and we have
4423 saved part of the argument area, then we can't store
4424 directly into TARGET as it may get overwritten when we
4425 restore the argument save area below. Don't work too
4426 hard though and simply force TARGET to a register if it
4427 is a MEM; the optimizer is quite likely to sort it out. */
4428 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4429 for (i = 0; i < num_actuals; i++)
4430 if (args[i].save_area)
4431 {
4432 may_overlap = true;
4433 break;
4434 }
dbe1f550 4435
aadbaa40 4436 if (may_overlap)
4437 target = copy_to_reg (valreg);
4438 else
4439 {
4440 /* TARGET and VALREG cannot be equal at this point
4441 because the latter would not have
4442 REG_FUNCTION_VALUE_P true, while the former would if
4443 it were referring to the same register.
4444
4445 If they refer to the same register, this move will be
4446 a no-op, except when function inlining is being
4447 done. */
4448 emit_move_insn (target, valreg);
4449
4450 /* If we are setting a MEM, this code must be executed.
4451 Since it is emitted after the call insn, sibcall
4452 optimization cannot be performed in that case. */
4453 if (MEM_P (target))
4454 sibcall_failure = 1;
4455 }
60ecc450 4456 }
60ecc450 4457 else
90af1361 4458 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
66d433c7 4459
3b2411a8 4460 /* If we promoted this return value, make the proper SUBREG.
4461 TARGET might be const0_rtx here, so be careful. */
4462 if (REG_P (target)
16c9337c 4463 && TYPE_MODE (rettype) != BLKmode
4464 && GET_MODE (target) != TYPE_MODE (rettype))
45550790 4465 {
16c9337c 4466 tree type = rettype;
3b2411a8 4467 int unsignedp = TYPE_UNSIGNED (type);
3754d046 4468 machine_mode pmode;
3b2411a8 4469
4470 /* Ensure we promote as expected, and get the new unsignedness. */
4471 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4472 funtype, 1);
4473 gcc_assert (GET_MODE (target) == pmode);
4474
9edf7ea8 4475 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4476 GET_MODE (target));
3b2411a8 4477 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4478 SUBREG_PROMOTED_VAR_P (target) = 1;
e8629f9e 4479 SUBREG_PROMOTED_SET (target, unsignedp);
45550790 4480 }
23eb5fa6 4481
60ecc450 4482 /* If size of args is variable or this was a constructor call for a stack
4483 argument, restore saved stack-pointer value. */
66d433c7 4484
ff3ae375 4485 if (old_stack_level)
60ecc450 4486 {
3663becd 4487 rtx_insn *prev = get_last_insn ();
dfe00a8f 4488
e9c97615 4489 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9069face 4490 stack_pointer_delta = old_stack_pointer_delta;
dfe00a8f 4491
897445c7 4492 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
dfe00a8f 4493
60ecc450 4494 pending_stack_adjust = old_pending_adj;
80f06481 4495 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
60ecc450 4496 stack_arg_under_construction = old_stack_arg_under_construction;
4497 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4498 stack_usage_map = initial_stack_usage_map;
e0deb08c 4499 stack_usage_watermark = initial_stack_usage_watermark;
60ecc450 4500 sibcall_failure = 1;
4501 }
02510658 4502 else if (ACCUMULATE_OUTGOING_ARGS && pass)
60ecc450 4503 {
66d433c7 4504#ifdef REG_PARM_STACK_SPACE
60ecc450 4505 if (save_area)
6e96b626 4506 restore_fixed_argument_area (save_area, argblock,
4507 high_to_save, low_to_save);
41332f48 4508#endif
66d433c7 4509
60ecc450 4510 /* If we saved any argument areas, restore them. */
4511 for (i = 0; i < num_actuals; i++)
4512 if (args[i].save_area)
4513 {
3754d046 4514 machine_mode save_mode = GET_MODE (args[i].save_area);
60ecc450 4515 rtx stack_area
4516 = gen_rtx_MEM (save_mode,
4517 memory_address (save_mode,
4518 XEXP (args[i].stack_slot, 0)));
4519
4520 if (save_mode != BLKmode)
4521 emit_move_insn (stack_area, args[i].save_area);
4522 else
0378dbdc 4523 emit_block_move (stack_area, args[i].save_area,
e0deb08c 4524 (gen_int_mode
4525 (args[i].locate.size.constant, Pmode)),
0378dbdc 4526 BLOCK_OP_CALL_PARM);
60ecc450 4527 }
66d433c7 4528
60ecc450 4529 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4530 stack_usage_map = initial_stack_usage_map;
e0deb08c 4531 stack_usage_watermark = initial_stack_usage_watermark;
60ecc450 4532 }
66d433c7 4533
97354ae4 4534 /* If this was alloca, record the new stack level. */
4535 if (flags & ECF_MAY_BE_ALLOCA)
4536 record_new_stack_level ();
66d433c7 4537
60ecc450 4538 /* Free up storage we no longer need. */
4539 for (i = 0; i < num_actuals; ++i)
dd045aee 4540 free (args[i].aligned_regs);
60ecc450 4541
53597a55 4542 targetm.calls.end_call_args ();
4543
60ecc450 4544 insns = get_insns ();
4545 end_sequence ();
4546
4547 if (pass == 0)
4548 {
4549 tail_call_insns = insns;
4550
60ecc450 4551 /* Restore the pending stack adjustment now that we have
4552 finished generating the sibling call sequence. */
91b70175 4553
b6d206a2 4554 restore_pending_stack_adjust (&save);
0e0be288 4555
4556 /* Prepare arg structure for next iteration. */
c87678e4 4557 for (i = 0; i < num_actuals; i++)
0e0be288 4558 {
4559 args[i].value = 0;
4560 args[i].aligned_regs = 0;
4561 args[i].stack = 0;
4562 }
7ecc63d3 4563
4564 sbitmap_free (stored_args_map);
3663becd 4565 internal_arg_pointer_exp_state.scan_start = NULL;
f1f41a6c 4566 internal_arg_pointer_exp_state.cache.release ();
60ecc450 4567 }
4568 else
9069face 4569 {
4570 normal_call_insns = insns;
4571
4572 /* Verify that we've deallocated all the stack we used. */
4fec1d6c 4573 gcc_assert ((flags & ECF_NORETURN)
e0deb08c 4574 || known_eq (old_stack_allocated,
4575 stack_pointer_delta
4576 - pending_stack_adjust));
9069face 4577 }
ae8d6151 4578
4579 /* If something prevents making this a sibling call,
4580 zero out the sequence. */
4581 if (sibcall_failure)
3663becd 4582 tail_call_insns = NULL;
4ee9c684 4583 else
4584 break;
60ecc450 4585 }
4586
365db11e 4587 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4ee9c684 4588     the arguments too, as the argument area is now clobbered by the call.  */
4589 if (tail_call_insns)
60ecc450 4590 {
4ee9c684 4591 emit_insn (tail_call_insns);
18d50ae6 4592 crtl->tail_call_emit = true;
60ecc450 4593 }
4594 else
b4a61e77 4595 {
4596 emit_insn (normal_call_insns);
4597 if (try_tail_call)
4598 /* Ideally we'd emit a message for all of the ways that it could
4599 have failed. */
4600 maybe_complain_about_tail_call (exp, "tail call production failed");
4601 }
66d433c7 4602
60ecc450 4603 currently_expanding_call--;
6d801f27 4604
dd045aee 4605 free (stack_usage_map_buf);
1f303606 4606 free (args);
66d433c7 4607 return target;
4608}
915e81b8 4609
4ee9c684 4610/* A sibling call sequence invalidates any REG_EQUIV notes made for
4611 this function's incoming arguments.
4612
4613 At the start of RTL generation we know the only REG_EQUIV notes
0a227ed5 4614 in the rtl chain are those for incoming arguments, so we can look
4615 for REG_EQUIV notes between the start of the function and the
4616 NOTE_INSN_FUNCTION_BEG.
4ee9c684 4617
4618 This is (slight) overkill. We could keep track of the highest
4619 argument we clobber and be more selective in removing notes, but it
4620 does not seem to be worth the effort. */
0a227ed5 4621
4ee9c684 4622void
4623fixup_tail_calls (void)
4624{
3663becd 4625 rtx_insn *insn;
0a227ed5 4626
4627 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4628 {
750a330e 4629 rtx note;
4630
0a227ed5 4631 /* There are never REG_EQUIV notes for the incoming arguments
4632 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4633 if (NOTE_P (insn)
ad4583d9 4634 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
0a227ed5 4635 break;
4636
750a330e 4637 note = find_reg_note (insn, REG_EQUIV, 0);
4638 if (note)
4639 remove_note (insn, note);
4640 note = find_reg_note (insn, REG_EQUIV, 0);
4641 gcc_assert (!note);
0a227ed5 4642 }
4ee9c684 4643}
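
/* Illustrative sketch (not from the original source): before this pass
   runs, an incoming argument pseudo may carry a note along the lines of

       (expr_list:REG_EQUIV (mem/c:SI (plus:SI (reg/f:SI argp)
					       (const_int 4))))

   equating the pseudo with its incoming stack slot.  Once a sibling
   call has been emitted, that slot may have been overwritten with the
   callee's own arguments, so the loop above deletes every such note.  */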
4644
915e81b8 4645/* Traverse a list of TYPES and expand all complex types into their
4646 components. */
5ab29745 4647static tree
915e81b8 4648split_complex_types (tree types)
4649{
4650 tree p;
4651
92d40bc4 4652   /* Before allocating memory, check for the common case of no complex types.  */
4653 for (p = types; p; p = TREE_CHAIN (p))
4654 {
4655 tree type = TREE_VALUE (p);
4656 if (TREE_CODE (type) == COMPLEX_TYPE
4657 && targetm.calls.split_complex_arg (type))
a0c938f0 4658 goto found;
92d40bc4 4659 }
4660 return types;
4661
4662 found:
915e81b8 4663 types = copy_list (types);
4664
4665 for (p = types; p; p = TREE_CHAIN (p))
4666 {
4667 tree complex_type = TREE_VALUE (p);
4668
92d40bc4 4669 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4670 && targetm.calls.split_complex_arg (complex_type))
915e81b8 4671 {
4672 tree next, imag;
4673
4674 /* Rewrite complex type with component type. */
4675 TREE_VALUE (p) = TREE_TYPE (complex_type);
4676 next = TREE_CHAIN (p);
4677
4678 /* Add another component type for the imaginary part. */
4679 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4680 TREE_CHAIN (p) = imag;
4681 TREE_CHAIN (imag) = next;
4682
4683 /* Skip the newly created node. */
4684 p = TREE_CHAIN (p);
4685 }
4686 }
4687
4688 return types;
4689}
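
/* Worked example (illustrative): given a TYPE_ARG_TYPES chain for
   "void f (_Complex double, int)", i.e. (complex double, int), a target
   whose split_complex_arg hook accepts complex double rewrites the
   chain to (double, double, int): the original node is narrowed to the
   component type and one extra node is spliced in for the imaginary
   part.  */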
66d433c7 4690\f
9e9e5c15 4691/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4692 for a value of mode OUTMODE,
4693 with NARGS different arguments, passed as ARGS.
4694 Store the return value if RETVAL is nonzero: store it in VALUE if
4695 VALUE is nonnull, otherwise pick a convenient location. In either
4696 case return the location of the stored value.
2a631e19 4697
9e9e5c15 4698 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4699 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4700 other types of library calls. */
4701
4702rtx
4c9e08a4 4703emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4704 enum libcall_type fn_type,
9e9e5c15 4705 machine_mode outmode, int nargs, rtx_mode_t *args)
b39693dd 4706{
9bdaf1ba 4707 /* Total size in bytes of all the stack-parms scanned so far. */
4708 struct args_size args_size;
4709 /* Size of arguments before any adjustments (such as rounding). */
4710 struct args_size original_args_size;
19cb6b50 4711 int argnum;
9bdaf1ba 4712 rtx fun;
22c61100 4713   /* TODO: choose the correct decl type of orgfun.  Sadly this information
4714      isn't present here, so we default to the native calling ABI.  */
60e2260d 4715   tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
fa20f865 4716   tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling ABI?  */
9bdaf1ba 4717 int count;
9bdaf1ba 4718 rtx argblock = 0;
39cba157 4719 CUMULATIVE_ARGS args_so_far_v;
4720 cumulative_args_t args_so_far;
c87678e4 4721 struct arg
4722 {
4723 rtx value;
3754d046 4724 machine_mode mode;
c87678e4 4725 rtx reg;
4726 int partial;
241399f6 4727 struct locate_and_pad_arg_data locate;
c87678e4 4728 rtx save_area;
4729 };
9bdaf1ba 4730 struct arg *argvec;
4731 int old_inhibit_defer_pop = inhibit_defer_pop;
4732 rtx call_fusage = 0;
4733 rtx mem_value = 0;
16204096 4734 rtx valreg;
9bdaf1ba 4735 int pcc_struct_value = 0;
52acb7ae 4736 poly_int64 struct_value_size = 0;
df4b504c 4737 int flags;
9bdaf1ba 4738 int reg_parm_stack_space = 0;
e0deb08c 4739 poly_int64 needed;
3663becd 4740 rtx_insn *before_call;
8700bf9e 4741 bool have_push_fusage;
771d21fa 4742 tree tfom; /* type_for_mode (outmode, 0) */
9bdaf1ba 4743
4448f543 4744#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 4745 /* Define the boundary of the register parm stack space that needs to be
4746 save, if any. */
75a70cf9 4747 int low_to_save = 0, high_to_save = 0;
c87678e4 4748 rtx save_area = 0; /* Place that it is saved. */
9bdaf1ba 4749#endif
4750
9bdaf1ba 4751 /* Size of the stack reserved for parameter registers. */
e0deb08c 4752 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
9bdaf1ba 4753 char *initial_stack_usage_map = stack_usage_map;
e0deb08c 4754 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
a331ea1b 4755 char *stack_usage_map_buf = NULL;
9bdaf1ba 4756
45550790 4757 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4758
9bdaf1ba 4759#ifdef REG_PARM_STACK_SPACE
9bdaf1ba 4760 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
9bdaf1ba 4761#endif
4762
1c1a1b9a 4763 /* By default, library functions cannot throw. */
df4b504c 4764 flags = ECF_NOTHROW;
4765
ab7ccfa2 4766 switch (fn_type)
4767 {
4768 case LCT_NORMAL:
2a0c81bf 4769 break;
ab7ccfa2 4770 case LCT_CONST:
2a0c81bf 4771 flags |= ECF_CONST;
4772 break;
ab7ccfa2 4773 case LCT_PURE:
2a0c81bf 4774 flags |= ECF_PURE;
ab7ccfa2 4775 break;
ab7ccfa2 4776 case LCT_NORETURN:
4777 flags |= ECF_NORETURN;
4778 break;
4779 case LCT_THROW:
1c1a1b9a 4780 flags &= ~ECF_NOTHROW;
ab7ccfa2 4781 break;
0ff18307 4782 case LCT_RETURNS_TWICE:
4783 flags = ECF_RETURNS_TWICE;
4784 break;
ab7ccfa2 4785 }
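  /* For instance (illustrative): an integer multiply helper such as
     __mulsi3 is normally emitted with LCT_CONST, so the call gets
     ECF_CONST and is subject to CSE, while LCT_THROW clears
     ECF_NOTHROW above so the exception machinery treats the call as
     potentially throwing.  */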
9bdaf1ba 4786 fun = orgfun;
4787
9bdaf1ba 4788 /* Ensure current function's preferred stack boundary is at least
4789 what we need. */
edb7afe8 4790 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4791 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
9bdaf1ba 4792
4793 /* If this kind of value comes back in memory,
4794 decide where in memory it should come back. */
771d21fa 4795 if (outmode != VOIDmode)
9bdaf1ba 4796 {
dc24ddbd 4797 tfom = lang_hooks.types.type_for_mode (outmode, 0);
45550790 4798 if (aggregate_value_p (tfom, 0))
771d21fa 4799 {
9bdaf1ba 4800#ifdef PCC_STATIC_STRUCT_RETURN
771d21fa 4801 rtx pointer_reg
46b3ff29 4802 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
771d21fa 4803 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4804 pcc_struct_value = 1;
4805 if (value == 0)
4806 value = gen_reg_rtx (outmode);
9bdaf1ba 4807#else /* not PCC_STATIC_STRUCT_RETURN */
771d21fa 4808 struct_value_size = GET_MODE_SIZE (outmode);
e16ceb8e 4809 if (value != 0 && MEM_P (value))
771d21fa 4810 mem_value = value;
4811 else
0ab48139 4812 mem_value = assign_temp (tfom, 1, 1);
9bdaf1ba 4813#endif
771d21fa 4814 /* This call returns a big structure. */
2dd6f9ed 4815 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
771d21fa 4816 }
9bdaf1ba 4817 }
771d21fa 4818 else
4819 tfom = void_type_node;
9bdaf1ba 4820
4821 /* ??? Unfinished: must pass the memory address as an argument. */
4822
4823 /* Copy all the libcall-arguments out of the varargs data
4824 and into a vector ARGVEC.
4825
4826 Compute how to pass each argument. We only support a very small subset
4827 of the full argument passing conventions to limit complexity here since
4828 library functions shouldn't have many args. */
4829
364c0c59 4830 argvec = XALLOCAVEC (struct arg, nargs + 1);
f0af5a88 4831 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
9bdaf1ba 4832
e1efd914 4833#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
39cba157 4834 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
e1efd914 4835#else
39cba157 4836 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
e1efd914 4837#endif
39cba157 4838 args_so_far = pack_cumulative_args (&args_so_far_v);
9bdaf1ba 4839
4840 args_size.constant = 0;
4841 args_size.var = 0;
4842
4843 count = 0;
4844
4845 push_temp_slots ();
4846
4847 /* If there's a structure value address to be passed,
4848 either pass it in the special place, or pass it as an extra argument. */
45550790 4849 if (mem_value && struct_value == 0 && ! pcc_struct_value)
9bdaf1ba 4850 {
4851 rtx addr = XEXP (mem_value, 0);
a0c938f0 4852
9bdaf1ba 4853 nargs++;
4854
a56c46d2 4855 /* Make sure it is a reasonable operand for a move or push insn. */
4856 if (!REG_P (addr) && !MEM_P (addr)
ca316360 4857 && !(CONSTANT_P (addr)
4858 && targetm.legitimate_constant_p (Pmode, addr)))
a56c46d2 4859 addr = force_operand (addr, NULL_RTX);
4860
9bdaf1ba 4861 argvec[count].value = addr;
4862 argvec[count].mode = Pmode;
4863 argvec[count].partial = 0;
4864
39cba157 4865 argvec[count].reg = targetm.calls.function_arg (args_so_far,
f387af4f 4866 Pmode, NULL_TREE, true);
39cba157 4867 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
f054eb3c 4868 NULL_TREE, 1) == 0);
9bdaf1ba 4869
4870 locate_and_pad_parm (Pmode, NULL_TREE,
2e735c0d 4871#ifdef STACK_PARMS_IN_REG_PARM_AREA
a0c938f0 4872 1,
2e735c0d 4873#else
4874 argvec[count].reg != 0,
4875#endif
2e090bf6 4876 reg_parm_stack_space, 0,
4877 NULL_TREE, &args_size, &argvec[count].locate);
9bdaf1ba 4878
9bdaf1ba 4879 if (argvec[count].reg == 0 || argvec[count].partial != 0
4880 || reg_parm_stack_space > 0)
241399f6 4881 args_size.constant += argvec[count].locate.size.constant;
9bdaf1ba 4882
39cba157 4883 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
9bdaf1ba 4884
4885 count++;
4886 }
4887
9e9e5c15 4888 for (unsigned int i = 0; count < nargs; i++, count++)
9bdaf1ba 4889 {
9e9e5c15 4890 rtx val = args[i].first;
4891 machine_mode mode = args[i].second;
adaf4ef0 4892 int unsigned_p = 0;
9bdaf1ba 4893
4894 /* We cannot convert the arg value to the mode the library wants here;
4895 must do it earlier where we know the signedness of the arg. */
231bd014 4896 gcc_assert (mode != BLKmode
4897 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
9bdaf1ba 4898
a56c46d2 4899 /* Make sure it is a reasonable operand for a move or push insn. */
4900 if (!REG_P (val) && !MEM_P (val)
ca316360 4901 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
a56c46d2 4902 val = force_operand (val, NULL_RTX);
4903
39cba157 4904 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
9bdaf1ba 4905 {
ddaf7ad3 4906 rtx slot;
13f08ee7 4907 int must_copy
39cba157 4908 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
ddaf7ad3 4909
9c2a0c05 4910 /* If this was a CONST function, it is now PURE since it now
4911 reads memory. */
5096b8b0 4912 if (flags & ECF_CONST)
4913 {
4914 flags &= ~ECF_CONST;
4915 flags |= ECF_PURE;
4916 }
4917
590c3166 4918 if (MEM_P (val) && !must_copy)
006e2d5a 4919 {
4920 tree val_expr = MEM_EXPR (val);
4921 if (val_expr)
4922 mark_addressable (val_expr);
4923 slot = val;
4924 }
41dc12b4 4925 else
ddaf7ad3 4926 {
dc24ddbd 4927 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
0ab48139 4928 1, 1);
ddaf7ad3 4929 emit_move_insn (slot, val);
4930 }
387bc205 4931
a683e787 4932 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4933 gen_rtx_USE (VOIDmode, slot),
4934 call_fusage);
ddaf7ad3 4935 if (must_copy)
4936 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4937 gen_rtx_CLOBBER (VOIDmode,
4938 slot),
4939 call_fusage);
4940
9bdaf1ba 4941 mode = Pmode;
ddaf7ad3 4942 val = force_operand (XEXP (slot, 0), NULL_RTX);
9bdaf1ba 4943 }
9bdaf1ba 4944
adaf4ef0 4945 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
9bdaf1ba 4946 argvec[count].mode = mode;
adaf4ef0 4947 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
39cba157 4948 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
f387af4f 4949 NULL_TREE, true);
9bdaf1ba 4950
9bdaf1ba 4951 argvec[count].partial
39cba157 4952 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
9bdaf1ba 4953
11fb947f 4954 if (argvec[count].reg == 0
4955 || argvec[count].partial != 0
4956 || reg_parm_stack_space > 0)
4957 {
4958 locate_and_pad_parm (mode, NULL_TREE,
2e735c0d 4959#ifdef STACK_PARMS_IN_REG_PARM_AREA
11fb947f 4960 1,
2e735c0d 4961#else
11fb947f 4962 argvec[count].reg != 0,
4963#endif
2e090bf6 4964 reg_parm_stack_space, argvec[count].partial,
11fb947f 4965 NULL_TREE, &args_size, &argvec[count].locate);
4966 args_size.constant += argvec[count].locate.size.constant;
4967 gcc_assert (!argvec[count].locate.size.var);
4968 }
4969#ifdef BLOCK_REG_PADDING
4970 else
4971 /* The argument is passed entirely in registers. See at which
4972 end it should be padded. */
4973 argvec[count].locate.where_pad =
4974 BLOCK_REG_PADDING (mode, NULL_TREE,
52acb7ae 4975 known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
2e735c0d 4976#endif
9bdaf1ba 4977
39cba157 4978 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
9bdaf1ba 4979 }
9bdaf1ba 4980
2e24a52c 4981 for (int i = 0; i < nargs; i++)
4982 if (reg_parm_stack_space > 0
4983 || argvec[i].reg == 0
4984 || argvec[i].partial != 0)
4985 update_stack_alignment_for_call (&argvec[i].locate);
4986
9bdaf1ba 4987 /* If this machine requires an external definition for library
4988 functions, write one out. */
4989 assemble_external_libcall (fun);
4990
4991 original_args_size = args_size;
e0deb08c 4992 args_size.constant = (aligned_upper_bound (args_size.constant
4993 + stack_pointer_delta,
4994 STACK_BYTES)
4995 - stack_pointer_delta);
9bdaf1ba 4996
e0deb08c 4997 args_size.constant = upper_bound (args_size.constant,
4998 reg_parm_stack_space);
9bdaf1ba 4999
fa20f865 5000 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 5001 args_size.constant -= reg_parm_stack_space;
9bdaf1ba 5002
e0deb08c 5003 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5004 args_size.constant);
9bdaf1ba 5005
8c0dd614 5006 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
990495a7 5007 {
e0deb08c 5008 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5009 current_function_pushed_stack_size
5010 = upper_bound (current_function_pushed_stack_size, pushed);
990495a7 5011 }
5012
4448f543 5013 if (ACCUMULATE_OUTGOING_ARGS)
5014 {
5015 /* Since the stack pointer will never be pushed, it is possible for
5016 the evaluation of a parm to clobber something we have already
5017 written to the stack. Since most function calls on RISC machines
5018 do not use the stack, this is uncommon, but must work correctly.
9bdaf1ba 5019
4448f543 5020 Therefore, we save any area of the stack that was already written
5021 and that we are using. Here we set up to do this by making a new
5022 stack usage map from the old one.
9bdaf1ba 5023
4448f543 5024 Another approach might be to try to reorder the argument
5025 evaluations to avoid this conflicting stack usage. */
9bdaf1ba 5026
4448f543 5027 needed = args_size.constant;
9bdaf1ba 5028
4448f543 5029 /* Since we will be writing into the entire argument area, the
5030 map must be allocated for its entire size, not just the part that
5031 is the responsibility of the caller. */
fa20f865 5032 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
63c68695 5033 needed += reg_parm_stack_space;
9bdaf1ba 5034
e0deb08c 5035 poly_int64 limit = needed;
ccccd62c 5036 if (ARGS_GROW_DOWNWARD)
e0deb08c 5037 limit += 1;
5038
5039 /* For polynomial sizes, this is the maximum possible size needed
5040 for arguments with a constant size and offset. */
5041 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5042 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5043 const_limit);
ccccd62c 5044
4c36ffe6 5045 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
a331ea1b 5046 stack_usage_map = stack_usage_map_buf;
9bdaf1ba 5047
4448f543 5048 if (initial_highest_arg_in_use)
8e547276 5049 memcpy (stack_usage_map, initial_stack_usage_map,
5050 initial_highest_arg_in_use);
9bdaf1ba 5051
4448f543 5052 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
93d3b7de 5053 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4448f543 5054 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5055 needed = 0;
9bdaf1ba 5056
9c0a756f 5057 /* We must be careful to use virtual regs before they're instantiated,
a0c938f0 5058 and real regs afterwards. Loop optimization, for example, can create
9c0a756f 5059 new libcalls after we've instantiated the virtual regs, and if we
5060 use virtuals anyway, they won't match the rtl patterns. */
9bdaf1ba 5061
9c0a756f 5062 if (virtuals_instantiated)
29c05e22 5063 argblock = plus_constant (Pmode, stack_pointer_rtx,
5064 STACK_POINTER_OFFSET);
9c0a756f 5065 else
5066 argblock = virtual_outgoing_args_rtx;
4448f543 5067 }
5068 else
5069 {
5070 if (!PUSH_ARGS)
e0deb08c 5071 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
4448f543 5072 }
9bdaf1ba 5073
bf29c577 5074 /* We push args individually in reverse order, perform stack alignment
9bdaf1ba 5075 before the first push (the last arg). */
bf29c577 5076 if (argblock == 0)
e0deb08c 5077 anti_adjust_stack (gen_int_mode (args_size.constant
5078 - original_args_size.constant,
5079 Pmode));
9bdaf1ba 5080
bf29c577 5081 argnum = nargs - 1;
9bdaf1ba 5082
4448f543 5083#ifdef REG_PARM_STACK_SPACE
5084 if (ACCUMULATE_OUTGOING_ARGS)
5085 {
5086      /* The argument list is the property of the called routine, which
5087         may clobber it.  If the fixed area has been used for previous
6e96b626 5088 parameters, we must save and restore it. */
5089 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5090 &low_to_save, &high_to_save);
9bdaf1ba 5091 }
5092#endif
c87678e4 5093
53597a55 5094 /* When expanding a normal call, args are stored in push order,
5095 which is the reverse of what we have here. */
5096 bool any_regs = false;
5097 for (int i = nargs; i-- > 0; )
5098 if (argvec[i].reg != NULL_RTX)
5099 {
5100 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5101 any_regs = true;
5102 }
5103 if (!any_regs)
5104 targetm.calls.call_args (pc_rtx, NULL_TREE);
5105
9bdaf1ba 5106 /* Push the args that need to be pushed. */
5107
8700bf9e 5108 have_push_fusage = false;
5109
9bdaf1ba 5110 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5111 are to be pushed. */
bf29c577 5112 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 5113 {
3754d046 5114 machine_mode mode = argvec[argnum].mode;
19cb6b50 5115 rtx val = argvec[argnum].value;
9bdaf1ba 5116 rtx reg = argvec[argnum].reg;
5117 int partial = argvec[argnum].partial;
c2fd5e89 5118 unsigned int parm_align = argvec[argnum].locate.boundary;
e0deb08c 5119 poly_int64 lower_bound = 0, upper_bound = 0;
9bdaf1ba 5120
5121 if (! (reg != 0 && partial == 0))
5122 {
4143d08b 5123 rtx use;
5124
4448f543 5125 if (ACCUMULATE_OUTGOING_ARGS)
5126 {
02510658 5127 /* If this is being stored into a pre-allocated, fixed-size,
5128 stack area, save any previous data at that location. */
9bdaf1ba 5129
ccccd62c 5130 if (ARGS_GROW_DOWNWARD)
5131 {
5132 /* stack_slot is negative, but we want to index stack_usage_map
5133 with positive values. */
5134 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5135 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5136 }
5137 else
5138 {
5139 lower_bound = argvec[argnum].locate.slot_offset.constant;
5140 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5141 }
9bdaf1ba 5142
e0deb08c 5143 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5144 reg_parm_stack_space))
4448f543 5145 {
241399f6 5146 /* We need to make a save area. */
e0deb08c 5147 poly_uint64 size
241399f6 5148 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3754d046 5149 machine_mode save_mode
517be012 5150 = int_mode_for_size (size, 1).else_blk ();
241399f6 5151 rtx adr
29c05e22 5152 = plus_constant (Pmode, argblock,
241399f6 5153 argvec[argnum].locate.offset.constant);
4448f543 5154 rtx stack_area
241399f6 5155 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4448f543 5156
f9c6a9c3 5157 if (save_mode == BLKmode)
5158 {
5159 argvec[argnum].save_area
5160 = assign_stack_temp (BLKmode,
0ab48139 5161 argvec[argnum].locate.size.constant
5162 );
f9c6a9c3 5163
d2b9158b 5164 emit_block_move (validize_mem
5165 (copy_rtx (argvec[argnum].save_area)),
a0c938f0 5166 stack_area,
e0deb08c 5167 (gen_int_mode
5168 (argvec[argnum].locate.size.constant,
5169 Pmode)),
f9c6a9c3 5170 BLOCK_OP_CALL_PARM);
5171 }
5172 else
5173 {
5174 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5175
5176 emit_move_insn (argvec[argnum].save_area, stack_area);
5177 }
4448f543 5178 }
9bdaf1ba 5179 }
325d1c45 5180
c2fd5e89 5181 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
0378dbdc 5182 partial, reg, 0, argblock,
e0deb08c 5183 (gen_int_mode
5184 (argvec[argnum].locate.offset.constant, Pmode)),
241399f6 5185 reg_parm_stack_space,
a95e5776 5186 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
9bdaf1ba 5187
9bdaf1ba 5188 /* Now mark the segment we just used. */
4448f543 5189 if (ACCUMULATE_OUTGOING_ARGS)
e0deb08c 5190 mark_stack_region_used (lower_bound, upper_bound);
9bdaf1ba 5191
5192 NO_DEFER_POP;
2eb9302a 5193
4143d08b 5194 /* Indicate argument access so that alias.c knows that these
5195 values are live. */
5196 if (argblock)
29c05e22 5197 use = plus_constant (Pmode, argblock,
4143d08b 5198 argvec[argnum].locate.offset.constant);
8700bf9e 5199 else if (have_push_fusage)
5200 continue;
4143d08b 5201 else
8700bf9e 5202 {
5203 /* When arguments are pushed, trying to tell alias.c where
5204 exactly this argument is won't work, because the
5205 auto-increment causes confusion. So we merely indicate
5206 that we access something with a known mode somewhere on
5207 the stack. */
5208 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5209 gen_rtx_SCRATCH (Pmode));
5210 have_push_fusage = true;
5211 }
4143d08b 5212 use = gen_rtx_MEM (argvec[argnum].mode, use);
5213 use = gen_rtx_USE (VOIDmode, use);
5214 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
9bdaf1ba 5215 }
5216 }
5217
bf29c577 5218 argnum = nargs - 1;
9bdaf1ba 5219
82c7907c 5220 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
9bdaf1ba 5221
5222 /* Now load any reg parms into their regs. */
5223
5224 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5225 are to be pushed. */
bf29c577 5226 for (count = 0; count < nargs; count++, argnum--)
9bdaf1ba 5227 {
3754d046 5228 machine_mode mode = argvec[argnum].mode;
19cb6b50 5229 rtx val = argvec[argnum].value;
9bdaf1ba 5230 rtx reg = argvec[argnum].reg;
5231 int partial = argvec[argnum].partial;
37cd19a4 5232
9bdaf1ba 5233 /* Handle calls that pass values in multiple non-contiguous
5234 locations. The PA64 has examples of this for library calls. */
5235 if (reg != 0 && GET_CODE (reg) == PARALLEL)
bec917cc 5236 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
9bdaf1ba 5237 else if (reg != 0 && partial == 0)
37cd19a4 5238 {
5239 emit_move_insn (reg, val);
5240#ifdef BLOCK_REG_PADDING
52acb7ae 5241 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
37cd19a4 5242
5243 /* Copied from load_register_parameters. */
5244
5245	  /* Handle the case where we have a value that needs shifting
5246	     up to the msb, e.g. a QImode value being padded
5247	     upward on a BYTES_BIG_ENDIAN machine.
52acb7ae 5248 if (known_lt (size, UNITS_PER_WORD)
37cd19a4 5249 && (argvec[argnum].locate.where_pad
d7ab0e3d 5250 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
37cd19a4 5251 {
5252 rtx x;
52acb7ae 5253 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
37cd19a4 5254
5255 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5256 report the whole reg as used. Strictly speaking, the
5257 call only uses SIZE bytes at the msb end, but it doesn't
5258 seem worth generating rtl to say that. */
5259 reg = gen_rtx_REG (word_mode, REGNO (reg));
5260 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5261 if (x != reg)
5262 emit_move_insn (reg, x);
5263 }
5264#endif
5265 }
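	  /* Worked example (illustrative): with UNITS_PER_WORD == 4 on a
	     big-endian target that pads QImode upward, a byte value
	     0x12 must occupy the most significant byte of the word
	     register, so the shift above turns a register holding
	     0x00000012 into 0x12000000 before the call.  */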
9bdaf1ba 5266
5267 NO_DEFER_POP;
5268 }
5269
9bdaf1ba 5270 /* Any regs containing parms remain in use through the call. */
5271 for (count = 0; count < nargs; count++)
5272 {
5273 rtx reg = argvec[count].reg;
5274 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5275 use_group_regs (&call_fusage, reg);
5276 else if (reg != 0)
6c6f16e5 5277 {
5278 int partial = argvec[count].partial;
5279 if (partial)
5280 {
5281 int nregs;
5282 gcc_assert (partial % UNITS_PER_WORD == 0);
5283 nregs = partial / UNITS_PER_WORD;
5284 use_regs (&call_fusage, REGNO (reg), nregs);
5285 }
5286 else
5287 use_reg (&call_fusage, reg);
5288 }
9bdaf1ba 5289 }
5290
5291 /* Pass the function the address in which to return a structure value. */
45550790 5292 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
9bdaf1ba 5293 {
45550790 5294 emit_move_insn (struct_value,
9bdaf1ba 5295 force_reg (Pmode,
5296 force_operand (XEXP (mem_value, 0),
5297 NULL_RTX)));
8ad4c111 5298 if (REG_P (struct_value))
45550790 5299 use_reg (&call_fusage, struct_value);
9bdaf1ba 5300 }
5301
5302 /* Don't allow popping to be deferred, since then
5303 cse'ing of library calls could delete a call and leave the pop. */
5304 NO_DEFER_POP;
16204096 5305 valreg = (mem_value == 0 && outmode != VOIDmode
578d1295 5306 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
9bdaf1ba 5307
481feae3 5308 /* Stack must be properly aligned now. */
e0deb08c 5309 gcc_assert (multiple_p (stack_pointer_delta,
5310 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
fa4f1f09 5311
644c283b 5312 before_call = get_last_insn ();
5313
9bdaf1ba 5314 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5315 will set inhibit_defer_pop to that value. */
20f7032f 5316 /* The return type is needed to decide how many bytes the function pops.
5317 Signedness plays no role in that, so for simplicity, we pretend it's
5318 always signed. We also assume that the list of arguments passed has
5319 no impact, so we pretend it is unknown. */
9bdaf1ba 5320
4ee9c684 5321 emit_call_1 (fun, NULL,
c87678e4 5322 get_identifier (XSTR (orgfun, 0)),
771d21fa 5323 build_function_type (tfom, NULL_TREE),
c87678e4 5324 original_args_size.constant, args_size.constant,
9bdaf1ba 5325 struct_value_size,
39cba157 5326 targetm.calls.function_arg (args_so_far,
f387af4f 5327 VOIDmode, void_type_node, true),
16204096 5328 valreg,
39cba157 5329 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
9bdaf1ba 5330
fcf56aaf 5331 if (flag_ipa_ra)
2e3b0d0f 5332 {
9ed997be 5333 rtx datum = orgfun;
2e3b0d0f 5334 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
9ed997be 5335 rtx_call_insn *last = last_call_insn ();
2e3b0d0f 5336 add_reg_note (last, REG_CALL_DECL, datum);
5337 }
5338
37cd19a4 5339 /* Right-shift returned value if necessary. */
5340 if (!pcc_struct_value
5341 && TYPE_MODE (tfom) != BLKmode
5342 && targetm.calls.return_in_msb (tfom))
5343 {
5344 shift_return_value (TYPE_MODE (tfom), false, valreg);
5345 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5346 }
5347
53597a55 5348 targetm.calls.end_call_args ();
5349
3072d30e 5350 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5351 that it should complain if nonvolatile values are live. For
5352 functions that cannot return, inform flow that control does not
5353 fall through. */
4fec1d6c 5354 if (flags & ECF_NORETURN)
644c283b 5355 {
9239aee6 5356 /* The barrier note must be emitted
644c283b 5357 immediately after the CALL_INSN. Some ports emit more than
5358 just a CALL_INSN above, so we must search for it here. */
3663becd 5359 rtx_insn *last = get_last_insn ();
6d7dc5b9 5360 while (!CALL_P (last))
644c283b 5361 {
5362 last = PREV_INSN (last);
5363 /* There was no CALL_INSN? */
231bd014 5364 gcc_assert (last != before_call);
644c283b 5365 }
5366
9239aee6 5367 emit_barrier_after (last);
644c283b 5368 }
5369
43926c6a 5370 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5371 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5372 if (flags & ECF_NOTHROW)
5373 {
3663becd 5374 rtx_insn *last = get_last_insn ();
43926c6a 5375 while (!CALL_P (last))
5376 {
5377 last = PREV_INSN (last);
5378 /* There was no CALL_INSN? */
5379 gcc_assert (last != before_call);
5380 }
5381
5382 make_reg_eh_region_note_nothrow_nononlocal (last);
5383 }
5384
9bdaf1ba 5385 /* Now restore inhibit_defer_pop to its actual original value. */
5386 OK_DEFER_POP;
5387
5388 pop_temp_slots ();
5389
5390 /* Copy the value to the right place. */
20f7032f 5391 if (outmode != VOIDmode && retval)
9bdaf1ba 5392 {
5393 if (mem_value)
5394 {
5395 if (value == 0)
5396 value = mem_value;
5397 if (value != mem_value)
5398 emit_move_insn (value, mem_value);
5399 }
40651bac 5400 else if (GET_CODE (valreg) == PARALLEL)
5401 {
5402 if (value == 0)
5403 value = gen_reg_rtx (outmode);
4c3a0ea5 5404 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
40651bac 5405 }
9bdaf1ba 5406 else
4e1a3169 5407 {
3b2411a8 5408 /* Convert to the proper mode if a promotion has been active. */
4e1a3169 5409 if (GET_MODE (valreg) != outmode)
5410 {
5411 int unsignedp = TYPE_UNSIGNED (tfom);
5412
3b2411a8 5413 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5414 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4e1a3169 5415 == GET_MODE (valreg));
4e1a3169 5416 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5417 }
5418
5419 if (value != 0)
5420 emit_move_insn (value, valreg);
5421 else
5422 value = valreg;
5423 }
9bdaf1ba 5424 }
5425
4448f543 5426 if (ACCUMULATE_OUTGOING_ARGS)
9bdaf1ba 5427 {
4448f543 5428#ifdef REG_PARM_STACK_SPACE
5429 if (save_area)
6e96b626 5430 restore_fixed_argument_area (save_area, argblock,
5431 high_to_save, low_to_save);
9bdaf1ba 5432#endif
c87678e4 5433
4448f543 5434 /* If we saved any argument areas, restore them. */
5435 for (count = 0; count < nargs; count++)
5436 if (argvec[count].save_area)
5437 {
3754d046 5438 machine_mode save_mode = GET_MODE (argvec[count].save_area);
29c05e22 5439 rtx adr = plus_constant (Pmode, argblock,
241399f6 5440 argvec[count].locate.offset.constant);
5441 rtx stack_area = gen_rtx_MEM (save_mode,
5442 memory_address (save_mode, adr));
4448f543 5443
f9c6a9c3 5444 if (save_mode == BLKmode)
5445 emit_block_move (stack_area,
d2b9158b 5446 validize_mem
5447 (copy_rtx (argvec[count].save_area)),
e0deb08c 5448 (gen_int_mode
5449 (argvec[count].locate.size.constant, Pmode)),
f9c6a9c3 5450 BLOCK_OP_CALL_PARM);
5451 else
5452 emit_move_insn (stack_area, argvec[count].save_area);
4448f543 5453 }
9bdaf1ba 5454
4448f543 5455 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5456 stack_usage_map = initial_stack_usage_map;
e0deb08c 5457 stack_usage_watermark = initial_stack_usage_watermark;
4448f543 5458 }
b39693dd 5459
dd045aee 5460 free (stack_usage_map_buf);
a331ea1b 5461
20f7032f 5462 return value;
5463
5464}
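
/* Usage sketch (illustrative only; names other than
   emit_library_call_value_1 and rtx_mode_t below are just examples):
   emitting a word-multiply libcall and letting the helper pick where
   the SImode result lives:

       rtx fun = gen_rtx_SYMBOL_REF (Pmode, "__mulsi3");
       rtx_mode_t ops[] = { rtx_mode_t (op0, SImode),
			    rtx_mode_t (op1, SImode) };
       rtx res = emit_library_call_value_1 (1, fun, NULL_RTX, LCT_CONST,
					    SImode, 2, ops);

   In-tree code normally reaches this function through the
   emit_library_call and emit_library_call_value wrappers instead of
   calling it directly.  */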
5465\f
058a1b7a 5466
66d433c7 5467/* Store a single argument for a function call
5468 into the register or memory area where it must be passed.
5469 *ARG describes the argument value and where to pass it.
5470
5471 ARGBLOCK is the address of the stack-block for all the arguments,
f9e15121 5472 or 0 on a machine where arguments are pushed individually.
66d433c7 5473
5474 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
c87678e4 5475    so we must be careful about how the stack is used.
66d433c7 5476
5477 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5478    argument stack.  This is used with ACCUMULATE_OUTGOING_ARGS to indicate
5479 that we need not worry about saving and restoring the stack.
5480
57679d39 5481 FNDECL is the declaration of the function we are calling.
c87678e4 5482
d10cfa8d 5483 Return nonzero if this arg should cause sibcall failure,
57679d39 5484 zero otherwise. */
66d433c7 5485
57679d39 5486static int
4c9e08a4 5487store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5488 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
66d433c7 5489{
19cb6b50 5490 tree pval = arg->tree_value;
66d433c7 5491 rtx reg = 0;
5492 int partial = 0;
e0deb08c 5493 poly_int64 used = 0;
5494 poly_int64 lower_bound = 0, upper_bound = 0;
57679d39 5495 int sibcall_failure = 0;
66d433c7 5496
5497 if (TREE_CODE (pval) == ERROR_MARK)
57679d39 5498 return 1;
66d433c7 5499
1b117c60 5500 /* Push a new temporary level for any temporaries we make for
5501 this argument. */
5502 push_temp_slots ();
5503
02510658 5504 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
66d433c7 5505 {
4448f543 5506 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5507 save any previous data at that location. */
5508 if (argblock && ! variable_size && arg->stack)
5509 {
ccccd62c 5510 if (ARGS_GROW_DOWNWARD)
5511 {
5512 /* stack_slot is negative, but we want to index stack_usage_map
5513 with positive values. */
5514 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
e0deb08c 5515 {
5516 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5517 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5518 }
ccccd62c 5519 else
5520 upper_bound = 0;
66d433c7 5521
ccccd62c 5522 lower_bound = upper_bound - arg->locate.size.constant;
5523 }
4448f543 5524 else
ccccd62c 5525 {
5526 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
e0deb08c 5527 {
5528 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5529 lower_bound = rtx_to_poly_int64 (offset);
5530 }
ccccd62c 5531 else
5532 lower_bound = 0;
66d433c7 5533
ccccd62c 5534 upper_bound = lower_bound + arg->locate.size.constant;
5535 }
66d433c7 5536
e0deb08c 5537 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5538 reg_parm_stack_space))
66d433c7 5539 {
241399f6 5540 /* We need to make a save area. */
e0deb08c 5541 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
517be012 5542 machine_mode save_mode
5543 = int_mode_for_size (size, 1).else_blk ();
241399f6 5544 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5545 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4448f543 5546
5547 if (save_mode == BLKmode)
5548 {
9f495e8d 5549 arg->save_area
5550 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4448f543 5551 preserve_temp_slots (arg->save_area);
d2b9158b 5552 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5553 stack_area,
e0deb08c 5554 (gen_int_mode
5555 (arg->locate.size.constant, Pmode)),
0378dbdc 5556 BLOCK_OP_CALL_PARM);
4448f543 5557 }
5558 else
5559 {
5560 arg->save_area = gen_reg_rtx (save_mode);
5561 emit_move_insn (arg->save_area, stack_area);
5562 }
66d433c7 5563 }
5564 }
5565 }
b3caaea3 5566
66d433c7 5567 /* If this isn't going to be placed on both the stack and in registers,
5568 set up the register and number of words. */
5569 if (! arg->pass_on_stack)
04d6fcf8 5570 {
5571 if (flags & ECF_SIBCALL)
5572 reg = arg->tail_call_reg;
5573 else
5574 reg = arg->reg;
5575 partial = arg->partial;
5576 }
66d433c7 5577
231bd014 5578 /* Being passed entirely in a register. We shouldn't be called in
5579 this case. */
5580 gcc_assert (reg == 0 || partial != 0);
a0c938f0 5581
f28c7a75 5582 /* If this arg needs special alignment, don't load the registers
5583 here. */
5584 if (arg->n_aligned_regs != 0)
5585 reg = 0;
c87678e4 5586
f28c7a75 5587 /* If this is being passed partially in a register, we can't evaluate
66d433c7 5588 it directly into its stack slot. Otherwise, we can. */
5589 if (arg->value == 0)
f848041f 5590 {
f848041f 5591 /* stack_arg_under_construction is nonzero if a function argument is
5592 being evaluated directly into the outgoing argument list and
5593 expand_call must take special action to preserve the argument list
5594 if it is called recursively.
5595
5596 For scalar function arguments stack_usage_map is sufficient to
5597 determine which stack slots must be saved and restored. Scalar
5598 arguments in general have pass_on_stack == 0.
5599
5600 If this argument is initialized by a function which takes the
5601 address of the argument (a C++ constructor or a C function
5602 returning a BLKmode structure), then stack_usage_map is
5603 insufficient and expand_call must push the stack around the
5604 function call. Such arguments have pass_on_stack == 1.
5605
5606 Note that it is always safe to set stack_arg_under_construction,
5607 but this generates suboptimal code if set when not needed. */
5608
5609 if (arg->pass_on_stack)
5610 stack_arg_under_construction++;
4448f543 5611
7dbf1af4 5612 arg->value = expand_expr (pval,
5613 (partial
5614 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5615 ? NULL_RTX : arg->stack,
a35a63ff 5616 VOIDmode, EXPAND_STACK_PARM);
1c0c37a5 5617
5618	/* If we are promoting the object (or if for any other reason the
5619	   mode doesn't agree), convert it to the expected mode.  */
5620
1560ef8f 5621 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5622 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5623 arg->value, arg->unsignedp);
1c0c37a5 5624
f848041f 5625 if (arg->pass_on_stack)
5626 stack_arg_under_construction--;
f848041f 5627 }
66d433c7 5628
63864e1c 5629 /* Check for overlap with already clobbered argument area. */
ff6c0ab2 5630 if ((flags & ECF_SIBCALL)
5631 && MEM_P (arg->value)
e0deb08c 5632 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5633 arg->locate.size.constant))
ff6c0ab2 5634 sibcall_failure = 1;
63864e1c 5635
66d433c7 5636 /* Don't allow anything left on stack from computation
5637 of argument to alloca. */
02510658 5638 if (flags & ECF_MAY_BE_ALLOCA)
66d433c7 5639 do_pending_stack_adjust ();
5640
5641 if (arg->value == arg->stack)
8a06f2d4 5642 /* If the value is already in the stack slot, we are done. */
5643 ;
1c0c37a5 5644 else if (arg->mode != BLKmode)
66d433c7 5645 {
851fc2b3 5646 unsigned int parm_align;
66d433c7 5647
5648 /* Argument is a scalar, not entirely passed in registers.
5649 (If part is passed in registers, arg->partial says how much
5650 and emit_push_insn will take care of putting it there.)
c87678e4 5651
66d433c7 5652 Push it, and if its size is less than the
5653 amount of space allocated to it,
5654 also bump stack pointer by the additional space.
5655 Note that in C the default argument promotions
5656 will prevent such mismatches. */
5657
adbaa93b 5658 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5659 ? 0 : GET_MODE_SIZE (arg->mode));
532d84ff 5660
66d433c7 5661 /* Compute how much space the push instruction will push.
5662 On many machines, pushing a byte will advance the stack
5663 pointer by a halfword. */
5664#ifdef PUSH_ROUNDING
5665 size = PUSH_ROUNDING (size);
5666#endif
5667 used = size;
5668
5669 /* Compute how much space the argument should get:
5670 round up to a multiple of the alignment for arguments. */
d7ab0e3d 5671 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5672 != PAD_NONE)
adbaa93b 5673 /* At the moment we don't (need to) support ABIs for which the
5674 padding isn't known at compile time. In principle it should
5675 be easy to add though. */
5676 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
66d433c7 5677
851fc2b3 5678 /* Compute the alignment of the pushed argument. */
5679 parm_align = arg->locate.boundary;
d7ab0e3d 5680 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5681 == PAD_DOWNWARD)
851fc2b3 5682 {
e0deb08c 5683 poly_int64 pad = used - size;
5684 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5685 if (pad_align != 0)
5686 parm_align = MIN (parm_align, pad_align);
851fc2b3 5687 }
5688
66d433c7 5689 /* This isn't already where we want it on the stack, so put it there.
5690 This can either be done with push or copy insns. */
e0deb08c 5691 if (maybe_ne (used, 0)
532d84ff 5692 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5693 NULL_RTX, parm_align, partial, reg, used - size,
5694 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5695 reg_parm_stack_space,
5696 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
a95e5776 5697 sibcall_failure = 1;
d5c9a99f 5698
5699 /* Unless this is a partially-in-register argument, the argument is now
5700 in the stack. */
5701 if (partial == 0)
5702 arg->value = arg->stack;
66d433c7 5703 }
5704 else
5705 {
5706 /* BLKmode, at least partly to be pushed. */
5707
cf78c9ff 5708 unsigned int parm_align;
e0deb08c 5709 poly_int64 excess;
66d433c7 5710 rtx size_rtx;
5711
5712 /* Pushing a nonscalar.
5713 If part is passed in registers, PARTIAL says how much
5714 and emit_push_insn will take care of putting it there. */
5715
5716 /* Round its size up to a multiple
5717 of the allocation unit for arguments. */
5718
241399f6 5719 if (arg->locate.size.var != 0)
66d433c7 5720 {
5721 excess = 0;
241399f6 5722 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
66d433c7 5723 }
5724 else
5725 {
f054eb3c 5726 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5727 for BLKmode is careful to avoid it. */
5728 excess = (arg->locate.size.constant
532d84ff 5729 - arg_int_size_in_bytes (TREE_TYPE (pval))
f054eb3c 5730 + partial);
532d84ff 5731 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
b9c74b4d 5732 NULL_RTX, TYPE_MODE (sizetype),
5733 EXPAND_NORMAL);
66d433c7 5734 }
5735
c5dc0c32 5736 parm_align = arg->locate.boundary;
cf78c9ff 5737
5738 /* When an argument is padded down, the block is aligned to
5739 PARM_BOUNDARY, but the actual argument isn't. */
d7ab0e3d 5740 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5741 == PAD_DOWNWARD)
cf78c9ff 5742 {
241399f6 5743 if (arg->locate.size.var)
cf78c9ff 5744 parm_align = BITS_PER_UNIT;
e0deb08c 5745 else
cf78c9ff 5746 {
e0deb08c 5747 unsigned int excess_align
5748 = known_alignment (excess) * BITS_PER_UNIT;
5749 if (excess_align != 0)
5750 parm_align = MIN (parm_align, excess_align);
cf78c9ff 5751 }
5752 }
5753
e16ceb8e 5754 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
57679d39 5755 {
5756 /* emit_push_insn might not work properly if arg->value and
241399f6 5757 argblock + arg->locate.offset areas overlap. */
57679d39 5758 rtx x = arg->value;
e0deb08c 5759 poly_int64 i = 0;
57679d39 5760
7e3747b0 5761 if (strip_offset (XEXP (x, 0), &i)
5762 == crtl->args.internal_arg_pointer)
57679d39 5763 {
c62f411b 5764 /* arg.locate doesn't contain the pretend_args_size offset,
5765 it's part of argblock. Ensure we don't count it in I. */
5766 if (STACK_GROWS_DOWNWARD)
5767 i -= crtl->args.pretend_args_size;
5768 else
5769 i += crtl->args.pretend_args_size;
5770
21dda4ee 5771 /* expand_call should ensure this. */
231bd014 5772 gcc_assert (!arg->locate.offset.var
e0deb08c 5773 && arg->locate.size.var == 0);
5774 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
57679d39 5775
e0deb08c 5776 if (known_eq (arg->locate.offset.constant, i))
2ad152f7 5777 {
5778 /* Even though they appear to be at the same location,
5779 if part of the outgoing argument is in registers,
5780 they aren't really at the same location. Check for
5781 this by making sure that the incoming size is the
5782 same as the outgoing size. */
e0deb08c 5783 if (maybe_ne (arg->locate.size.constant, size_val))
57679d39 5784 sibcall_failure = 1;
5785 }
e0deb08c 5786 else if (maybe_in_range_p (arg->locate.offset.constant,
5787 i, size_val))
5788 sibcall_failure = 1;
5789 /* Use arg->locate.size.constant instead of size_rtx
5790 because we only care about the part of the argument
5791 on the stack. */
5792 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5793 arg->locate.size.constant))
5794 sibcall_failure = 1;
57679d39 5795 }
5796 }
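	  /* Worked example (illustrative): if a BLKmode argument is
	     loaded from its own incoming slot at offset 16 and must be
	     stored back at outgoing offset 16, the copy is harmless
	     only when the incoming and outgoing sizes match.  If
	     instead the outgoing slot starts at offset 8 and is 16
	     bytes long, the source at offset 16 falls inside [8,24),
	     the ranges overlap, and the sibcall is abandoned.  */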
5797
532d84ff 5798 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5799 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5800 parm_align, partial, reg, excess, argblock,
5801 ARGS_SIZE_RTX (arg->locate.offset),
5802 reg_parm_stack_space,
5803 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
66d433c7 5804
d5c9a99f 5805 /* Unless this is a partially-in-register argument, the argument is now
5806 in the stack.
66d433c7 5807
d5c9a99f 5808 ??? Unlike the case above, in which we want the actual
5809 address of the data, so that we can load it directly into a
5810 register, here we want the address of the stack slot, so that
5811 it's properly aligned for word-by-word copying or something
5812 like that. It's not clear that this is always correct. */
5813 if (partial == 0)
5814 arg->value = arg->stack_slot;
5815 }
b600a907 5816
5817 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5818 {
5819 tree type = TREE_TYPE (arg->tree_value);
5820 arg->parallel_value
5821 = emit_group_load_into_temps (arg->reg, arg->value, type,
5822 int_size_in_bytes (type));
5823 }
66d433c7 5824
a35a63ff 5825 /* Mark all slots this store used. */
5826 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5827 && argblock && ! variable_size && arg->stack)
e0deb08c 5828 mark_stack_region_used (lower_bound, upper_bound);
a35a63ff 5829
66d433c7 5830 /* Once we have pushed something, pops can't safely
5831 be deferred during the rest of the arguments. */
5832 NO_DEFER_POP;
5833
0ab48139 5834 /* Free any temporary slots made in processing this argument. */
1b117c60 5835 pop_temp_slots ();
57679d39 5836
5837 return sibcall_failure;
66d433c7 5838}
890f0c17 5839
0336f0f0 5840/* Nonzero if we do not know how to pass TYPE solely in registers. */
890f0c17 5841
0336f0f0 5842bool
3754d046 5843must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
fb80456a 5844 const_tree type)
0336f0f0 5845{
5846 if (!type)
5847 return false;
5848
5849 /* If the type has variable size... */
5850 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5851 return true;
890f0c17 5852
0336f0f0 5853 /* If the type is marked as addressable (it is required
5854 to be constructed into the stack)... */
5855 if (TREE_ADDRESSABLE (type))
5856 return true;
5857
5858 return false;
5859}
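
/* Example (illustrative): a GNU C type such as "struct { char c[n]; }"
   with non-constant N has a TYPE_SIZE that is not an INTEGER_CST, and a
   C++ class with a non-trivial copy constructor is TREE_ADDRESSABLE;
   the hook above therefore sends both to the stack rather than to
   registers.  */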
890f0c17 5860
0d568ddf 5861/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
0336f0f0 5862 takes trailing padding of a structure into account. */
5863/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
890f0c17 5864
5865bool
3754d046 5866must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
890f0c17 5867{
5868 if (!type)
dceaa0b1 5869 return false;
890f0c17 5870
5871 /* If the type has variable size... */
5872 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5873 return true;
5874
5875 /* If the type is marked as addressable (it is required
5876 to be constructed into the stack)... */
5877 if (TREE_ADDRESSABLE (type))
5878 return true;
5879
532d84ff 5880 if (TYPE_EMPTY_P (type))
5881 return false;
5882
890f0c17 5883 /* If the padding and mode of the type is such that a copy into
5884 a register would put it into the wrong part of the register. */
5885 if (mode == BLKmode
5886 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
d7ab0e3d 5887 && (targetm.calls.function_arg_padding (mode, type)
5888 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
890f0c17 5889 return true;
5890
5891 return false;
5892}
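
/* Example (illustrative): on a big-endian target with PARM_BOUNDARY of
   64, a 6-byte BLKmode structure padded upward occupies the
   low-addressed (most significant) end of its 8-byte slot; copying the
   slot into a register as-is would leave the value at the wrong end of
   the register, so the hook above forces such arguments onto the
   stack.  */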
3f82fe35 5893
5894/* Tell the garbage collector about GTY markers in this source file. */
5895#include "gt-calls.h"