/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "internal-fn.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "intl.h"
#include "stringpool.h"
#include "hash-map.h"
#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "attr-fnspec.h"
#include "value-query.h"
#include "tree-pretty-print.h"
#include "tree-eh.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* True if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is false, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  bool pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static unsigned int highest_outgoing_arg_in_use;

/* Assume that any stack location at this byte index is used,
   without checking the contents of stack_usage_map.  */
static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* Assume that any virtual-incoming location at this byte index has been
   stored, without checking the contents of stored_args_map.  */
static unsigned HOST_WIDE_INT stored_args_watermark;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void precompute_register_parameters (int, struct arg_data *, int *);
static bool store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static bool finalize_must_preallocate (bool, int, struct arg_data *,
                                       struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, bool *);
static int special_function_p (const_tree, int);
static bool check_sibcall_argument_overlap_1 (rtx);
static bool check_sibcall_argument_overlap (rtx_insn *, struct arg_data *,
                                            bool);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif

/* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region might already be in use.  */

static bool
stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
                           unsigned int reg_parm_stack_space)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
  if (!upper_bound.is_constant (&const_upper))
    const_upper = HOST_WIDE_INT_M1U;

  if (const_upper > stack_usage_watermark)
    return true;

  /* Don't worry about things in the fixed argument area;
     it has already been saved.  */
  const_lower = MAX (const_lower, reg_parm_stack_space);
  const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
  for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
    if (stack_usage_map[i])
      return true;
  return false;
}

/* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
   stack region are now in use.  */

static void
mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
{
  unsigned HOST_WIDE_INT const_lower, const_upper;
  const_lower = constant_lower_bound (lower_bound);
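
  /* If the region's upper bound is a compile-time constant that lies
     within the usage map, mark each byte individually; otherwise fall
     back to lowering the watermark, which conservatively treats every
     byte at or above it as used.  */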
  if (upper_bound.is_constant (&const_upper)
      && const_upper <= highest_outgoing_arg_in_use)
    for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
      stack_usage_map[i] = 1;
  else
    stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
}

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int flags)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    {
      /* If it's an indirect call by descriptor, generate code to perform
         runtime identification of the pointer and load the descriptor.  */
      if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
        {
          const int bit_val = targetm.calls.custom_function_descriptors;
          rtx call_lab = gen_label_rtx ();

          gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
          fndecl_or_type
            = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
                          fndecl_or_type);
          DECL_STATIC_CHAIN (fndecl_or_type) = 1;
          rtx chain = targetm.calls.static_chain (fndecl_or_type, false);

          if (GET_MODE (funexp) != Pmode)
            funexp = convert_memory_address (Pmode, funexp);

          /* Avoid long live ranges around function calls.  */
          funexp = copy_to_mode_reg (Pmode, funexp);

          if (REG_P (chain))
            emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));

          /* Emit the runtime identification pattern.  */
          rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
          emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
                                   call_lab);

          /* Statically predict the branch to very likely taken.  */
          rtx_insn *insn = get_last_insn ();
          if (JUMP_P (insn))
            predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);

          /* Load the descriptor.  */
          rtx mem = gen_rtx_MEM (ptr_mode,
                                 plus_constant (Pmode, funexp, - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (chain, mem);

          mem = gen_rtx_MEM (ptr_mode,
                             plus_constant (Pmode, funexp,
                                            POINTER_SIZE / BITS_PER_UNIT
                                            - bit_val));
          MEM_NOTRAP_P (mem) = 1;
          mem = convert_memory_address (Pmode, mem);
          emit_move_insn (funexp, mem);

          emit_label (call_lab);

          if (REG_P (chain))
            {
              use_reg (call_fusage, chain);
              STATIC_CHAIN_REG_P (chain) = 1;
            }

          /* Make sure we're not going to be overwritten below.  */
          gcc_assert (!static_chain_value);
        }

      /* If we are using registers for parameters, force the
         function address into a register now.  */
      funexp = ((reg_parm_seen
                 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
                ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
                : memory_address (FUNCTION_MODE, funexp));
    }
  else
    {
      /* funexp could be a SYMBOL_REF that represents a function pointer
         of ptr_mode.  In this case, it should be converted into address
         mode to be a valid address for a memory rtx pattern.  See PR 64971.  */
      if (GET_MODE (funexp) != Pmode)
        funexp = convert_memory_address (Pmode, funexp);

      if (!(flags & ECF_SIBCALL))
        {
          if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
            funexp = force_reg (Pmode, funexp);
        }
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        {
          use_reg (call_fusage, chain);
          STATIC_CHAIN_REG_P (chain) = 1;
        }
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far,
                                 function_arg_info::end_marker ());
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             poly_int64 stack_size ATTRIBUTE_UNUSED,
             poly_int64 rounded_stack_size,
             poly_int64 struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
  rtx call, funmem, pat;
  bool already_popped = false;
  poly_int64 n_popped = 0;

  /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
     patterns exist).  Any popping that the callee does on return will
     be from our caller's frame rather than ours.  */
  if (!(ecf_flags & ECF_SIBCALL))
    {
      n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
      n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
    }

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

  if (ecf_flags & ECF_SIBCALL)
    {
      if (valreg)
        pat = targetm.gen_sibcall_value (valreg, funmem,
                                         rounded_stack_size_rtx,
                                         next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
                                   next_arg_reg,
                                   gen_int_mode (struct_value_size, Pmode));
    }
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
  else if (maybe_ne (n_popped, 0)
           || !(valreg
                ? targetm.have_call_value ()
                : targetm.have_call ()))
    {
      rtx n_pop = gen_int_mode (n_popped, Pmode);

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = targetm.gen_call_value_pop (valreg, funmem,
                                          rounded_stack_size_rtx,
                                          next_arg_reg, n_pop);
      else
        pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
                                    next_arg_reg, n_pop);

      already_popped = true;
    }
  else
    {
      if (valreg)
        pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
                                      next_arg_reg, NULL_RTX);
      else
        pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
                                gen_int_mode (struct_value_size, Pmode));
    }
  emit_insn (pat);

  /* Find the call we just emitted.  */
  rtx_call_insn *call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     unchanging bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (maybe_ne (n_popped, 0))
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
      stack_pointer_delta -= n_popped;

      add_args_size_note (call_insn, stack_pointer_delta);

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_args_size_note (call_insn, stack_pointer_delta);

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (maybe_ne (rounded_stack_size, 0))
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (maybe_ne (n_popped, 0))
    anti_adjust_stack (gen_int_mode (n_popped, Pmode));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.  Modify FLAGS accordingly.

   For example, if the function might return more than one time (setjmp), then
   set ECF_RETURNS_TWICE.

   Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

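  /* 11 is the length of "__sigsetjmp", the longest of the names checked
     below, so longer identifiers cannot match any of them.  */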
  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
          && name[0] == 'a'
          && ! strcmp (name, "alloca"))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  */
      if (name[0] == '_')
        {
          if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
      if (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (name, "savectx")
          || ! strcmp (name, "vfork")
          || ! strcmp (name, "getcontext"))
        flags |= ECF_RETURNS_TWICE;
    }

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}

/* Return fnspec for DECL.  */

static attr_fnspec
decl_fnspec (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (type)
    {
      attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
      if (attr)
        return TREE_VALUE (TREE_VALUE (attr));
    }
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return builtin_fnspec (fndecl);
  return "";
}

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */

static int
decl_return_flags (tree fndecl)
{
  attr_fnspec fnspec = decl_fnspec (fndecl);

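  /* If the function returns one of its arguments, encode that argument's
     index in the low bits of the result alongside ERF_RETURNS_ARG.  */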
  unsigned int arg;
  if (fnspec.returns_arg (&arg))
    return ERF_RETURNS_ARG | arg;

  if (fnspec.returns_noalias_p ())
    return ERF_NOALIAS;
  return 0;
}

/* Return true when FNDECL represents a call to setjmp.  */

bool
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return true;
  if (special_function_p (fndecl, 0) & ECF_RETURNS_TWICE)
    return true;

  return false;
}


/* Return true if STMT may be an alloca call.  */

bool
gimple_maybe_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true if STMT is a builtin alloca call.  */

bool
gimple_alloca_call_p (const gimple *stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return gimple_call_num_args (stmt) > 0;
      default:
        break;
      }

  return false;
}

/* Return true when exp contains a builtin alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        return true;
      default:
        break;
      }

  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
        flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      tree type = TREE_TYPE (CALL_EXPR_FN (t));
      if (type && TREE_CODE (type) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (type));
      else
        flags = 0;
      if (CALL_EXPR_BY_DESCRIPTOR (t))
        flags |= ECF_BY_DESCRIPTOR;
    }

  return flags;
}
/* Return true if ARG should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          arg.type = TREE_TYPE (first_field (type));
          arg.mode = TYPE_MODE (arg.type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}

/* Return true if TYPE should be passed by reference when passed to
   the "..." arguments of a function.  */

bool
pass_va_arg_by_reference (tree type)
{
  return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}

/* Decide whether ARG, which occurs in the state described by CA,
   should be passed by reference.  Return true if so and update
   ARG accordingly.  */

bool
apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
{
  if (pass_by_reference (ca, arg))
    {
      arg.type = build_pointer_type (arg.type);
      arg.mode = TYPE_MODE (arg.type);
      arg.pass_by_reference = true;
      return true;
    }
  return false;
}

/* Return true if ARG, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
  if (arg.type && TREE_ADDRESSABLE (arg.type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        machine_mode old_mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));

        /* Some ABIs require scalar floating point modes to be returned
           in a wider scalar integer mode.  We need to explicitly
           reinterpret to an integer mode of the correct precision
           before extending to the desired result.  */
        if (SCALAR_INT_MODE_P (args[i].mode)
            && SCALAR_FLOAT_MODE_P (old_mode)
            && known_gt (GET_MODE_SIZE (args[i].mode),
                         GET_MODE_SIZE (old_mode)))
          args[i].value = convert_float_to_wider_int (args[i].mode, old_mode,
                                                      args[i].value);
        else if (args[i].mode != old_mode)
          args[i].value = convert_modes (args[i].mode, old_mode,
                                         args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && (!targetm.legitimate_constant_p (args[i].mode, args[i].value)
                || targetm.precompute_tls_p (args[i].mode, args[i].value)))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && (set_src_cost (args[i].value, args[i].mode,
                                   optimize_insn_for_speed_p ())
                     > COSTS_N_INSNS (1))
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

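/* Scan the fixed argument area for bytes already in use.  If any are
   found, save the byte range [*LOW_TO_SAVE, *HIGH_TO_SAVE] into a pseudo
   or a BLKmode stack temporary and return it; return NULL_RTX if the
   area is unused.  */
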
static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        scalar_int_mode imode;
        if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
            && (low & (MIN (GET_MODE_SIZE (imode),
                            BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
          save_mode = imode;
        else
          save_mode = BLKmode;

        if (ARGS_GROW_DOWNWARD)
          delta = -high;
        else
          delta = low;

        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

  if (ARGS_GROW_DOWNWARD)
    delta = -high_to_save;
  else
    delta = low_to_save;

  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == PAD_DOWNWARD)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, false, NULL);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word, false, false);
          }
      }
}

/* Issue an error if CALL_EXPR was flagged as requiring
   tail-call optimization.  */

void
maybe_complain_about_tail_call (tree call_expr, const char *reason)
{
  gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
  if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
    return;

  error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ and FLAGS are pointers to integer flags which
   may be modified by this routine.

   MUST_PREALLOCATE is a pointer to bool which may be
   modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level,
                                 poly_int64_pod *old_pending_adj,
                                 bool *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

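        /* When the target asks for it, split a complex argument into its
           real and imaginary parts, giving each its own slot in ARGS.  */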
        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
        argpos++;
      }
  }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is true if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.cc.  */

      /* See if this argument should be passed by invisible reference.  */
      function_arg_info arg (type, argpos < n_named_args);
      if (pass_by_reference (args_so_far_pnt, arg))
        {
          const bool callee_copies
            = reference_callee_copied (args_so_far_pnt, arg);
          tree base;

          /* If we're compiling a thunk, pass directly the address of an object
             already in memory, instead of making a copy.  Likewise if we want
             to make the copy in the callee instead of the caller.  */
          if ((call_from_thunk_p || callee_copies)
              && TREE_CODE (args[i].tree_value) != WITH_SIZE_EXPR
              && ((base = get_base_address (args[i].tree_value)), true)
              && TREE_CODE (base) != SSA_NAME
              && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to a register.
                 We revert that optimization now because the tail call code
                 must use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                {
                  *may_tailcall = false;
                  maybe_complain_about_tail_call (exp,
                                                  "a callee-copied argument is"
                                                  " stored in the current"
                                                  " function's frame");
                }

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       max_int_size_in_bytes
                                                       (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
              maybe_complain_about_tail_call (exp,
                                              "argument must be passed"
                                              " by copying");
            }
          arg.pass_by_reference = true;
        }

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
        = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = true;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = true;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (arg.mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
         promoted_mode used for function_arg above.  However, the
         corresponding handling of incoming arguments in function.cc
         does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);
    }
}
1576
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the
     frame will be already aligned.  Align to STACK_BOUNDARY in order to
     prevent backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (multiple_p (stack_pointer_delta,
                                  preferred_stack_boundary));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (aligned_upper_bound (args_size->constant
                                                  + stack_pointer_delta,
                                                  preferred_stack_boundary)
                             - stack_pointer_delta);
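
      /* For example, assuming a 16-byte preferred boundary, a 20-byte
         argument block and stack_pointer_delta == 4, the rounding above
         yields aligned_upper_bound (20 + 4, 16) - 4 == 28, so pushing
         28 bytes leaves the stack pointer 16-byte aligned again.  */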

      args_size->constant = upper_bound (args_size->constant,
                                         reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}

/* Precompute parameters as needed for a function call.

   FLAGS is a mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
            }
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static bool
finalize_must_preallocate (bool must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      bool partial_seen = false;
      poly_int64 copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = true;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = true;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

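      /* Illustration: two 16-byte BLKmode CALL_EXPR arguments against a
         48-byte argument block give copy_to_evaluate_size * 2 == 64, which
         satisfies the "more than half" test below, so we preallocate.  */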
      if (maybe_ne (args_size->constant, 0)
          && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
        must_preallocate = true;
    }
  return must_preallocate;
}

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i;
      poly_int64 arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        {
          arg_reg = XEXP (argblock, 0);
          arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
        }

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;
          poly_uint64 units_on_stack = 0;
          machine_mode partial_mode = VOIDmode;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack
              && args[i].reg != 0
              && args[i].partial == 0)
            continue;

          if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
            continue;

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
          addr = plus_constant (Pmode, addr, arg_offset);
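
          /* Illustration: for an argument passed partly in registers and
             partly on the stack -- say 4 of 16 bytes on the stack on a
             32-bit target -- units_on_stack below is 4 and partial_mode
             becomes SImode, so a single word-sized reference is built.  */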
          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              units_on_stack = args[i].locate.size.constant;
              poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
              partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
              args[i].stack = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack, units_on_stack);
            }
          else
            {
              args[i].stack = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          poly_int64 offset_val;
          if (args[i].locate.where_pad != PAD_DOWNWARD)
            align = boundary;
          else if (poly_int_rtx_p (offset, &offset_val))
            {
              align = least_bit_hwi (boundary);
              unsigned int offset_align
                = known_alignment (offset_val) * BITS_PER_UNIT;
              if (offset_align != 0)
                align = MIN (align, offset_align);
            }
          set_mem_align (args[i].stack, align);

          addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
          addr = plus_constant (Pmode, addr, arg_offset);

          if (args[i].partial != 0)
            {
              /* Only part of the parameter is being passed on the stack.
                 Generate a simple memory reference of the correct size.  */
              args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
              set_mem_size (args[i].stack_slot, units_on_stack);
            }
          else
            {
              args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
              set_mem_attributes (args[i].stack_slot,
                                  TREE_TYPE (args[i].tree_value), 1);
            }
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
        TREE_USED (fndecl) = 1;

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}

/* Return the static chain for this function, if any.  */

rtx
rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
{
  if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
    return NULL;

  return targetm.calls.static_chain (fndecl_or_type, incoming_p);
}

/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

static rtx internal_arg_pointer_based_exp (const_rtx, bool);

/* Helper function for internal_arg_pointer_based_exp.  Scan insns in
   the tail call sequence, starting with the first insn that hasn't been
   scanned yet, and note for each pseudo on the LHS whether it is based
   on crtl->args.internal_arg_pointer or not, and what offset from that
   pointer it has.  */

static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
        {
          rtx val = NULL_RTX;
          unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
          /* Punt on pseudos set multiple times.  */
          if (idx < internal_arg_pointer_exp_state.cache.length ()
              && (internal_arg_pointer_exp_state.cache[idx]
                  != NULL_RTX))
            val = pc_rtx;
          else
            val = internal_arg_pointer_based_exp (SET_SRC (set), false);
          if (val != NULL_RTX)
            {
              if (idx >= internal_arg_pointer_exp_state.cache.length ())
                internal_arg_pointer_exp_state.cache
                  .safe_grow_cleared (idx + 1, true);
              internal_arg_pointer_exp_state.cache[idx] = val;
            }
        }
      if (NEXT_INSN (insn) == NULL_RTX)
        scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}

/* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
   NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
   it with fixed offset, or PC if this is with variable or unknown offset.
   TOPLEVEL is true if the function is invoked at the topmost level.  */
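
/* For example, a pseudo set from (plus (reg internal_arg_pointer)
   (const_int 8)) is cached as (const_int 8); querying
   (plus (reg <that pseudo>) (const_int 4)) then folds to (const_int 12),
   while a variable or unknown offset degrades to PC.  */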

static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  poly_int64 offset;
  if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
        return val;
      return plus_constant (Pmode, val, offset);
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
        return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
        return pc_rtx;
      if (MEM_P (x))
        iter.skip_subrtxes ();
    }

  return NULL_RTX;
}

/* Return true if SIZE bytes starting from address ADDR might overlap an
   already-clobbered argument area.  This function is used to determine
   if we should give up a sibcall.  */
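
/* For instance, assuming upward-growing arguments, no pretend args, and an
   8-byte argument already stored at offset 16, a load of 8 bytes from
   internal_arg_pointer + 16 hits bits 16..23 of stored_args_map and the
   sibcall is abandoned.  */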

static bool
mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
{
  poly_int64 i;
  unsigned HOST_WIDE_INT start, end;
  rtx val;

  if (bitmap_empty_p (stored_args_map)
      && stored_args_watermark == HOST_WIDE_INT_M1U)
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    return false;
  else if (!poly_int_rtx_p (val, &i))
    return true;

  if (known_eq (size, 0U))
    return false;

  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  /* We can ignore any references to the function's pretend args,
     which at this point would manifest as negative values of I.  */
  if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
    return false;

  start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
  if (!(i + size).is_constant (&end))
    end = HOST_WIDE_INT_M1U;

  if (end > stored_args_watermark)
    return true;

  end = MIN (end, SBITMAP_SIZE (stored_args_map));
  for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
    if (bitmap_bit_p (stored_args_map, k))
      return true;

  return false;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          bool *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          poly_int64 size = 0;
          HOST_WIDE_INT const_size = 0;
          rtx_insn *before_arg = get_last_insn ();
          tree tree_value = args[i].tree_value;
          tree type = TREE_TYPE (tree_value);
          if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
            type = TREE_TYPE (first_field (type));
          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (type) == BLKmode)
            {
              /* Variable-sized parameters should be described by a
                 PARALLEL instead.  */
              const_size = int_size_in_bytes (type);
              gcc_assert (const_size >= 0);
              nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
              size = const_size;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle the case where we have a value that needs shifting
                 up to the msb, e.g. a QImode value being padded upward on
                 a BYTES_BIG_ENDIAN machine.  */
              if (args[i].locate.where_pad
                  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
                {
                  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
                  if (maybe_lt (size, UNITS_PER_WORD))
                    {
                      rtx x;
                      poly_int64 shift
                        = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                      /* Assigning REG here rather than a temp makes
                         CALL_FUSAGE report the whole reg as used.
                         Strictly speaking, the call only uses SIZE
                         bytes at the msb end, but it doesn't seem worth
                         generating rtl to say that.  */
                      reg = gen_rtx_REG (word_mode, REGNO (reg));
                      x = expand_shift (LSHIFT_EXPR, word_mode,
                                        reg, shift, reg, 1);
                      if (x != reg)
                        emit_move_insn (reg, x);
                    }
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          /* If we need a single register and the source is a constant
             VAR_DECL with a simple constructor, expand that constructor
             via a pseudo rather than read from (possibly misaligned)
             memory.  PR middle-end/95126.  */
          else if (nregs == 1
                   && partial == 0
                   && !args[i].pass_on_stack
                   && VAR_P (tree_value)
                   && TREE_READONLY (tree_value)
                   && !TREE_SIDE_EFFECTS (tree_value)
                   && immediate_const_ctor_p (DECL_INITIAL (tree_value)))
            {
              rtx target = gen_reg_rtx (word_mode);
              store_constructor (DECL_INITIAL (tree_value), target, 0,
                                 int_expr_size (DECL_INITIAL (tree_value)),
                                 false);
              reg = gen_rtx_REG (word_mode, REGNO (reg));
              emit_move_insn (reg, target);
            }
          else if (partial == 0 || args[i].pass_on_stack)
            {
              /* SIZE and CONST_SIZE are 0 for partial arguments and
                 the size of a BLKmode type otherwise.  */
              gcc_checking_assert (known_eq (size, const_size));
              rtx mem = validize_mem (copy_rtx (args[i].value));

              /* Check for overlap with the already clobbered argument area,
                 provided that this has non-zero size.  */
              if (is_sibcall
                  && const_size != 0
                  && (mem_might_overlap_already_clobbered_arg_p
                      (XEXP (args[i].value, 0), const_size)))
                *sibcall_failure = true;

              if (const_size % UNITS_PER_WORD == 0
                  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
              else
                {
                  if (nregs > 1)
                    move_block_to_reg (REGNO (reg), mem, nregs - 1,
                                       args[i].mode);
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
                  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
                  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
                  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
                                             word_mode, word_mode, false,
                                             NULL);
                  if (BYTES_BIG_ENDIAN)
                    x = expand_shift (LSHIFT_EXPR, word_mode, x,
                                      BITS_PER_WORD - bitsize, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }

              /* Handle a BLKmode that needs shifting.  */
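              /* E.g., assuming 8-byte words, a 3-byte argument padded
                 downward gives shift == (8 - 3) * 8 == 40 below, and on a
                 big-endian target the value is shifted right so that it
                 ends up at the least significant end of the register.  */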
              if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == PAD_DOWNWARD
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
                  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
                  enum tree_code dir = (BYTES_BIG_ENDIAN
                                        ? RSHIFT_EXPR : LSHIFT_EXPR);
                  rtx x;

                  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
                  if (x != dest)
                    emit_move_insn (dest, x);
                }
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], false))
            *sibcall_failure = true;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg_mode (call_fusage, reg, TYPE_MODE (type));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}

/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we try to compute an adjustment to the stack pointer
   for an amount that will leave the stack under-aligned by
   UNADJUSTED_ARGS_SIZE bytes.  Then, when the arguments are pushed the
   stack will be perfectly aligned.

   Return true if this optimization is possible, storing the adjustment
   in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
   bytes that should be popped after the call.  */
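
/* For instance, assuming a 16-byte boundary, an aligned stack pointer,
   pending_stack_adjust == 20 and unadjusted_args_size == 8: the code
   below settles on an adjustment of 8, so 8 bytes are popped now, the
   8-byte argument push then restores 16-byte alignment, and
   ARGS_SIZE->CONSTANT becomes 20 - 8 + 8 == 20 bytes to pop after
   the call.  */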

static bool
combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
                                           poly_int64 unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  poly_int64 adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
                           preferred_unit_stack_boundary,
                           &unadjusted_alignment))
    return false;

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unsigned HOST_WIDE_INT tmp_misalignment;
  if (!known_misalignment (pending_stack_adjust,
                           preferred_unit_stack_boundary,
                           &tmp_misalignment))
    return false;
  unadjusted_alignment -= tmp_misalignment;
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;

  /* We need to know whether the adjusted argument size
     (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
     or a deallocation.  */
  if (!ordered_p (adjustment, unadjusted_args_size))
    return false;

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  *adjustment_out = adjustment;
  return true;
}

/* Scan expression X to see whether it dereferences any argument slots
   we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).
   Return true if X dereferences such an argument slot,
   false otherwise.  */

static bool
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  const char *fmt;

  if (x == NULL_RTX)
    return false;

  code = GET_CODE (x);

  /* We need not check the operands of the CALL expression itself.  */
  if (code == CALL)
    return false;

  if (code == MEM)
    return (mem_might_overlap_already_clobbered_arg_p
            (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return true;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return true;
        }
    }
  return false;
}

/* Scan the sequence after INSN to see whether it dereferences any argument
   slots we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for
   ARG to the stored_args_map bitmap afterwards (when ARG is a register
   MARK_STORED_ARGS_MAP should be false).  Return true if the sequence
   after INSN dereferences such argument slots, false otherwise.  */
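
/* When marking, an argument at slot offset 16 with a constant 8-byte size
   sets bits 16..23 of stored_args_map (assuming upward-growing arguments);
   an argument of variable size instead lowers stored_args_watermark.  */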

static bool
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
                                bool mark_stored_args_map)
{
  poly_uint64 low, high;
  unsigned HOST_WIDE_INT const_low, const_high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      if (ARGS_GROW_DOWNWARD)
        low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
        low = arg->locate.slot_offset.constant;
      high = low + arg->locate.size.constant;

      const_low = constant_lower_bound (low);
      if (high.is_constant (&const_high))
        for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
          bitmap_set_bit (stored_args_map, i);
      else
        stored_args_watermark = MIN (stored_args_watermark, const_low);
    }
  return insn != NULL_RTX;
}

/* Given that a function returns a value of mode MODE at the most
   significant end of hard register VALUE, shift VALUE left or right
   as specified by LEFT_P.  Return true if some action was needed.  */
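
/* E.g., an SImode value held at the top of a DImode hard register needs
   a shift of 64 - 32 == 32 bits.  */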

bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
  gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
  machine_mode value_mode = GET_MODE (value);
  poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);

  if (known_eq (shift, 0))
    return false;

  /* Use ashr rather than lshr for right shifts.  This is for the benefit
     of the MIPS port, which requires SImode values to be sign-extended
     when stored in 64-bit registers.  */
  if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
                           value, gen_int_shift_amount (value_mode, shift),
                           value, 1, OPTAB_WIDEN))
    gcc_unreachable ();
  return true;
}

/* If X is a likely-spilled register value, copy it to a pseudo
   register and return that register.  Return X otherwise.  */

static rtx
avoid_likely_spilled_reg (rtx x)
{
  rtx new_rtx;

  if (REG_P (x)
      && HARD_REGISTER_P (x)
      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
    {
      /* Make sure that we generate a REG rather than a CONCAT.
         Moves into CONCATs can need nontrivial instructions,
         and the whole point of this function is to avoid
         using the hard register directly in such a situation.  */
      generating_concat_p = 0;
      new_rtx = gen_reg_rtx (GET_MODE (x));
      generating_concat_p = 1;
      emit_move_insn (new_rtx, x);
      return new_rtx;
    }
  return x;
}

/* Helper function for expand_call.
   Return false if EXP is not implementable as a sibling call.  */

static bool
can_implement_as_sibling_call_p (tree exp,
                                 rtx structure_value_addr,
                                 tree funtype,
                                 tree fndecl,
                                 int flags,
                                 tree addr,
                                 const args_size &args_size)
{
  if (!targetm.have_sibcall_epilogue ())
    {
      maybe_complain_about_tail_call
        (exp,
         "machine description does not have"
         " a sibcall_epilogue instruction pattern");
      return false;
    }

  /* Doing sibling call optimization needs some work, since
     structure_value_addr can be allocated on the stack.
     It does not seem worth the effort since few optimizable
     sibling calls will return a structure.  */
  if (structure_value_addr != NULL_RTX)
    {
      maybe_complain_about_tail_call (exp, "callee returns a structure");
      return false;
    }

  /* Check whether the target is able to optimize the call
     into a sibcall.  */
  if (!targetm.function_ok_for_sibcall (fndecl, exp))
    {
      maybe_complain_about_tail_call (exp,
                                      "target is not able to optimize the"
                                      " call into a sibling call");
      return false;
    }

  /* Functions that do not return exactly once may not be sibcall
     optimized.  */
  if (flags & ECF_RETURNS_TWICE)
    {
      maybe_complain_about_tail_call (exp, "callee returns twice");
      return false;
    }
  if (flags & ECF_NORETURN)
    {
      maybe_complain_about_tail_call (exp, "callee does not return");
      return false;
    }

  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
    {
      maybe_complain_about_tail_call (exp, "volatile function type");
      return false;
    }

  /* __sanitizer_cov_trace_pc is supposed to inspect its return address
     to identify the caller, and therefore should not be tailcalled.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SANITIZER_COV_TRACE_PC)
    {
      /* No need for maybe_complain_about_tail_call here:
         the call is synthesized by the compiler.  */
      return false;
    }

  /* If the called function is nested in the current one, it might access
     some of the caller's arguments, but could clobber them beforehand if
     the argument areas are shared.  */
  if (fndecl && decl_function_context (fndecl) == current_function_decl)
    {
      maybe_complain_about_tail_call (exp, "nested function");
      return false;
    }

  /* If this function requires more stack slots than the current
     function, we cannot change it into a sibling call.
     crtl->args.pretend_args_size is not part of the
     stack allocated by our caller.  */
  if (maybe_gt (args_size.constant,
                crtl->args.size - crtl->args.pretend_args_size))
    {
      maybe_complain_about_tail_call (exp,
                                      "callee required more stack slots"
                                      " than the caller");
      return false;
    }

  /* If the callee pops its own arguments, then it must pop exactly
     the same number of arguments as the current function.  */
  if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
                                                args_size.constant),
                targetm.calls.return_pops_args (current_function_decl,
                                                TREE_TYPE
                                                (current_function_decl),
                                                crtl->args.size)))
    {
      maybe_complain_about_tail_call (exp,
                                      "inconsistent number of"
                                      " popped arguments");
      return false;
    }

  if (!lang_hooks.decls.ok_for_sibcall (fndecl))
    {
      maybe_complain_about_tail_call (exp, "frontend does not support"
                                      " sibling call");
      return false;
    }

  /* All checks passed.  */
  return true;
}

/* Update stack alignment when the parameter is passed in the stack
   since the outgoing parameter requires extra alignment on the calling
   function side.  */

static void
update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
{
  if (crtl->stack_alignment_needed < locate->boundary)
    crtl->stack_alignment_needed = locate->boundary;
  if (crtl->preferred_stack_boundary < locate->boundary)
    crtl->preferred_stack_boundary = locate->boundary;
}
2620
5039610b 2621/* Generate all the code for a CALL_EXPR exp
51bbfa0c
RS
2622 and return an rtx for its value.
2623 Store the value in TARGET (specified as an rtx) if convenient.
2624 If the value is stored in TARGET then TARGET is returned.
2625 If IGNORE is nonzero, then we ignore the value of the function call. */
2626
2627rtx
d329e058 2628expand_call (tree exp, rtx target, int ignore)
51bbfa0c 2629{
0a1c58a2
JL
2630 /* Nonzero if we are currently expanding a call. */
2631 static int currently_expanding_call = 0;
2632
51bbfa0c
RS
2633 /* RTX for the function to be called. */
2634 rtx funexp;
0a1c58a2 2635 /* Sequence of insns to perform a normal "call". */
48810515 2636 rtx_insn *normal_call_insns = NULL;
6de9cd9a 2637 /* Sequence of insns to perform a tail "call". */
48810515 2638 rtx_insn *tail_call_insns = NULL;
51bbfa0c
RS
2639 /* Data type of the function. */
2640 tree funtype;
ded9bf77 2641 tree type_arg_types;
28ed065e 2642 tree rettype;
51bbfa0c
RS
2643 /* Declaration of the function being called,
2644 or 0 if the function is computed (not known by name). */
2645 tree fndecl = 0;
57782ad8
MM
2646 /* The type of the function being called. */
2647 tree fntype;
6de9cd9a 2648 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
9a385c2d 2649 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
0a1c58a2 2650 int pass;
51bbfa0c
RS
2651
2652 /* Register in which non-BLKmode value will be returned,
2653 or 0 if no value or if value is BLKmode. */
2654 rtx valreg;
2655 /* Address where we should return a BLKmode value;
2656 0 if value not BLKmode. */
2657 rtx structure_value_addr = 0;
2658 /* Nonzero if that address is being passed by treating it as
2659 an extra, implicit first parameter. Otherwise,
2660 it is passed by being copied directly into struct_value_rtx. */
2661 int structure_value_addr_parm = 0;
078a18a4
SL
2662 /* Holds the value of implicit argument for the struct value. */
2663 tree structure_value_addr_value = NULL_TREE;
51bbfa0c
RS
2664 /* Size of aggregate value wanted, or zero if none wanted
2665 or if we are using the non-reentrant PCC calling convention
2666 or expecting the value in registers. */
5c8e61cf 2667 poly_int64 struct_value_size = 0;
4c7d264e 2668 /* True if called function returns an aggregate in memory PCC style,
51bbfa0c 2669 by returning the address of where to find it. */
4c7d264e 2670 bool pcc_struct_value = false;
61f71b34 2671 rtx struct_value = 0;
51bbfa0c
RS
2672
2673 /* Number of actual parameters in this call, including struct value addr. */
2674 int num_actuals;
2675 /* Number of named args. Args after this are anonymous ones
2676 and they must all go on the stack. */
2677 int n_named_args;
078a18a4
SL
2678 /* Number of complex actual arguments that need to be split. */
2679 int num_complex_actuals = 0;
51bbfa0c
RS
2680
2681 /* Vector of information about each argument.
2682 Arguments are numbered in the order they will be pushed,
2683 not the order they are written. */
2684 struct arg_data *args;
2685
2686 /* Total size in bytes of all the stack-parms scanned so far. */
2687 struct args_size args_size;
099e9712 2688 struct args_size adjusted_args_size;
51bbfa0c 2689 /* Size of arguments before any adjustments (such as rounding). */
a20c5714 2690 poly_int64 unadjusted_args_size;
51bbfa0c 2691 /* Data on reg parms scanned so far. */
d5cc9181
JR
2692 CUMULATIVE_ARGS args_so_far_v;
2693 cumulative_args_t args_so_far;
51bbfa0c
RS
2694 /* Nonzero if a reg parm has been scanned. */
2695 int reg_parm_seen;
2696
4c7d264e 2697 /* True if we must avoid push-insns in the args for this call.
51bbfa0c
RS
2698 If stack space is allocated for register parameters, but not by the
2699 caller, then it is preallocated in the fixed part of the stack frame.
2700 So the entire argument block must then be preallocated (i.e., we
2701 ignore PUSH_ROUNDING in that case). */
4c7d264e 2702 bool must_preallocate = !targetm.calls.push_argument (0);
51bbfa0c 2703
f72aed24 2704 /* Size of the stack reserved for parameter registers. */
6f90e075
JW
2705 int reg_parm_stack_space = 0;
2706
51bbfa0c
RS
2707 /* Address of space preallocated for stack parms
2708 (on machines that lack push insns), or 0 if space not preallocated. */
2709 rtx argblock = 0;
2710
e384e6b5 2711 /* Mask of ECF_ and ERF_ flags. */
f2d33f13 2712 int flags = 0;
e384e6b5 2713 int return_flags = 0;
f73ad30e 2714#ifdef REG_PARM_STACK_SPACE
51bbfa0c 2715 /* Define the boundary of the register parm stack space that needs to be
b820d2b8
AM
2716 saved, if any. */
2717 int low_to_save, high_to_save;
51bbfa0c
RS
2718 rtx save_area = 0; /* Place that it is saved */
2719#endif
2720
a20c5714 2721 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
51bbfa0c 2722 char *initial_stack_usage_map = stack_usage_map;
a20c5714 2723 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 2724 char *stack_usage_map_buf = NULL;
51bbfa0c 2725
a20c5714 2726 poly_int64 old_stack_allocated;
38afb23f
OH
2727
2728 /* State variables to track stack modifications. */
51bbfa0c 2729 rtx old_stack_level = 0;
38afb23f 2730 int old_stack_arg_under_construction = 0;
a20c5714 2731 poly_int64 old_pending_adj = 0;
51bbfa0c 2732 int old_inhibit_defer_pop = inhibit_defer_pop;
38afb23f
OH
2733
2734 /* Some stack pointer alterations we make are performed via
2735 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2736 which we then also need to save/restore along the way. */
a20c5714 2737 poly_int64 old_stack_pointer_delta = 0;
38afb23f 2738
0a1c58a2 2739 rtx call_fusage;
5039610b 2740 tree addr = CALL_EXPR_FN (exp);
b3694847 2741 int i;
739fb049 2742 /* The alignment of the stack, in bits. */
95899b34 2743 unsigned HOST_WIDE_INT preferred_stack_boundary;
739fb049 2744 /* The alignment of the stack, in bytes. */
95899b34 2745 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
6de9cd9a
DN
2746 /* The static chain value to use for this call. */
2747 rtx static_chain_value;
f2d33f13
JH
2748 /* See if this is "nothrow" function call. */
2749 if (TREE_NOTHROW (exp))
2750 flags |= ECF_NOTHROW;
2751
6de9cd9a
DN
2752 /* See if we can find a DECL-node for the actual function, and get the
2753 function attributes (flags) from the function decl or type node. */
39b0dce7
JM
2754 fndecl = get_callee_fndecl (exp);
2755 if (fndecl)
51bbfa0c 2756 {
57782ad8 2757 fntype = TREE_TYPE (fndecl);
39b0dce7 2758 flags |= flags_from_decl_or_type (fndecl);
e384e6b5 2759 return_flags |= decl_return_flags (fndecl);
51bbfa0c 2760 }
39b0dce7 2761 else
72954a4f 2762 {
28ed065e 2763 fntype = TREE_TYPE (TREE_TYPE (addr));
57782ad8 2764 flags |= flags_from_decl_or_type (fntype);
4c640e26
EB
2765 if (CALL_EXPR_BY_DESCRIPTOR (exp))
2766 flags |= ECF_BY_DESCRIPTOR;
72954a4f 2767 }
28ed065e 2768 rettype = TREE_TYPE (exp);
7393c642 2769
57782ad8 2770 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
61f71b34 2771
8c6a8269
RS
2772 /* Warn if this value is an aggregate type,
2773 regardless of which calling convention we are using for it. */
28ed065e 2774 if (AGGREGATE_TYPE_P (rettype))
ccf08a6e 2775 warning (OPT_Waggregate_return, "function call has aggregate value");
8c6a8269 2776
becfd6e5
KZ
2777 /* If the result of a non looping pure or const function call is
2778 ignored (or void), and none of its arguments are volatile, we can
2779 avoid expanding the call and just evaluate the arguments for
2780 side-effects. */
8c6a8269 2781 if ((flags & (ECF_CONST | ECF_PURE))
becfd6e5 2782 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
8ebf6b99 2783 && (flags & ECF_NOTHROW)
8c6a8269 2784 && (ignore || target == const0_rtx
28ed065e 2785 || TYPE_MODE (rettype) == VOIDmode))
8c6a8269
RS
2786 {
2787 bool volatilep = false;
2788 tree arg;
078a18a4 2789 call_expr_arg_iterator iter;
8c6a8269 2790
078a18a4
SL
2791 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2792 if (TREE_THIS_VOLATILE (arg))
8c6a8269
RS
2793 {
2794 volatilep = true;
2795 break;
2796 }
2797
2798 if (! volatilep)
2799 {
078a18a4
SL
2800 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2801 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8c6a8269
RS
2802 return const0_rtx;
2803 }
2804 }
2805
6f90e075 2806#ifdef REG_PARM_STACK_SPACE
5d059ed9 2807 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
6f90e075 2808#endif
6f90e075 2809
5d059ed9 2810 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
967b4653 2811 && reg_parm_stack_space > 0 && targetm.calls.push_argument (0))
4c7d264e 2812 must_preallocate = true;
e5e809f4 2813
51bbfa0c
RS
2814 /* Set up a place to return a structure. */
2815
2816 /* Cater to broken compilers. */
d47d0a8d 2817 if (aggregate_value_p (exp, fntype))
51bbfa0c
RS
2818 {
2819 /* This call returns a big structure. */
84b8030f 2820 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
51bbfa0c
RS
2821
2822#ifdef PCC_STATIC_STRUCT_RETURN
9e7b1d0a 2823 {
4c7d264e 2824 pcc_struct_value = true;
9e7b1d0a
RS
2825 }
2826#else /* not PCC_STATIC_STRUCT_RETURN */
2827 {
5c8e61cf
RS
2828 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
2829 struct_value_size = -1;
51bbfa0c 2830
391756ad
EB
2831 /* Even if it is semantically safe to use the target as the return
2832 slot, it may be not sufficiently aligned for the return type. */
2833 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2834 && target
2835 && MEM_P (target)
ffc8b52f
JJ
2836 /* If rettype is addressable, we may not create a temporary.
2837 If target is properly aligned at runtime and the compiler
2838 just doesn't know about it, it will work fine, otherwise it
2839 will be UB. */
2840 && (TREE_ADDRESSABLE (rettype)
2841 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2842 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
2843 MEM_ALIGN (target)))))
9e7b1d0a
RS
2844 structure_value_addr = XEXP (target, 0);
2845 else
2846 {
9e7b1d0a
RS
2847 /* For variable-sized objects, we must be called with a target
2848 specified. If we were to allocate space on the stack here,
2849 we would have no way of knowing when to free it. */
9474e8ab 2850 rtx d = assign_temp (rettype, 1, 1);
4361b41d 2851 structure_value_addr = XEXP (d, 0);
9e7b1d0a
RS
2852 target = 0;
2853 }
2854 }
2855#endif /* not PCC_STATIC_STRUCT_RETURN */
51bbfa0c
RS
2856 }
2857
099e9712 2858 /* Figure out the amount to which the stack should be aligned. */
099e9712 2859 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
b255a036
JH
2860 if (fndecl)
2861 {
3dafb85c 2862 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
17b29c0a
L
2863 /* Without automatic stack alignment, we can't increase preferred
2864 stack boundary. With automatic stack alignment, it is
2865 unnecessary since unless we can guarantee that all callers will
2866 align the outgoing stack properly, callee has to align its
2867 stack anyway. */
2868 if (i
2869 && i->preferred_incoming_stack_boundary
2870 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
b255a036
JH
2871 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2872 }
099e9712
JH
2873
2874 /* Operand 0 is a pointer-to-function; get the type of the function. */
09e2bf48 2875 funtype = TREE_TYPE (addr);
366de0ce 2876 gcc_assert (POINTER_TYPE_P (funtype));
099e9712
JH
2877 funtype = TREE_TYPE (funtype);
2878
078a18a4
SL
2879 /* Count whether there are actual complex arguments that need to be split
2880 into their real and imaginary parts. Munge the type_arg_types
2881 appropriately here as well. */
42ba5130 2882 if (targetm.calls.split_complex_arg)
ded9bf77 2883 {
078a18a4
SL
2884 call_expr_arg_iterator iter;
2885 tree arg;
2886 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2887 {
2888 tree type = TREE_TYPE (arg);
2889 if (type && TREE_CODE (type) == COMPLEX_TYPE
2890 && targetm.calls.split_complex_arg (type))
2891 num_complex_actuals++;
2892 }
ded9bf77 2893 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
ded9bf77
AH
2894 }
2895 else
2896 type_arg_types = TYPE_ARG_TYPES (funtype);
2897
099e9712 2898 if (flags & ECF_MAY_BE_ALLOCA)
e3b5732b 2899 cfun->calls_alloca = 1;
099e9712
JH
2900
2901 /* If struct_value_rtx is 0, it means pass the address
078a18a4
SL
2902 as if it were an extra parameter. Put the argument expression
2903 in structure_value_addr_value. */
61f71b34 2904 if (structure_value_addr && struct_value == 0)
099e9712
JH
2905 {
2906 /* If structure_value_addr is a REG other than
2907 virtual_outgoing_args_rtx, we can use always use it. If it
2908 is not a REG, we must always copy it into a register.
2909 If it is virtual_outgoing_args_rtx, we must copy it to another
2910 register in some cases. */
f8cfc6aa 2911 rtx temp = (!REG_P (structure_value_addr)
099e9712
JH
2912 || (ACCUMULATE_OUTGOING_ARGS
2913 && stack_arg_under_construction
2914 && structure_value_addr == virtual_outgoing_args_rtx)
7ae4ad28 2915 ? copy_addr_to_reg (convert_memory_address
57782ad8 2916 (Pmode, structure_value_addr))
099e9712
JH
2917 : structure_value_addr);
2918
078a18a4
SL
2919 structure_value_addr_value =
2920 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
31db0fe0 2921 structure_value_addr_parm = 1;
099e9712
JH
2922 }
2923
2924 /* Count the arguments and set NUM_ACTUALS. */
b1879fb8
JJ
2925 num_actuals
2926 = call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
099e9712
JH
2927
2928 /* Compute number of named args.
3a4d587b
AM
2929 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2930
2931 if (type_arg_types != 0)
2932 n_named_args
2933 = (list_length (type_arg_types)
2934 /* Count the struct value address, if it is passed as a parm. */
2935 + structure_value_addr_parm);
b1879fb8
JJ
2936 else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
2937 n_named_args = 0;
3a4d587b
AM
2938 else
2939 /* If we know nothing, treat all args as named. */
2940 n_named_args = num_actuals;
2941
2942 /* Start updating where the next arg would go.
2943
2944 On some machines (such as the PA) indirect calls have a different
2945 calling convention than normal calls. The fourth argument in
2946 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2947 or not. */
d5cc9181
JR
2948 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2949 args_so_far = pack_cumulative_args (&args_so_far_v);
3a4d587b
AM
2950
2951 /* Now possibly adjust the number of named args.
099e9712 2952 Normally, don't include the last named arg if anonymous args follow.
3a179764
KH
2953 We do include the last named arg if
2954 targetm.calls.strict_argument_naming() returns nonzero.
099e9712
JH
2955 (If no anonymous args follow, the result of list_length is actually
2956 one too large. This is harmless.)
2957
4ac8340c 2958 If targetm.calls.pretend_outgoing_varargs_named() returns
3a179764
KH
2959 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2960 this machine will be able to place unnamed args that were passed
2961 in registers into the stack. So treat all args as named. This
2962 allows the insns emitted for a specific argument list to be
2963 independent of the function declaration.
4ac8340c
KH
2964
2965 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2966 we do not have any reliable way to pass unnamed args in
2967 registers, so we must force them into memory. */
099e9712 2968
3a4d587b 2969 if (type_arg_types != 0
d5cc9181 2970 && targetm.calls.strict_argument_naming (args_so_far))
3a4d587b
AM
2971 ;
2972 else if (type_arg_types != 0
d5cc9181 2973 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3a4d587b
AM
2974 /* Don't include the last named arg. */
2975 --n_named_args;
b1879fb8
JJ
2976 else if (TYPE_NO_NAMED_ARGS_STDARG_P (funtype))
2977 n_named_args = 0;
099e9712 2978 else
3a4d587b 2979 /* Treat all args as named. */
099e9712
JH
2980 n_named_args = num_actuals;
2981
099e9712 2982 /* Make a vector to hold all the information about each arg. */
765fc0f7 2983 args = XCNEWVEC (struct arg_data, num_actuals);
099e9712 2984
d80d2d2a
KH
2985 /* Build up entries in the ARGS array, compute the size of the
2986 arguments into ARGS_SIZE, etc. */
099e9712 2987 initialize_argument_information (num_actuals, args, &args_size,
078a18a4 2988 n_named_args, exp,
45769134 2989 structure_value_addr_value, fndecl, fntype,
d5cc9181 2990 args_so_far, reg_parm_stack_space,
099e9712 2991 &old_stack_level, &old_pending_adj,
dd292d0a 2992 &must_preallocate, &flags,
6de9cd9a 2993 &try_tail_call, CALL_FROM_THUNK_P (exp));
099e9712
JH
2994
2995 if (args_size.var)
4c7d264e 2996 must_preallocate = true;
099e9712
JH
2997
2998 /* Now make final decision about preallocating stack space. */
2999 must_preallocate = finalize_must_preallocate (must_preallocate,
3000 num_actuals, args,
3001 &args_size);
3002
3003 /* If the structure value address will reference the stack pointer, we
3004 must stabilize it. We don't need to do this if we know that we are
3005 not going to adjust the stack pointer in processing this call. */
3006
3007 if (structure_value_addr
3008 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3009 || reg_mentioned_p (virtual_outgoing_args_rtx,
3010 structure_value_addr))
3011 && (args_size.var
a20c5714
RS
3012 || (!ACCUMULATE_OUTGOING_ARGS
3013 && maybe_ne (args_size.constant, 0))))
099e9712 3014 structure_value_addr = copy_to_reg (structure_value_addr);
0a1c58a2 3015
7ae4ad28 3016 /* Tail calls can make things harder to debug, and we've traditionally
194c7c45 3017 pushed these optimizations into -O2. Don't try if we're already
fb158467 3018 expanding a call, as that means we're an argument. Don't try if
3fbd86b1 3019 there are cleanups, as we know there's code to follow the call. */
099e9712 3020 if (currently_expanding_call++ != 0
44662f68 3021 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
6de9cd9a 3022 || args_size.var
6fb5fa3c 3023 || dbg_cnt (tail_call) == false)
6de9cd9a 3024 try_tail_call = 0;
099e9712 3025
4b8e35f1
JJ
3026 /* Work around buggy C/C++ wrappers around Fortran routines with
3027 character(len=constant) arguments if the hidden string length arguments
3028 are passed on the stack; if the callers forget to pass those arguments,
3029 attempting to tail call in such routines leads to stack corruption.
3030 Avoid tail calls in functions where at least one such hidden string
3031 length argument is passed (partially or fully) on the stack in the
3032 caller and the callee needs to pass any arguments on the stack.
3033 See PR90329. */
3034 if (try_tail_call && maybe_ne (args_size.constant, 0))
3035 for (tree arg = DECL_ARGUMENTS (current_function_decl);
3036 arg; arg = DECL_CHAIN (arg))
3037 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
3038 {
3039 subrtx_iterator::array_type array;
3040 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
3041 if (MEM_P (*iter))
3042 {
3043 try_tail_call = 0;
3044 break;
3045 }
3046 }
3047
9a385c2d
DM
3048 /* If the user has marked the function as requiring tail-call
3049 optimization, attempt it. */
3050 if (must_tail_call)
3051 try_tail_call = 1;
3052
099e9712 3053 /* Check the remaining reasons for the tail call optimization to fail. */
b40d90e6 3054 if (try_tail_call)
9a385c2d
DM
3055 try_tail_call = can_implement_as_sibling_call_p (exp,
3056 structure_value_addr,
3057 funtype,
9a385c2d 3058 fndecl,
b40d90e6 3059 flags, addr, args_size);
497eb8c3 3060
c69cd1f5
JJ
3061 /* Check if caller and callee disagree in promotion of function
3062 return value. */
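 /* For instance, if the caller's return value is promoted to a wider
    integer mode with one signedness but the callee would promote its
    return value differently (or produce it in a narrower mode), a tail
    call would leave a value in the return register that is extended
    the wrong way for our own caller, so we must not sibcall. */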
3063 if (try_tail_call)
3064 {
ef4bddc2
RS
3065 machine_mode caller_mode, caller_promoted_mode;
3066 machine_mode callee_mode, callee_promoted_mode;
c69cd1f5
JJ
3067 int caller_unsignedp, callee_unsignedp;
3068 tree caller_res = DECL_RESULT (current_function_decl);
3069
3070 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
cde0f3fd 3071 caller_mode = DECL_MODE (caller_res);
c69cd1f5 3072 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
cde0f3fd
PB
3073 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3074 caller_promoted_mode
3075 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3076 &caller_unsignedp,
3077 TREE_TYPE (current_function_decl), 1);
3078 callee_promoted_mode
666e3ceb 3079 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
cde0f3fd 3080 &callee_unsignedp,
666e3ceb 3081 funtype, 1);
c69cd1f5
JJ
3082 if (caller_mode != VOIDmode
3083 && (caller_promoted_mode != callee_promoted_mode
3084 || ((caller_mode != caller_promoted_mode
3085 || callee_mode != callee_promoted_mode)
3086 && (caller_unsignedp != callee_unsignedp
bd4288c0 3087 || partial_subreg_p (caller_mode, callee_mode)))))
9a385c2d
DM
3088 {
3089 try_tail_call = 0;
3090 maybe_complain_about_tail_call (exp,
3091 "caller and callee disagree in"
3092 " promotion of function"
3093 " return value");
3094 }
c69cd1f5
JJ
3095 }
3096
01973e26
L
3097 /* Ensure current function's preferred stack boundary is at least
3098 what we need. Stack alignment may also increase preferred stack
3099 boundary. */
957ed738
L
3100 for (i = 0; i < num_actuals; i++)
3101 if (reg_parm_stack_space > 0
3102 || args[i].reg == 0
3103 || args[i].partial != 0
3104 || args[i].pass_on_stack)
3105 update_stack_alignment_for_call (&args[i].locate);
b5f772ce 3106 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
cb91fab0 3107 crtl->preferred_stack_boundary = preferred_stack_boundary;
01973e26
L
3108 else
3109 preferred_stack_boundary = crtl->preferred_stack_boundary;
c2f8b491 3110
099e9712 3111 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
497eb8c3 3112
3cf3da88
EB
3113 if (flag_callgraph_info)
3114 record_final_call (fndecl, EXPR_LOCATION (exp));
3115
0a1c58a2
JL
3116 /* We want to make two insn chains; one for a sibling call, the other
3117 for a normal call. We will select one of the two chains after
3118 initial RTL generation is complete. */
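 /* Pass 0 builds the sibling-call sequence, pass 1 the normal-call
    sequence; when TRY_TAIL_CALL is zero we skip directly to pass 1. */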
b820d2b8 3119 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
0a1c58a2 3120 {
4c7d264e 3121 bool sibcall_failure = false;
6adbb51e 3122 bool normal_failure = false;
f5143c46 3123 /* We want to emit any pending stack adjustments before the tail
0a1c58a2 3124 recursion "call". That way we know any adjustment after the tail
7ae4ad28 3125 recursion call can be ignored if we indeed use the tail
0a1c58a2 3126 call expansion. */
7f2f0a01 3127 saved_pending_stack_adjust save;
48810515
DM
3128 rtx_insn *insns, *before_call, *after_args;
3129 rtx next_arg_reg;
39842893 3130
0a1c58a2
JL
3131 if (pass == 0)
3132 {
0a1c58a2
JL
3133 /* State variables we need to save and restore between
3134 iterations. */
7f2f0a01 3135 save_pending_stack_adjust (&save);
0a1c58a2 3136 }
f2d33f13
JH
3137 if (pass)
3138 flags &= ~ECF_SIBCALL;
3139 else
3140 flags |= ECF_SIBCALL;
51bbfa0c 3141
0a1c58a2 3142 /* Other state variables that we must reinitialize each time
f2d33f13 3143 through the loop (that are not initialized by the loop itself). */
0a1c58a2
JL
3144 argblock = 0;
3145 call_fusage = 0;
fa76d9e0 3146
f725a3ec 3147 /* Start a new sequence for the normal call case.
51bbfa0c 3148
0a1c58a2
JL
3149 From this point on, if the sibling call fails, we want to set
3150 sibcall_failure instead of continuing the loop. */
3151 start_sequence ();
eecb6f50 3152
0a1c58a2
JL
3153 /* Don't let pending stack adjusts add up to too much.
3154 Also, do all pending adjustments now if there is any chance
3155 this might be a call to alloca or if we are expanding a sibling
9dd9bf80 3156 call sequence.
63579539
DJ
3157 Also do the adjustments before a throwing call, otherwise
3158 exception handling can fail; PR 19225. */
a20c5714
RS
3159 if (maybe_ge (pending_stack_adjust, 32)
3160 || (maybe_ne (pending_stack_adjust, 0)
9dd9bf80 3161 && (flags & ECF_MAY_BE_ALLOCA))
a20c5714 3162 || (maybe_ne (pending_stack_adjust, 0)
63579539 3163 && flag_exceptions && !(flags & ECF_NOTHROW))
0a1c58a2
JL
3164 || pass == 0)
3165 do_pending_stack_adjust ();
51bbfa0c 3166
0a1c58a2 3167 /* Precompute any arguments as needed. */
f8a097cd 3168 if (pass)
84b8030f 3169 precompute_arguments (num_actuals, args);
51bbfa0c 3170
0a1c58a2
JL
3171 /* Now we are about to start emitting insns that can be deleted
3172 if a libcall is deleted. */
84b8030f 3173 if (pass && (flags & ECF_MALLOC))
0a1c58a2 3174 start_sequence ();
51bbfa0c 3175
a25982ad
L
3176 /* Check the canary value for a sibcall, or for a call to a function
3177 which doesn't return and could throw. */
3178 if ((pass == 0
3179 || ((flags & ECF_NORETURN) != 0 && tree_could_throw_p (exp)))
87a5dc2d
JW
3180 && crtl->stack_protect_guard
3181 && targetm.stack_protect_runtime_enabled_p ())
b755446c
RH
3182 stack_protect_epilogue ();
3183
099e9712 3184 adjusted_args_size = args_size;
ce48579b
RH
3185 /* Compute the actual size of the argument block required. The variable
3186 and constant sizes must be combined, the size may have to be rounded,
3187 and there may be a minimum required size. When generating a sibcall
3188 pattern, do not round up, since we'll be re-using whatever space our
3189 caller provided. */
3190 unadjusted_args_size
f725a3ec
KH
3191 = compute_argument_block_size (reg_parm_stack_space,
3192 &adjusted_args_size,
5d059ed9 3193 fndecl, fntype,
ce48579b
RH
3194 (pass == 0 ? 0
3195 : preferred_stack_boundary));
3196
f725a3ec 3197 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
ce48579b 3198
f8a097cd 3199 /* The argument block when performing a sibling call is the
c22cacf3 3200 incoming argument block. */
f8a097cd 3201 if (pass == 0)
c67846f2 3202 {
2e3f842f 3203 argblock = crtl->args.internal_arg_pointer;
76e048a8
KT
3204 if (STACK_GROWS_DOWNWARD)
3205 argblock
3206 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3207 else
3208 argblock
3209 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3210
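 /* STORED_ARGS_MAP records which bytes of the incoming argument area
    a sibcall's argument stores have already overwritten, so that
    check_sibcall_argument_overlap can reject a sibcall whose later
    arguments would read bytes we have already clobbered. */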
a20c5714
RS
3211 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3212 stored_args_map = sbitmap_alloc (map_size);
f61e445a 3213 bitmap_clear (stored_args_map);
a20c5714 3214 stored_args_watermark = HOST_WIDE_INT_M1U;
c67846f2 3215 }
ce48579b 3216
0a1c58a2
JL
3217 /* If we have no actual push instructions, or shouldn't use them,
3218 make space for all args right now. */
099e9712 3219 else if (adjusted_args_size.var != 0)
51bbfa0c 3220 {
0a1c58a2
JL
3221 if (old_stack_level == 0)
3222 {
9eac0f2a 3223 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f 3224 old_stack_pointer_delta = stack_pointer_delta;
0a1c58a2
JL
3225 old_pending_adj = pending_stack_adjust;
3226 pending_stack_adjust = 0;
0a1c58a2
JL
3227 /* stack_arg_under_construction says whether a stack arg is
3228 being constructed at the old stack level. Pushing the stack
3229 gets a clean outgoing argument block. */
3230 old_stack_arg_under_construction = stack_arg_under_construction;
3231 stack_arg_under_construction = 0;
0a1c58a2 3232 }
099e9712 3233 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
a11e0df4 3234 if (flag_stack_usage_info)
d3c12306 3235 current_function_has_unbounded_dynamic_stack_size = 1;
51bbfa0c 3236 }
0a1c58a2
JL
3237 else
3238 {
3239 /* Note that we must go through the motions of allocating an argument
3240 block even if the size is zero because we may be storing args
3241 in the area reserved for register arguments, which may be part of
3242 the stack frame. */
26a258fe 3243
a20c5714 3244 poly_int64 needed = adjusted_args_size.constant;
51bbfa0c 3245
0a1c58a2
JL
3246 /* Store the maximum argument space used. It will be pushed by
3247 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3248 checking). */
51bbfa0c 3249
a20c5714
RS
3250 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3251 needed);
51bbfa0c 3252
0a1c58a2
JL
3253 if (must_preallocate)
3254 {
f73ad30e
JH
3255 if (ACCUMULATE_OUTGOING_ARGS)
3256 {
f8a097cd
JH
3257 /* Since the stack pointer will never be pushed, it is
3258 possible for the evaluation of a parm to clobber
3259 something we have already written to the stack.
3260 Since most function calls on RISC machines do not use
3261 the stack, this is uncommon, but must work correctly.
26a258fe 3262
f73ad30e 3263 Therefore, we save any area of the stack that was already
f8a097cd
JH
3264 written and that we are using. Here we set up to do this
3265 by making a new stack usage map from the old one. The
f725a3ec 3266 actual save will be done by store_one_arg.
26a258fe 3267
f73ad30e
JH
3268 Another approach might be to try to reorder the argument
3269 evaluations to avoid this conflicting stack usage. */
26a258fe 3270
f8a097cd
JH
3271 /* Since we will be writing into the entire argument area,
3272 the map must be allocated for its entire size, not just
3273 the part that is the responsibility of the caller. */
5d059ed9 3274 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 3275 needed += reg_parm_stack_space;
51bbfa0c 3276
a20c5714 3277 poly_int64 limit = needed;
6dad9361 3278 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
3279 limit += 1;
3280
3281 /* For polynomial sizes, this is the maximum possible
3282 size needed for arguments with a constant size
3283 and offset. */
3284 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3285 highest_outgoing_arg_in_use
3286 = MAX (initial_highest_arg_in_use, const_limit);
6dad9361 3287
04695783 3288 free (stack_usage_map_buf);
5ed6ace5 3289 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 3290 stack_usage_map = stack_usage_map_buf;
51bbfa0c 3291
f73ad30e 3292 if (initial_highest_arg_in_use)
2e09e75a
JM
3293 memcpy (stack_usage_map, initial_stack_usage_map,
3294 initial_highest_arg_in_use);
2f4aa534 3295
f73ad30e 3296 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 3297 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
3298 (highest_outgoing_arg_in_use
3299 - initial_highest_arg_in_use));
3300 needed = 0;
2f4aa534 3301
f8a097cd
JH
3302 /* The address of the outgoing argument list must not be
3303 copied to a register here, because argblock would be left
3304 pointing to the wrong place after the call to
f725a3ec 3305 allocate_dynamic_stack_space below. */
2f4aa534 3306
f73ad30e 3307 argblock = virtual_outgoing_args_rtx;
f725a3ec 3308 }
f73ad30e 3309 else
26a258fe 3310 {
a20c5714
RS
3311 /* Try to reuse some or all of the pending_stack_adjust
3312 to get this space. */
3313 if (inhibit_defer_pop == 0
3314 && (combine_pending_stack_adjustment_and_call
3315 (&needed,
3316 unadjusted_args_size,
3317 &adjusted_args_size,
3318 preferred_unit_stack_boundary)))
0a1c58a2 3319 {
ce48579b
RH
3320 /* combine_pending_stack_adjustment_and_call computes
3321 an adjustment before the arguments are allocated.
3322 Account for them and see whether or not the stack
3323 needs to go up or down. */
3324 needed = unadjusted_args_size - needed;
3325
a20c5714
RS
3326 /* Checked by
3327 combine_pending_stack_adjustment_and_call. */
3328 gcc_checking_assert (ordered_p (needed, 0));
3329 if (maybe_lt (needed, 0))
f73ad30e 3330 {
ce48579b
RH
3331 /* We're releasing stack space. */
3332 /* ??? We can avoid any adjustment at all if we're
3333 already aligned. FIXME. */
3334 pending_stack_adjust = -needed;
3335 do_pending_stack_adjust ();
f73ad30e
JH
3336 needed = 0;
3337 }
f725a3ec 3338 else
ce48579b
RH
3339 /* We need to allocate space. We'll do that in
3340 push_block below. */
3341 pending_stack_adjust = 0;
0a1c58a2 3342 }
ce48579b
RH
3343
3344 /* Special-case this because the overhead of `push_block' in
3345 this case is non-trivial. */
a20c5714 3346 if (known_eq (needed, 0))
f73ad30e 3347 argblock = virtual_outgoing_args_rtx;
0a1c58a2 3348 else
d892f288 3349 {
a20c5714
RS
3350 rtx needed_rtx = gen_int_mode (needed, Pmode);
3351 argblock = push_block (needed_rtx, 0, 0);
6dad9361
TS
3352 if (ARGS_GROW_DOWNWARD)
3353 argblock = plus_constant (Pmode, argblock, needed);
d892f288 3354 }
f73ad30e 3355
f8a097cd
JH
3356 /* We only really need to call `copy_to_reg' in the case
3357 where push insns are going to be used to pass ARGBLOCK
3358 to a function call in ARGS. In that case, the stack
3359 pointer changes value from the allocation point to the
3360 call point, and hence the value of
3361 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might
3362 as well always do it. */
f73ad30e 3363 argblock = copy_to_reg (argblock);
38afb23f
OH
3364 }
3365 }
3366 }
0a1c58a2 3367
38afb23f
OH
3368 if (ACCUMULATE_OUTGOING_ARGS)
3369 {
3370 /* The save/restore code in store_one_arg handles all
3371 cases except one: a constructor call (including a C
3372 function returning a BLKmode struct) to initialize
3373 an argument. */
3374 if (stack_arg_under_construction)
3375 {
ac294f0b 3376 rtx push_size
a20c5714
RS
3377 = (gen_int_mode
3378 (adjusted_args_size.constant
3379 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3380 : TREE_TYPE (fndecl))
3381 ? 0 : reg_parm_stack_space), Pmode));
38afb23f
OH
3382 if (old_stack_level == 0)
3383 {
9eac0f2a 3384 emit_stack_save (SAVE_BLOCK, &old_stack_level);
38afb23f
OH
3385 old_stack_pointer_delta = stack_pointer_delta;
3386 old_pending_adj = pending_stack_adjust;
3387 pending_stack_adjust = 0;
3388 /* stack_arg_under_construction says whether a stack
3389 arg is being constructed at the old stack level.
3390 Pushing the stack gets a clean outgoing argument
3391 block. */
3392 old_stack_arg_under_construction
3393 = stack_arg_under_construction;
3394 stack_arg_under_construction = 0;
3395 /* Make a new map for the new argument list. */
04695783 3396 free (stack_usage_map_buf);
b9eae1a9 3397 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 3398 stack_usage_map = stack_usage_map_buf;
38afb23f 3399 highest_outgoing_arg_in_use = 0;
a20c5714 3400 stack_usage_watermark = HOST_WIDE_INT_M1U;
f73ad30e 3401 }
d3c12306
EB
3402 /* We can pass TRUE as the 4th argument because we just
3403 saved the stack pointer and will restore it right after
3404 the call. */
9e878cf1
EB
3405 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3406 -1, true);
0a1c58a2 3407 }
bfbf933a 3408
38afb23f
OH
3409 /* If argument evaluation might modify the stack pointer,
3410 copy the address of the argument list to a register. */
3411 for (i = 0; i < num_actuals; i++)
3412 if (args[i].pass_on_stack)
3413 {
3414 argblock = copy_addr_to_reg (argblock);
3415 break;
3416 }
3417 }
d329e058 3418
0a1c58a2 3419 compute_argument_addresses (args, argblock, num_actuals);
bfbf933a 3420
5ba53785
UB
3421 /* Stack is properly aligned, pops can't safely be deferred during
3422 the evaluation of the arguments. */
3423 NO_DEFER_POP;
3424
ac4ee457
UB
3425 /* Precompute all register parameters. It isn't safe to compute
3426 anything once we have started filling any specific hard regs.
3427 TLS symbols sometimes need a call to resolve. Precompute
3428 register parameters before any stack pointer manipulation
3429 to avoid unaligned stack in the called function. */
3430 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3431
5ba53785
UB
3432 OK_DEFER_POP;
3433
3d9684ae
JG
3434 /* Perform stack alignment before the first push (the last arg). */
3435 if (argblock == 0
a20c5714
RS
3436 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3437 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4e217aed 3438 {
0a1c58a2
JL
3439 /* When the stack adjustment is pending, we get better code
3440 by combining the adjustments. */
a20c5714
RS
3441 if (maybe_ne (pending_stack_adjust, 0)
3442 && ! inhibit_defer_pop
3443 && (combine_pending_stack_adjustment_and_call
3444 (&pending_stack_adjust,
3445 unadjusted_args_size,
3446 &adjusted_args_size,
3447 preferred_unit_stack_boundary)))
3448 do_pending_stack_adjust ();
0a1c58a2 3449 else if (argblock == 0)
a20c5714
RS
3450 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
3451 - unadjusted_args_size,
3452 Pmode));
0a1c58a2 3453 }
ebcd0b57
JH
3454 /* Now that the stack is properly aligned, pops can't safely
3455 be deferred during the evaluation of the arguments. */
3456 NO_DEFER_POP;
51bbfa0c 3457
d3c12306
EB
3458 /* Record the maximum pushed stack space size. We need to delay
3459 doing it this far to take into account the optimization done
3460 by combine_pending_stack_adjustment_and_call. */
a11e0df4 3461 if (flag_stack_usage_info
d3c12306
EB
3462 && !ACCUMULATE_OUTGOING_ARGS
3463 && pass
3464 && adjusted_args_size.var == 0)
3465 {
a20c5714
RS
3466 poly_int64 pushed = (adjusted_args_size.constant
3467 + pending_stack_adjust);
3468 current_function_pushed_stack_size
3469 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
3470 }
3471
09e2bf48 3472 funexp = rtx_for_function_call (fndecl, addr);
51bbfa0c 3473
5039610b
SL
3474 if (CALL_EXPR_STATIC_CHAIN (exp))
3475 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
6de9cd9a
DN
3476 else
3477 static_chain_value = 0;
3478
f73ad30e 3479#ifdef REG_PARM_STACK_SPACE
0a1c58a2
JL
3480 /* Save the fixed argument area if it's part of the caller's frame and
3481 is clobbered by argument setup for this call. */
f8a097cd 3482 if (ACCUMULATE_OUTGOING_ARGS && pass)
f73ad30e
JH
3483 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3484 &low_to_save, &high_to_save);
b94301c2 3485#endif
51bbfa0c 3486
0a1c58a2
JL
3487 /* Now store (and compute if necessary) all non-register parms.
3488 These come before register parms, since they can require block-moves,
3489 which could clobber the registers used for register parms.
3490 Parms which have partial registers are not stored here,
3491 but we do preallocate space here if they want that. */
51bbfa0c 3492
0a1c58a2 3493 for (i = 0; i < num_actuals; i++)
0196c95e 3494 {
31db0fe0 3495 if (args[i].reg == 0 || args[i].pass_on_stack)
0196c95e 3496 {
48810515 3497 rtx_insn *before_arg = get_last_insn ();
0196c95e 3498
ddc923b5
MP
3499 /* We don't allow passing huge (> 2^30 B) arguments
3500 by value. It would cause an overflow later on. */
a20c5714 3501 if (constant_lower_bound (adjusted_args_size.constant)
ddc923b5
MP
3502 >= (1 << (HOST_BITS_PER_INT - 2)))
3503 {
3504 sorry ("passing too large argument on stack");
a717376e 3505 /* Don't worry about stack clean-up. */
6adbb51e 3506 if (pass == 0)
4c7d264e 3507 sibcall_failure = true;
6adbb51e
JJ
3508 else
3509 normal_failure = true;
ddc923b5
MP
3510 continue;
3511 }
3512
0196c95e
JJ
3513 if (store_one_arg (&args[i], argblock, flags,
3514 adjusted_args_size.var != 0,
3515 reg_parm_stack_space)
3516 || (pass == 0
3517 && check_sibcall_argument_overlap (before_arg,
4c7d264e
UB
3518 &args[i], true)))
3519 sibcall_failure = true;
0196c95e
JJ
3520 }
3521
2b1c5433 3522 if (args[i].stack)
7d810276
JJ
3523 call_fusage
3524 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3525 gen_rtx_USE (VOIDmode, args[i].stack),
3526 call_fusage);
0196c95e 3527 }
0a1c58a2
JL
3528
3529 /* If we have a parm that is passed in registers but not in memory
3530 and whose alignment does not permit a direct copy into registers,
3531 make a group of pseudos that correspond to each register that we
3532 will later fill. */
3533 if (STRICT_ALIGNMENT)
3534 store_unaligned_arguments_into_pseudos (args, num_actuals);
3535
3536 /* Now store any partially-in-registers parm.
3537 This is the last place a block-move can happen. */
3538 if (reg_parm_seen)
3539 for (i = 0; i < num_actuals; i++)
3540 if (args[i].partial != 0 && ! args[i].pass_on_stack)
c67846f2 3541 {
48810515 3542 rtx_insn *before_arg = get_last_insn ();
c67846f2 3543
99206968
KT
3544 /* On targets with weird calling conventions (e.g. PA) it's
3545 hard to ensure that all cases of argument overlap between
3546 stack and registers work. Play it safe and bail out. */
3547 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3548 {
4c7d264e 3549 sibcall_failure = true;
99206968
KT
3550 break;
3551 }
3552
4c6b3b2a
JJ
3553 if (store_one_arg (&args[i], argblock, flags,
3554 adjusted_args_size.var != 0,
3555 reg_parm_stack_space)
3556 || (pass == 0
3557 && check_sibcall_argument_overlap (before_arg,
4c7d264e
UB
3558 &args[i], true)))
3559 sibcall_failure = true;
c67846f2 3560 }
51bbfa0c 3561
2f21e1ba
BS
3562 bool any_regs = false;
3563 for (i = 0; i < num_actuals; i++)
3564 if (args[i].reg != NULL_RTX)
3565 {
3566 any_regs = true;
3567 targetm.calls.call_args (args[i].reg, funtype);
3568 }
3569 if (!any_regs)
3570 targetm.calls.call_args (pc_rtx, funtype);
3571
3572 /* Figure out the register where the value, if any, will come back. */
3573 valreg = 0;
2f21e1ba
BS
3574 if (TYPE_MODE (rettype) != VOIDmode
3575 && ! structure_value_addr)
3576 {
3577 if (pcc_struct_value)
31db0fe0
ML
3578 valreg = hard_function_value (build_pointer_type (rettype),
3579 fndecl, NULL, (pass == 0));
2f21e1ba 3580 else
31db0fe0
ML
3581 valreg = hard_function_value (rettype, fndecl, fntype,
3582 (pass == 0));
2f21e1ba
BS
3583
3584 /* If VALREG is a PARALLEL whose first member has a zero
3585 offset, use that. This is for targets such as m68k that
3586 return the same value in multiple places. */
3587 if (GET_CODE (valreg) == PARALLEL)
3588 {
3589 rtx elem = XVECEXP (valreg, 0, 0);
3590 rtx where = XEXP (elem, 0);
3591 rtx offset = XEXP (elem, 1);
3592 if (offset == const0_rtx
3593 && GET_MODE (where) == GET_MODE (valreg))
3594 valreg = where;
3595 }
3596 }
3597
0a1c58a2
JL
3598 /* If register arguments require space on the stack and stack space
3599 was not preallocated, allocate stack space here for arguments
3600 passed in registers. */
5d059ed9 3601 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
81464b2c 3602 && !ACCUMULATE_OUTGOING_ARGS
4c7d264e 3603 && !must_preallocate && reg_parm_stack_space > 0)
0a1c58a2 3604 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
756e0e12 3605
0a1c58a2
JL
3606 /* Pass the function the address in which to return a
3607 structure value. */
3608 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3609 {
7ae4ad28 3610 structure_value_addr
5ae6cd0d 3611 = convert_memory_address (Pmode, structure_value_addr);
61f71b34 3612 emit_move_insn (struct_value,
0a1c58a2
JL
3613 force_reg (Pmode,
3614 force_operand (structure_value_addr,
3615 NULL_RTX)));
3616
f8cfc6aa 3617 if (REG_P (struct_value))
61f71b34 3618 use_reg (&call_fusage, struct_value);
0a1c58a2 3619 }
c2939b57 3620
05e6ee93 3621 after_args = get_last_insn ();
78bcf3dc
EB
3622 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3623 static_chain_value, &call_fusage,
3624 reg_parm_seen, flags);
6b8805cf 3625
0cdca92b
DJ
3626 load_register_parameters (args, num_actuals, &call_fusage, flags,
3627 pass == 0, &sibcall_failure);
f725a3ec 3628
0a1c58a2
JL
3629 /* Save a pointer to the last insn before the call, so that we can
3630 later safely search backwards to find the CALL_INSN. */
3631 before_call = get_last_insn ();
51bbfa0c 3632
7d167afd
JJ
3633 /* Set up next argument register. For sibling calls on machines
3634 with register windows this should be the incoming register. */
7d167afd 3635 if (pass == 0)
6783fdb7
RS
3636 next_arg_reg = targetm.calls.function_incoming_arg
3637 (args_so_far, function_arg_info::end_marker ());
7d167afd 3638 else
6783fdb7
RS
3639 next_arg_reg = targetm.calls.function_arg
3640 (args_so_far, function_arg_info::end_marker ());
7d167afd 3641
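 /* If the callee is known to return one of its arguments
    (ERF_RETURNS_ARG), record in the call fusage that the value
    register is simply a copy of that argument's register. */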
e384e6b5
BS
3642 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3643 {
3644 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3d9684ae 3645 arg_nr = num_actuals - arg_nr - 1;
b3681f13
TV
3646 if (arg_nr >= 0
3647 && arg_nr < num_actuals
3648 && args[arg_nr].reg
e384e6b5
BS
3649 && valreg
3650 && REG_P (valreg)
3651 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3652 call_fusage
3653 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
f7df4a84 3654 gen_rtx_SET (valreg, args[arg_nr].reg),
e384e6b5
BS
3655 call_fusage);
3656 }
0a1c58a2
JL
3657 /* All arguments and registers used for the call must be set up by
3658 now! */
3659
ce48579b 3660 /* Stack must be properly aligned now. */
366de0ce 3661 gcc_assert (!pass
a20c5714
RS
3662 || multiple_p (stack_pointer_delta,
3663 preferred_unit_stack_boundary));
ebcd0b57 3664
0a1c58a2 3665 /* Generate the actual call instruction. */
6de9cd9a 3666 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
099e9712 3667 adjusted_args_size.constant, struct_value_size,
7d167afd 3668 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
d5cc9181 3669 flags, args_so_far);
0a1c58a2 3670
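 /* For -fipa-ra, attach a REG_CALL_DECL note recording the callee's
    SYMBOL_REF (or NULL for an indirect call) so that later passes can
    look up which registers the callee actually clobbers. */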
1e288103 3671 if (flag_ipa_ra)
4f660b15 3672 {
48810515
DM
3673 rtx_call_insn *last;
3674 rtx datum = NULL_RTX;
4f660b15
RO
3675 if (fndecl != NULL_TREE)
3676 {
3677 datum = XEXP (DECL_RTL (fndecl), 0);
3678 gcc_assert (datum != NULL_RTX
3679 && GET_CODE (datum) == SYMBOL_REF);
3680 }
3681 last = last_call_insn ();
3682 add_reg_note (last, REG_CALL_DECL, datum);
3683 }
3684
05e6ee93
MM
3685 /* If the call setup or the call itself overlaps with any part
3686 of the argument setup, we probably clobbered our call address.
3687 In that case we can't do sibcalls. */
3688 if (pass == 0
4c7d264e
UB
3689 && check_sibcall_argument_overlap (after_args, 0, false))
3690 sibcall_failure = true;
05e6ee93 3691
bef5d8b6
RS
3692 /* If a non-BLKmode value is returned at the most significant end
3693 of a register, shift the register right by the appropriate amount
3694 and update VALREG accordingly. BLKmode values are handled by the
3695 group load/store machinery below. */
3696 if (!structure_value_addr
3697 && !pcc_struct_value
66de4d7c 3698 && TYPE_MODE (rettype) != VOIDmode
28ed065e 3699 && TYPE_MODE (rettype) != BLKmode
66de4d7c 3700 && REG_P (valreg)
28ed065e 3701 && targetm.calls.return_in_msb (rettype))
bef5d8b6 3702 {
28ed065e 3703 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4c7d264e 3704 sibcall_failure = true;
28ed065e 3705 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
bef5d8b6
RS
3706 }
3707
84b8030f 3708 if (pass && (flags & ECF_MALLOC))
0a1c58a2
JL
3709 {
3710 rtx temp = gen_reg_rtx (GET_MODE (valreg));
48810515 3711 rtx_insn *last, *insns;
0a1c58a2 3712
f725a3ec 3713 /* The return value from a malloc-like function is a pointer. */
28ed065e 3714 if (TREE_CODE (rettype) == POINTER_TYPE)
d154bfa2 3715 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
0a1c58a2
JL
3716
3717 emit_move_insn (temp, valreg);
3718
67914693 3719 /* The return value from a malloc-like function cannot alias
0a1c58a2
JL
3720 anything else. */
3721 last = get_last_insn ();
65c5f2a6 3722 add_reg_note (last, REG_NOALIAS, temp);
0a1c58a2
JL
3723
3724 /* Write out the sequence. */
3725 insns = get_insns ();
3726 end_sequence ();
2f937369 3727 emit_insn (insns);
0a1c58a2
JL
3728 valreg = temp;
3729 }
51bbfa0c 3730
6fb5fa3c 3731 /* For calls to `setjmp', etc., inform
e53b6e56 3732 function.cc:setjmp_warnings that it should complain if
6fb5fa3c
DB
3733 nonvolatile values are live. For functions that cannot
3734 return, inform flow that control does not fall through. */
51bbfa0c 3735
6e14af16 3736 if ((flags & ECF_NORETURN) || pass == 0)
c2939b57 3737 {
570a98eb 3738 /* The barrier must be emitted
0a1c58a2
JL
3739 immediately after the CALL_INSN. Some ports emit more
3740 than just a CALL_INSN above, so we must search for it here. */
51bbfa0c 3741
48810515 3742 rtx_insn *last = get_last_insn ();
4b4bf941 3743 while (!CALL_P (last))
0a1c58a2
JL
3744 {
3745 last = PREV_INSN (last);
3746 /* There was no CALL_INSN? */
366de0ce 3747 gcc_assert (last != before_call);
0a1c58a2 3748 }
51bbfa0c 3749
570a98eb 3750 emit_barrier_after (last);
8af61113 3751
f451eeef
JS
3752 /* Stack adjustments after a noreturn call are dead code.
3753 However when NO_DEFER_POP is in effect, we must preserve
3754 stack_pointer_delta. */
3755 if (inhibit_defer_pop == 0)
3756 {
3757 stack_pointer_delta = old_stack_allocated;
3758 pending_stack_adjust = 0;
3759 }
0a1c58a2 3760 }
51bbfa0c 3761
0a1c58a2 3762 /* If value type not void, return an rtx for the value. */
51bbfa0c 3763
28ed065e 3764 if (TYPE_MODE (rettype) == VOIDmode
0a1c58a2 3765 || ignore)
b5cd4ed4 3766 target = const0_rtx;
0a1c58a2
JL
3767 else if (structure_value_addr)
3768 {
3c0cb5de 3769 if (target == 0 || !MEM_P (target))
0a1c58a2 3770 {
3bdf5ad1 3771 target
28ed065e
MM
3772 = gen_rtx_MEM (TYPE_MODE (rettype),
3773 memory_address (TYPE_MODE (rettype),
3bdf5ad1 3774 structure_value_addr));
28ed065e 3775 set_mem_attributes (target, rettype, 1);
0a1c58a2
JL
3776 }
3777 }
3778 else if (pcc_struct_value)
cacbd532 3779 {
0a1c58a2
JL
3780 /* This is the special C++ case where we need to
3781 know what the true target was. We take care to
3782 never use this value more than once in one expression. */
28ed065e 3783 target = gen_rtx_MEM (TYPE_MODE (rettype),
0a1c58a2 3784 copy_to_reg (valreg));
28ed065e 3785 set_mem_attributes (target, rettype, 1);
cacbd532 3786 }
0a1c58a2
JL
3787 /* Handle calls that return values in multiple non-contiguous locations.
3788 The Irix 6 ABI has examples of this. */
3789 else if (GET_CODE (valreg) == PARALLEL)
3790 {
6de9cd9a 3791 if (target == 0)
5ef0b50d 3792 target = emit_group_move_into_temps (valreg);
1d1b7dc4
RS
3793 else if (rtx_equal_p (target, valreg))
3794 ;
3795 else if (GET_CODE (target) == PARALLEL)
3796 /* Handle the result of an emit_group_move_into_temps
3797 call in the previous pass. */
3798 emit_group_move (target, valreg);
3799 else
28ed065e
MM
3800 emit_group_store (target, valreg, rettype,
3801 int_size_in_bytes (rettype));
0a1c58a2
JL
3802 }
3803 else if (target
28ed065e 3804 && GET_MODE (target) == TYPE_MODE (rettype)
0a1c58a2
JL
3805 && GET_MODE (target) == GET_MODE (valreg))
3806 {
51caaefe
EB
3807 bool may_overlap = false;
3808
f2d18690
KK
3809 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3810 reg to a plain register. */
3fb30019
RS
3811 if (!REG_P (target) || HARD_REGISTER_P (target))
3812 valreg = avoid_likely_spilled_reg (valreg);
f2d18690 3813
51caaefe
EB
3814 /* If TARGET is a MEM in the argument area, and we have
3815 saved part of the argument area, then we can't store
3816 directly into TARGET as it may get overwritten when we
3817 restore the argument save area below. Don't work too
3818 hard though and simply force TARGET to a register if it
3819 is a MEM; the optimizer is quite likely to sort it out. */
3820 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3821 for (i = 0; i < num_actuals; i++)
3822 if (args[i].save_area)
3823 {
3824 may_overlap = true;
3825 break;
3826 }
0219237c 3827
51caaefe
EB
3828 if (may_overlap)
3829 target = copy_to_reg (valreg);
3830 else
3831 {
3832 /* TARGET and VALREG cannot be equal at this point
3833 because the latter would not have
3834 REG_FUNCTION_VALUE_P true, while the former would if
3835 it were referring to the same register.
3836
3837 If they refer to the same register, this move will be
3838 a no-op, except when function inlining is being
3839 done. */
3840 emit_move_insn (target, valreg);
3841
3842 /* If we are setting a MEM, this code must be executed.
3843 Since it is emitted after the call insn, sibcall
3844 optimization cannot be performed in that case. */
3845 if (MEM_P (target))
4c7d264e 3846 sibcall_failure = true;
51caaefe 3847 }
0a1c58a2 3848 }
0a1c58a2 3849 else
3fb30019 3850 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
51bbfa0c 3851
cde0f3fd
PB
3852 /* If we promoted this return value, make the proper SUBREG.
3853 TARGET might be const0_rtx here, so be careful. */
3854 if (REG_P (target)
28ed065e
MM
3855 && TYPE_MODE (rettype) != BLKmode
3856 && GET_MODE (target) != TYPE_MODE (rettype))
61f71b34 3857 {
28ed065e 3858 tree type = rettype;
cde0f3fd 3859 int unsignedp = TYPE_UNSIGNED (type);
ac4c8f53 3860 machine_mode ret_mode = TYPE_MODE (type);
ef4bddc2 3861 machine_mode pmode;
cde0f3fd
PB
3862
3863 /* Ensure we promote as expected, and get the new unsignedness. */
ac4c8f53 3864 pmode = promote_function_mode (type, ret_mode, &unsignedp,
cde0f3fd
PB
3865 funtype, 1);
3866 gcc_assert (GET_MODE (target) == pmode);
3867
ac4c8f53
RS
3868 if (SCALAR_INT_MODE_P (pmode)
3869 && SCALAR_FLOAT_MODE_P (ret_mode)
3870 && known_gt (GET_MODE_SIZE (pmode), GET_MODE_SIZE (ret_mode)))
3871 target = convert_wider_int_to_float (ret_mode, pmode, target);
3872 else
3873 {
3874 target = gen_lowpart_SUBREG (ret_mode, target);
3875 SUBREG_PROMOTED_VAR_P (target) = 1;
3876 SUBREG_PROMOTED_SET (target, unsignedp);
3877 }
61f71b34 3878 }
84b55618 3879
0a1c58a2
JL
3880 /* If the size of the args is variable or this was a constructor call for a stack
3881 argument, restore the saved stack-pointer value. */
51bbfa0c 3882
9dd9bf80 3883 if (old_stack_level)
0a1c58a2 3884 {
48810515 3885 rtx_insn *prev = get_last_insn ();
9a08d230 3886
9eac0f2a 3887 emit_stack_restore (SAVE_BLOCK, old_stack_level);
38afb23f 3888 stack_pointer_delta = old_stack_pointer_delta;
9a08d230 3889
faf7a23d 3890 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
9a08d230 3891
0a1c58a2 3892 pending_stack_adjust = old_pending_adj;
d25cee4d 3893 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
0a1c58a2
JL
3894 stack_arg_under_construction = old_stack_arg_under_construction;
3895 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3896 stack_usage_map = initial_stack_usage_map;
a20c5714 3897 stack_usage_watermark = initial_stack_usage_watermark;
4c7d264e 3898 sibcall_failure = true;
0a1c58a2 3899 }
f8a097cd 3900 else if (ACCUMULATE_OUTGOING_ARGS && pass)
0a1c58a2 3901 {
51bbfa0c 3902#ifdef REG_PARM_STACK_SPACE
0a1c58a2 3903 if (save_area)
b820d2b8
AM
3904 restore_fixed_argument_area (save_area, argblock,
3905 high_to_save, low_to_save);
b94301c2 3906#endif
51bbfa0c 3907
0a1c58a2
JL
3908 /* If we saved any argument areas, restore them. */
3909 for (i = 0; i < num_actuals; i++)
3910 if (args[i].save_area)
3911 {
ef4bddc2 3912 machine_mode save_mode = GET_MODE (args[i].save_area);
0a1c58a2
JL
3913 rtx stack_area
3914 = gen_rtx_MEM (save_mode,
3915 memory_address (save_mode,
3916 XEXP (args[i].stack_slot, 0)));
3917
3918 if (save_mode != BLKmode)
3919 emit_move_insn (stack_area, args[i].save_area);
3920 else
44bb111a 3921 emit_block_move (stack_area, args[i].save_area,
a20c5714
RS
3922 (gen_int_mode
3923 (args[i].locate.size.constant, Pmode)),
44bb111a 3924 BLOCK_OP_CALL_PARM);
0a1c58a2 3925 }
51bbfa0c 3926
0a1c58a2
JL
3927 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3928 stack_usage_map = initial_stack_usage_map;
a20c5714 3929 stack_usage_watermark = initial_stack_usage_watermark;
0a1c58a2 3930 }
51bbfa0c 3931
d33606c3
EB
3932 /* If this was alloca, record the new stack level. */
3933 if (flags & ECF_MAY_BE_ALLOCA)
3934 record_new_stack_level ();
51bbfa0c 3935
0a1c58a2
JL
3936 /* Free up storage we no longer need. */
3937 for (i = 0; i < num_actuals; ++i)
04695783 3938 free (args[i].aligned_regs);
0a1c58a2 3939
2f21e1ba
BS
3940 targetm.calls.end_call_args ();
3941
0a1c58a2
JL
3942 insns = get_insns ();
3943 end_sequence ();
3944
3945 if (pass == 0)
3946 {
3947 tail_call_insns = insns;
3948
0a1c58a2
JL
3949 /* Restore the pending stack adjustment now that we have
3950 finished generating the sibling call sequence. */
1503a7ec 3951
7f2f0a01 3952 restore_pending_stack_adjust (&save);
099e9712
JH
3953
3954 /* Prepare arg structure for next iteration. */
f725a3ec 3955 for (i = 0; i < num_actuals; i++)
099e9712
JH
3956 {
3957 args[i].value = 0;
3958 args[i].aligned_regs = 0;
3959 args[i].stack = 0;
3960 }
c67846f2
JJ
3961
3962 sbitmap_free (stored_args_map);
48810515 3963 internal_arg_pointer_exp_state.scan_start = NULL;
9771b263 3964 internal_arg_pointer_exp_state.cache.release ();
0a1c58a2
JL
3965 }
3966 else
38afb23f
OH
3967 {
3968 normal_call_insns = insns;
3969
3970 /* Verify that we've deallocated all the stack we used. */
6e14af16 3971 gcc_assert ((flags & ECF_NORETURN)
6adbb51e 3972 || normal_failure
a20c5714
RS
3973 || known_eq (old_stack_allocated,
3974 stack_pointer_delta
3975 - pending_stack_adjust));
6adbb51e
JJ
3976 if (normal_failure)
3977 normal_call_insns = NULL;
38afb23f 3978 }
fadb729c
JJ
3979
3980 /* If something prevents making this a sibling call,
3981 zero out the sequence. */
3982 if (sibcall_failure)
48810515 3983 tail_call_insns = NULL;
6de9cd9a
DN
3984 else
3985 break;
0a1c58a2
JL
3986 }
3987
1ea7e6ad 3988 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
6de9cd9a
DN
3989 arguments too, as the argument area is now clobbered by the call. */
3990 if (tail_call_insns)
0a1c58a2 3991 {
6de9cd9a 3992 emit_insn (tail_call_insns);
e3b5732b 3993 crtl->tail_call_emit = true;
0a1c58a2
JL
3994 }
3995 else
9a385c2d
DM
3996 {
3997 emit_insn (normal_call_insns);
3998 if (try_tail_call)
3999 /* Ideally we'd emit a message for all of the ways that it could
4000 have failed. */
4001 maybe_complain_about_tail_call (exp, "tail call production failed");
4002 }
51bbfa0c 4003
0a1c58a2 4004 currently_expanding_call--;
8e6a59fe 4005
04695783 4006 free (stack_usage_map_buf);
765fc0f7 4007 free (args);
51bbfa0c
RS
4008 return target;
4009}
ded9bf77 4010
6de9cd9a
DN
4011/* A sibling call sequence invalidates any REG_EQUIV notes made for
4012 this function's incoming arguments.
4013
4014 At the start of RTL generation we know the only REG_EQUIV notes
29d51cdb
SB
4015 in the rtl chain are those for incoming arguments, so we can look
4016 for REG_EQUIV notes between the start of the function and the
4017 NOTE_INSN_FUNCTION_BEG.
6de9cd9a
DN
4018
4019 This is (slight) overkill. We could keep track of the highest
4020 argument we clobber and be more selective in removing notes, but it
4021 does not seem to be worth the effort. */
29d51cdb 4022
6de9cd9a
DN
4023void
4024fixup_tail_calls (void)
4025{
48810515 4026 rtx_insn *insn;
29d51cdb
SB
4027
4028 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4029 {
a31830a7
SB
4030 rtx note;
4031
29d51cdb
SB
4032 /* There are never REG_EQUIV notes for the incoming arguments
4033 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
4034 if (NOTE_P (insn)
a38e7aa5 4035 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
29d51cdb
SB
4036 break;
4037
a31830a7
SB
4038 note = find_reg_note (insn, REG_EQUIV, 0);
4039 if (note)
4040 remove_note (insn, note);
4041 note = find_reg_note (insn, REG_EQUIV, 0);
4042 gcc_assert (!note);
29d51cdb 4043 }
6de9cd9a
DN
4044}
4045
ded9bf77
AH
4046/* Traverse a list of TYPES and expand all complex types into their
4047 components. */
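/* For example, the list (complex double, int) becomes
   (double, double, int): each complex entry is rewritten in place to
   its component type and a second entry is linked in for the
   imaginary part. */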
2f2b4a02 4048static tree
ded9bf77
AH
4049split_complex_types (tree types)
4050{
4051 tree p;
4052
42ba5130
RH
4053 /* Before allocating memory, check for the common case of no complex. */
4054 for (p = types; p; p = TREE_CHAIN (p))
4055 {
4056 tree type = TREE_VALUE (p);
4057 if (TREE_CODE (type) == COMPLEX_TYPE
4058 && targetm.calls.split_complex_arg (type))
c22cacf3 4059 goto found;
42ba5130
RH
4060 }
4061 return types;
4062
4063 found:
ded9bf77
AH
4064 types = copy_list (types);
4065
4066 for (p = types; p; p = TREE_CHAIN (p))
4067 {
4068 tree complex_type = TREE_VALUE (p);
4069
42ba5130
RH
4070 if (TREE_CODE (complex_type) == COMPLEX_TYPE
4071 && targetm.calls.split_complex_arg (complex_type))
ded9bf77
AH
4072 {
4073 tree next, imag;
4074
4075 /* Rewrite complex type with component type. */
4076 TREE_VALUE (p) = TREE_TYPE (complex_type);
4077 next = TREE_CHAIN (p);
4078
4079 /* Add another component type for the imaginary part. */
4080 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4081 TREE_CHAIN (p) = imag;
4082 TREE_CHAIN (imag) = next;
4083
4084 /* Skip the newly created node. */
4085 p = TREE_CHAIN (p);
4086 }
4087 }
4088
4089 return types;
4090}
51bbfa0c 4091\f
db69559b
RS
4092/* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4093 for a value of mode OUTMODE,
4094 with NARGS different arguments, passed as ARGS.
4095 Store the return value if RETVAL is nonzero: store it in VALUE if
4096 VALUE is nonnull, otherwise pick a convenient location. In either
4097 case return the location of the stored value.
8ac61af7 4098
db69559b
RS
4099 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4100 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4101 other types of library calls. */
4102
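/* For instance, a DImode multiplication on a 32-bit target that cannot
   expand it inline typically reaches this routine via optabs, emitting
   a call to libgcc's __muldi3 with RETVAL nonzero, OUTMODE == DImode
   and two DImode arguments. */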
4103rtx
d329e058
AJ
4104emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4105 enum libcall_type fn_type,
db69559b 4106 machine_mode outmode, int nargs, rtx_mode_t *args)
43bc5f13 4107{
3c0fca12
RH
4108 /* Total size in bytes of all the stack-parms scanned so far. */
4109 struct args_size args_size;
4110 /* Size of arguments before any adjustments (such as rounding). */
4111 struct args_size original_args_size;
b3694847 4112 int argnum;
3c0fca12 4113 rtx fun;
81464b2c
KT
4114 /* TODO: choose the correct decl type of orgfun. Sadly this information
4115 isn't present here, so we default to the native calling ABI. */
033df0b9 4116 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
5d059ed9 4117 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3c0fca12 4118 int count;
3c0fca12 4119 rtx argblock = 0;
d5cc9181
JR
4120 CUMULATIVE_ARGS args_so_far_v;
4121 cumulative_args_t args_so_far;
f725a3ec
KH
4122 struct arg
4123 {
4124 rtx value;
ef4bddc2 4125 machine_mode mode;
f725a3ec
KH
4126 rtx reg;
4127 int partial;
e7949876 4128 struct locate_and_pad_arg_data locate;
f725a3ec
KH
4129 rtx save_area;
4130 };
3c0fca12
RH
4131 struct arg *argvec;
4132 int old_inhibit_defer_pop = inhibit_defer_pop;
4133 rtx call_fusage = 0;
4134 rtx mem_value = 0;
5591ee6f 4135 rtx valreg;
4c7d264e 4136 bool pcc_struct_value = false;
cf098191 4137 poly_int64 struct_value_size = 0;
52a11cbf 4138 int flags;
3c0fca12 4139 int reg_parm_stack_space = 0;
a20c5714 4140 poly_int64 needed;
48810515 4141 rtx_insn *before_call;
0ed4bf92 4142 bool have_push_fusage;
b0c48229 4143 tree tfom; /* type_for_mode (outmode, 0) */
3c0fca12 4144
f73ad30e 4145#ifdef REG_PARM_STACK_SPACE
3c0fca12
RH
4146 /* Define the boundary of the register parm stack space that needs to be
4147 save, if any. */
726a989a 4148 int low_to_save = 0, high_to_save = 0;
f725a3ec 4149 rtx save_area = 0; /* Place that it is saved. */
3c0fca12
RH
4150#endif
4151
3c0fca12 4152 /* Size of the stack reserved for parameter registers. */
a20c5714 4153 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3c0fca12 4154 char *initial_stack_usage_map = stack_usage_map;
a20c5714 4155 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
d9725c41 4156 char *stack_usage_map_buf = NULL;
3c0fca12 4157
61f71b34
DD
4158 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4159
3c0fca12 4160#ifdef REG_PARM_STACK_SPACE
3c0fca12 4161 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3c0fca12
RH
4162#endif
4163
0529235d 4164 /* By default, library functions cannot throw. */
52a11cbf
RH
4165 flags = ECF_NOTHROW;
4166
9555a122
RH
4167 switch (fn_type)
4168 {
4169 case LCT_NORMAL:
53d4257f 4170 break;
9555a122 4171 case LCT_CONST:
53d4257f
JH
4172 flags |= ECF_CONST;
4173 break;
9555a122 4174 case LCT_PURE:
53d4257f 4175 flags |= ECF_PURE;
9555a122 4176 break;
9555a122
RH
4177 case LCT_NORETURN:
4178 flags |= ECF_NORETURN;
4179 break;
4180 case LCT_THROW:
0529235d 4181 flags &= ~ECF_NOTHROW;
9555a122 4182 break;
9defc9b7
RH
4183 case LCT_RETURNS_TWICE:
4184 flags = ECF_RETURNS_TWICE;
4185 break;
9555a122 4186 }
3c0fca12
RH
4187 fun = orgfun;
4188
3c0fca12
RH
4189 /* Ensure current function's preferred stack boundary is at least
4190 what we need. */
cb91fab0
JH
4191 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4192 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3c0fca12
RH
4193
4194 /* If this kind of value comes back in memory,
4195 decide where in memory it should come back. */
b0c48229 4196 if (outmode != VOIDmode)
3c0fca12 4197 {
ae2bcd98 4198 tfom = lang_hooks.types.type_for_mode (outmode, 0);
61f71b34 4199 if (aggregate_value_p (tfom, 0))
b0c48229 4200 {
3c0fca12 4201#ifdef PCC_STATIC_STRUCT_RETURN
b0c48229 4202 rtx pointer_reg
1d636cc6 4203 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
b0c48229 4204 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4c7d264e 4205 pcc_struct_value = true;
b0c48229
NB
4206 if (value == 0)
4207 value = gen_reg_rtx (outmode);
3c0fca12 4208#else /* not PCC_STATIC_STRUCT_RETURN */
b0c48229 4209 struct_value_size = GET_MODE_SIZE (outmode);
3c0cb5de 4210 if (value != 0 && MEM_P (value))
b0c48229
NB
4211 mem_value = value;
4212 else
9474e8ab 4213 mem_value = assign_temp (tfom, 1, 1);
3c0fca12 4214#endif
b0c48229 4215 /* This call returns a big structure. */
84b8030f 4216 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
b0c48229 4217 }
3c0fca12 4218 }
b0c48229
NB
4219 else
4220 tfom = void_type_node;
3c0fca12
RH
4221
4222 /* ??? Unfinished: must pass the memory address as an argument. */
4223
4224 /* Copy all the libcall-arguments out of the varargs data
4225 and into a vector ARGVEC.
4226
4227 Compute how to pass each argument. We only support a very small subset
4228 of the full argument passing conventions to limit complexity here since
4229 library functions shouldn't have many args. */
4230
f883e0a7 4231 argvec = XALLOCAVEC (struct arg, nargs + 1);
703ad42b 4232 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3c0fca12 4233
97fc4caf 4234#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
d5cc9181 4235 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
97fc4caf 4236#else
d5cc9181 4237 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
97fc4caf 4238#endif
d5cc9181 4239 args_so_far = pack_cumulative_args (&args_so_far_v);
3c0fca12
RH
4240
4241 args_size.constant = 0;
4242 args_size.var = 0;
4243
4244 count = 0;
4245
4246 push_temp_slots ();
4247
4248 /* If there's a structure value address to be passed,
4249 either pass it in the special place, or pass it as an extra argument. */
61f71b34 4250 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3c0fca12
RH
4251 {
4252 rtx addr = XEXP (mem_value, 0);
c22cacf3 4253
3c0fca12
RH
4254 nargs++;
4255
ee88d9aa
MK
4256 /* Make sure it is a reasonable operand for a move or push insn. */
4257 if (!REG_P (addr) && !MEM_P (addr)
1a627b35
RS
4258 && !(CONSTANT_P (addr)
4259 && targetm.legitimate_constant_p (Pmode, addr)))
ee88d9aa
MK
4260 addr = force_operand (addr, NULL_RTX);
4261
3c0fca12
RH
4262 argvec[count].value = addr;
4263 argvec[count].mode = Pmode;
4264 argvec[count].partial = 0;
4265
a7c81bc1 4266 function_arg_info ptr_arg (Pmode, /*named=*/true);
6783fdb7 4267 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
a7c81bc1 4268 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
3c0fca12
RH
4269
4270 locate_and_pad_parm (Pmode, NULL_TREE,
a4d5044f 4271#ifdef STACK_PARMS_IN_REG_PARM_AREA
c22cacf3 4272 1,
a4d5044f
CM
4273#else
4274 argvec[count].reg != 0,
4275#endif
2e4ceca5
UW
4276 reg_parm_stack_space, 0,
4277 NULL_TREE, &args_size, &argvec[count].locate);
3c0fca12 4278
3c0fca12
RH
4279 if (argvec[count].reg == 0 || argvec[count].partial != 0
4280 || reg_parm_stack_space > 0)
e7949876 4281 args_size.constant += argvec[count].locate.size.constant;
3c0fca12 4282
6930c98c 4283 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
3c0fca12
RH
4284
4285 count++;
4286 }
4287
db69559b 4288 for (unsigned int i = 0; count < nargs; i++, count++)
3c0fca12 4289 {
db69559b 4290 rtx val = args[i].first;
cf0d189e 4291 function_arg_info arg (args[i].second, /*named=*/true);
5e617be8 4292 int unsigned_p = 0;
3c0fca12
RH
4293
4294 /* We cannot convert the arg value to the mode the library wants here;
4295 must do it earlier where we know the signedness of the arg. */
cf0d189e
RS
4296 gcc_assert (arg.mode != BLKmode
4297 && (GET_MODE (val) == arg.mode
4298 || GET_MODE (val) == VOIDmode));
3c0fca12 4299
ee88d9aa
MK
4300 /* Make sure it is a reasonable operand for a move or push insn. */
4301 if (!REG_P (val) && !MEM_P (val)
cf0d189e
RS
4302 && !(CONSTANT_P (val)
4303 && targetm.legitimate_constant_p (arg.mode, val)))
ee88d9aa
MK
4304 val = force_operand (val, NULL_RTX);
4305
cf0d189e 4306 if (pass_by_reference (&args_so_far_v, arg))
3c0fca12 4307 {
f474c6f8 4308 rtx slot;
cf0d189e 4309 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
f474c6f8 4310
becfd6e5
KZ
4311 /* If this was a CONST function, it is now PURE since it now
4312 reads memory. */
99a32567
DM
4313 if (flags & ECF_CONST)
4314 {
4315 flags &= ~ECF_CONST;
4316 flags |= ECF_PURE;
4317 }
4318
e0c68ce9 4319 if (MEM_P (val) && !must_copy)
c4b9a87e
ER
4320 {
4321 tree val_expr = MEM_EXPR (val);
4322 if (val_expr)
4323 mark_addressable (val_expr);
4324 slot = val;
4325 }
9969aaf6 4326 else
f474c6f8 4327 {
cf0d189e 4328 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
9474e8ab 4329 1, 1);
f474c6f8
AO
4330 emit_move_insn (slot, val);
4331 }
1da68f56 4332
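 /* The USE below keeps the argument slot live up to the call; when we
    had to make a fresh copy, the CLOBBER added further down also
    records that the callee may modify the copy, so its contents cannot
    be assumed live after the call. */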
6b5273c3
AO
4333 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4334 gen_rtx_USE (VOIDmode, slot),
4335 call_fusage);
f474c6f8
AO
4336 if (must_copy)
4337 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4338 gen_rtx_CLOBBER (VOIDmode,
4339 slot),
4340 call_fusage);
4341
cf0d189e 4342 arg.mode = Pmode;
257caa55 4343 arg.pass_by_reference = true;
f474c6f8 4344 val = force_operand (XEXP (slot, 0), NULL_RTX);
3c0fca12 4345 }
3c0fca12 4346
cf0d189e
RS
4347 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
4348 NULL_TREE, 0);
4349 argvec[count].mode = arg.mode;
4350 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
4351 unsigned_p);
6783fdb7 4352 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
3c0fca12 4353
3c0fca12 4354 argvec[count].partial
a7c81bc1 4355 = targetm.calls.arg_partial_bytes (args_so_far, arg);
3c0fca12 4356
3576f984
RS
4357 if (argvec[count].reg == 0
4358 || argvec[count].partial != 0
4359 || reg_parm_stack_space > 0)
4360 {
cf0d189e 4361 locate_and_pad_parm (arg.mode, NULL_TREE,
a4d5044f 4362#ifdef STACK_PARMS_IN_REG_PARM_AREA
3576f984 4363 1,
a4d5044f 4364#else
3576f984
RS
4365 argvec[count].reg != 0,
4366#endif
2e4ceca5 4367 reg_parm_stack_space, argvec[count].partial,
3576f984
RS
4368 NULL_TREE, &args_size, &argvec[count].locate);
4369 args_size.constant += argvec[count].locate.size.constant;
4370 gcc_assert (!argvec[count].locate.size.var);
4371 }
4372#ifdef BLOCK_REG_PADDING
4373 else
4374 /* The argument is passed entirely in registers. See at which
4375 end it should be padded. */
4376 argvec[count].locate.where_pad =
cf0d189e
RS
4377 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
4378 known_le (GET_MODE_SIZE (arg.mode),
4379 UNITS_PER_WORD));
a4d5044f 4380#endif
3c0fca12 4381
6930c98c 4382 targetm.calls.function_arg_advance (args_so_far, arg);
3c0fca12 4383 }
3c0fca12 4384
957ed738
L
4385 for (int i = 0; i < nargs; i++)
4386 if (reg_parm_stack_space > 0
4387 || argvec[i].reg == 0
4388 || argvec[i].partial != 0)
4389 update_stack_alignment_for_call (&argvec[i].locate);
4390
3c0fca12
RH
4391 /* If this machine requires an external definition for library
4392 functions, write one out. */
4393 assemble_external_libcall (fun);
4394
4395 original_args_size = args_size;
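 /* Round the argument block size so that the stack pointer ends up
    aligned to STACK_BYTES after the pushes; stack_pointer_delta is
    folded in first so that any adjustment already outstanding is
    accounted for in the rounding. */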
a20c5714
RS
4396 args_size.constant = (aligned_upper_bound (args_size.constant
4397 + stack_pointer_delta,
4398 STACK_BYTES)
4399 - stack_pointer_delta);
3c0fca12 4400
a20c5714
RS
4401 args_size.constant = upper_bound (args_size.constant,
4402 reg_parm_stack_space);
3c0fca12 4403
5d059ed9 4404 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4405 args_size.constant -= reg_parm_stack_space;
3c0fca12 4406
a20c5714
RS
4407 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4408 args_size.constant);
3c0fca12 4409
a11e0df4 4410 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
d3c12306 4411 {
a20c5714
RS
4412 poly_int64 pushed = args_size.constant + pending_stack_adjust;
4413 current_function_pushed_stack_size
4414 = upper_bound (current_function_pushed_stack_size, pushed);
d3c12306
EB
4415 }
4416
f73ad30e
JH
4417 if (ACCUMULATE_OUTGOING_ARGS)
4418 {
4419 /* Since the stack pointer will never be pushed, it is possible for
4420 the evaluation of a parm to clobber something we have already
4421 written to the stack. Since most function calls on RISC machines
4422 do not use the stack, this is uncommon, but must work correctly.
3c0fca12 4423
f73ad30e
JH
4424 Therefore, we save any area of the stack that was already written
4425 and that we are using. Here we set up to do this by making a new
4426 stack usage map from the old one.
3c0fca12 4427
f73ad30e
JH
4428 Another approach might be to try to reorder the argument
4429 evaluations to avoid this conflicting stack usage. */
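      /* A hypothetical instance of the conflict described above:
	 argument 0 has already been stored at [argblock+0..3] when
	 evaluating argument 1 emits a nested libcall whose own outgoing
	 arguments occupy [argblock+0..7]; without the save-area
	 bookkeeping below, the stored bytes of argument 0 would be
	 silently overwritten.  */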
3c0fca12 4430
f73ad30e 4431 needed = args_size.constant;
3c0fca12 4432
f73ad30e
JH
4433 /* Since we will be writing into the entire argument area, the
4434 map must be allocated for its entire size, not just the part that
4435 is the responsibility of the caller. */
5d059ed9 4436 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
ac294f0b 4437 needed += reg_parm_stack_space;
3c0fca12 4438
a20c5714 4439 poly_int64 limit = needed;
6dad9361 4440 if (ARGS_GROW_DOWNWARD)
a20c5714
RS
4441 limit += 1;
4442
4443 /* For polynomial sizes, this is the maximum possible size needed
4444 for arguments with a constant size and offset. */
4445 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4446 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4447 const_limit);
6dad9361 4448
5ed6ace5 4449 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
d9725c41 4450 stack_usage_map = stack_usage_map_buf;
3c0fca12 4451
f73ad30e 4452 if (initial_highest_arg_in_use)
2e09e75a
JM
4453 memcpy (stack_usage_map, initial_stack_usage_map,
4454 initial_highest_arg_in_use);
3c0fca12 4455
f73ad30e 4456 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
961192e1 4457 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
f73ad30e
JH
4458 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4459 needed = 0;
3c0fca12 4460
c39ada04 4461 /* We must be careful to use virtual regs before they're instantiated,
c22cacf3 4462 and real regs afterwards. Loop optimization, for example, can create
c39ada04
DD
4463 new libcalls after we've instantiated the virtual regs, and if we
4464 use virtuals anyway, they won't match the rtl patterns. */
3c0fca12 4465
c39ada04 4466 if (virtuals_instantiated)
0a81f074
RS
4467 argblock = plus_constant (Pmode, stack_pointer_rtx,
4468 STACK_POINTER_OFFSET);
c39ada04
DD
4469 else
4470 argblock = virtual_outgoing_args_rtx;
f73ad30e
JH
4471 }
4472 else
4473 {
967b4653 4474 if (!targetm.calls.push_argument (0))
a20c5714 4475 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
f73ad30e 4476 }
3c0fca12 4477
3d9684ae 4478	/* We push args individually in reverse order, performing stack
3c0fca12 4479	   alignment before the first push (the last arg).  */
3d9684ae 4480 if (argblock == 0)
a20c5714
RS
4481 anti_adjust_stack (gen_int_mode (args_size.constant
4482 - original_args_size.constant,
4483 Pmode));
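  /* E.g. (illustrative): if the args proper occupy 20 bytes but
     alignment rounded args_size.constant up to 24, this pre-push
     adjustment drops the stack pointer by the 4 padding bytes so the
     final push ends on the required boundary.  */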
3c0fca12 4484
3d9684ae 4485 argnum = nargs - 1;
3c0fca12 4486
f73ad30e
JH
4487#ifdef REG_PARM_STACK_SPACE
4488 if (ACCUMULATE_OUTGOING_ARGS)
4489 {
 4490	 /* The argument list is the property of the called routine, which
 4491	 may clobber it.  If the fixed area has been used for previous
b820d2b8
AM
4492 parameters, we must save and restore it. */
4493 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4494 &low_to_save, &high_to_save);
3c0fca12
RH
4495 }
4496#endif
f725a3ec 4497
2f21e1ba
BS
4498 /* When expanding a normal call, args are stored in push order,
4499 which is the reverse of what we have here. */
4500 bool any_regs = false;
4501 for (int i = nargs; i-- > 0; )
4502 if (argvec[i].reg != NULL_RTX)
4503 {
4504 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4505 any_regs = true;
4506 }
4507 if (!any_regs)
4508 targetm.calls.call_args (pc_rtx, NULL_TREE);
4509
3c0fca12
RH
4510 /* Push the args that need to be pushed. */
4511
0ed4bf92
BS
4512 have_push_fusage = false;
4513
3c0fca12
RH
4514 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4515 are to be pushed. */
3d9684ae 4516 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4517 {
ef4bddc2 4518 machine_mode mode = argvec[argnum].mode;
b3694847 4519 rtx val = argvec[argnum].value;
3c0fca12
RH
4520 rtx reg = argvec[argnum].reg;
4521 int partial = argvec[argnum].partial;
6bdf8c2e 4522 unsigned int parm_align = argvec[argnum].locate.boundary;
a20c5714 4523 poly_int64 lower_bound = 0, upper_bound = 0;
3c0fca12
RH
4524
4525 if (! (reg != 0 && partial == 0))
4526 {
2b1c5433
JJ
4527 rtx use;
4528
f73ad30e
JH
4529 if (ACCUMULATE_OUTGOING_ARGS)
4530 {
f8a097cd
JH
4531 /* If this is being stored into a pre-allocated, fixed-size,
4532 stack area, save any previous data at that location. */
3c0fca12 4533
6dad9361
TS
4534 if (ARGS_GROW_DOWNWARD)
4535 {
4536 /* stack_slot is negative, but we want to index stack_usage_map
4537 with positive values. */
4538 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4539 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4540 }
4541 else
4542 {
4543 lower_bound = argvec[argnum].locate.slot_offset.constant;
4544 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4545 }
3c0fca12 4546
a20c5714
RS
4547 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4548 reg_parm_stack_space))
f73ad30e 4549 {
e7949876 4550 /* We need to make a save area. */
a20c5714 4551 poly_uint64 size
e7949876 4552 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
ef4bddc2 4553 machine_mode save_mode
f4b31647 4554 = int_mode_for_size (size, 1).else_blk ();
e7949876 4555 rtx adr
0a81f074 4556 = plus_constant (Pmode, argblock,
e7949876 4557 argvec[argnum].locate.offset.constant);
f73ad30e 4558 rtx stack_area
e7949876 4559 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
f73ad30e 4560
9778f2f8
JH
4561 if (save_mode == BLKmode)
4562 {
4563 argvec[argnum].save_area
4564 = assign_stack_temp (BLKmode,
9474e8ab
MM
4565 argvec[argnum].locate.size.constant
4566 );
9778f2f8 4567
1a8cb155
RS
4568 emit_block_move (validize_mem
4569 (copy_rtx (argvec[argnum].save_area)),
c22cacf3 4570 stack_area,
a20c5714
RS
4571 (gen_int_mode
4572 (argvec[argnum].locate.size.constant,
4573 Pmode)),
9778f2f8
JH
4574 BLOCK_OP_CALL_PARM);
4575 }
4576 else
4577 {
4578 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4579
4580 emit_move_insn (argvec[argnum].save_area, stack_area);
4581 }
f73ad30e 4582 }
3c0fca12 4583 }
19caa751 4584
6bdf8c2e 4585 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
44bb111a 4586 partial, reg, 0, argblock,
a20c5714
RS
4587 (gen_int_mode
4588 (argvec[argnum].locate.offset.constant, Pmode)),
e7949876 4589 reg_parm_stack_space,
99206968 4590 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
3c0fca12 4591
3c0fca12 4592 /* Now mark the segment we just used. */
f73ad30e 4593 if (ACCUMULATE_OUTGOING_ARGS)
a20c5714 4594 mark_stack_region_used (lower_bound, upper_bound);
3c0fca12
RH
4595
4596 NO_DEFER_POP;
475a3eef 4597
e53b6e56 4598 /* Indicate argument access so that alias.cc knows that these
2b1c5433
JJ
4599 values are live. */
4600 if (argblock)
0a81f074 4601 use = plus_constant (Pmode, argblock,
2b1c5433 4602 argvec[argnum].locate.offset.constant);
0ed4bf92
BS
4603 else if (have_push_fusage)
4604 continue;
2b1c5433 4605 else
0ed4bf92 4606 {
e53b6e56 4607 /* When arguments are pushed, trying to tell alias.cc where
0ed4bf92
BS
4608 exactly this argument is won't work, because the
4609 auto-increment causes confusion. So we merely indicate
4610 that we access something with a known mode somewhere on
4611 the stack. */
4612 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4613 gen_rtx_SCRATCH (Pmode));
4614 have_push_fusage = true;
4615 }
2b1c5433
JJ
4616 use = gen_rtx_MEM (argvec[argnum].mode, use);
4617 use = gen_rtx_USE (VOIDmode, use);
4618 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
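	  /* The entry just added looks roughly like (illustrative rtl):
	       (expr_list (use (mem:SI (plus (reg sp) (scratch)))) ...)
	     i.e. "some slot near the stack pointer is read", which is as
	     precise as alias.cc can be told in the push case.  */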
3c0fca12
RH
4619 }
4620 }
4621
3d9684ae 4622 argnum = nargs - 1;
3c0fca12 4623
531ca746 4624 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
3c0fca12
RH
4625
4626 /* Now load any reg parms into their regs. */
4627
4628 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4629 are to be pushed. */
3d9684ae 4630 for (count = 0; count < nargs; count++, argnum--)
3c0fca12 4631 {
ef4bddc2 4632 machine_mode mode = argvec[argnum].mode;
b3694847 4633 rtx val = argvec[argnum].value;
3c0fca12
RH
4634 rtx reg = argvec[argnum].reg;
4635 int partial = argvec[argnum].partial;
460b171d 4636
3c0fca12
RH
4637 /* Handle calls that pass values in multiple non-contiguous
4638 locations. The PA64 has examples of this for library calls. */
4639 if (reg != 0 && GET_CODE (reg) == PARALLEL)
ff15c351 4640 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3c0fca12 4641 else if (reg != 0 && partial == 0)
460b171d
JB
4642 {
4643 emit_move_insn (reg, val);
4644#ifdef BLOCK_REG_PADDING
cf098191 4645 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
460b171d
JB
4646
4647 /* Copied from load_register_parameters. */
4648
 4649	  /* Handle the case where we have a value that needs shifting
 4650	     up to the msb, e.g. a QImode value that we're padding
 4651	     upward on a BYTES_BIG_ENDIAN machine.  */
cf098191 4652 if (known_lt (size, UNITS_PER_WORD)
460b171d 4653 && (argvec[argnum].locate.where_pad
76b0cbf8 4654 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
460b171d
JB
4655 {
4656 rtx x;
cf098191 4657 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
460b171d
JB
4658
4659 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4660 report the whole reg as used. Strictly speaking, the
4661 call only uses SIZE bytes at the msb end, but it doesn't
4662 seem worth generating rtl to say that. */
4663 reg = gen_rtx_REG (word_mode, REGNO (reg));
4664 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4665 if (x != reg)
4666 emit_move_insn (reg, x);
4667 }
4668#endif
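	  /* Worked example (illustrative): a QImode value 0x12 in a
	     4-byte word_mode register has size == 1, so
	     shift == (4 - 1) * 8 == 24 and the register ends up holding
	     0x12000000, placing the byte at the msb end as the padding
	     rule requires.  */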
4669 }
3c0fca12
RH
4670
4671 NO_DEFER_POP;
4672 }
4673
3c0fca12
RH
4674 /* Any regs containing parms remain in use through the call. */
4675 for (count = 0; count < nargs; count++)
4676 {
4677 rtx reg = argvec[count].reg;
4678 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4679 use_group_regs (&call_fusage, reg);
4680 else if (reg != 0)
3b1bf459
BS
4681 {
4682 int partial = argvec[count].partial;
4683 if (partial)
4684 {
4685 int nregs;
4686 gcc_assert (partial % UNITS_PER_WORD == 0);
4687 nregs = partial / UNITS_PER_WORD;
4688 use_regs (&call_fusage, REGNO (reg), nregs);
4689 }
4690 else
4691 use_reg (&call_fusage, reg);
4692 }
3c0fca12
RH
4693 }
4694
4695 /* Pass the function the address in which to return a structure value. */
61f71b34 4696 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3c0fca12 4697 {
61f71b34 4698 emit_move_insn (struct_value,
3c0fca12
RH
4699 force_reg (Pmode,
4700 force_operand (XEXP (mem_value, 0),
4701 NULL_RTX)));
f8cfc6aa 4702 if (REG_P (struct_value))
61f71b34 4703 use_reg (&call_fusage, struct_value);
3c0fca12
RH
4704 }
4705
4706 /* Don't allow popping to be deferred, since then
4707 cse'ing of library calls could delete a call and leave the pop. */
4708 NO_DEFER_POP;
5591ee6f 4709 valreg = (mem_value == 0 && outmode != VOIDmode
390b17c2 4710 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
3c0fca12 4711
ce48579b 4712 /* Stack must be properly aligned now. */
a20c5714
RS
4713 gcc_assert (multiple_p (stack_pointer_delta,
4714 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
ebcd0b57 4715
695ee791
RH
4716 before_call = get_last_insn ();
4717
3cf3da88
EB
4718 if (flag_callgraph_info)
4719 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
4720
3c0fca12
RH
4721 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4722 will set inhibit_defer_pop to that value. */
de76b467
JH
4723 /* The return type is needed to decide how many bytes the function pops.
4724 Signedness plays no role in that, so for simplicity, we pretend it's
4725 always signed. We also assume that the list of arguments passed has
4726 no impact, so we pretend it is unknown. */
3c0fca12 4727
6de9cd9a 4728 emit_call_1 (fun, NULL,
f725a3ec 4729 get_identifier (XSTR (orgfun, 0)),
b0c48229 4730 build_function_type (tfom, NULL_TREE),
f725a3ec 4731 original_args_size.constant, args_size.constant,
3c0fca12 4732 struct_value_size,
d5cc9181 4733 targetm.calls.function_arg (args_so_far,
6783fdb7 4734 function_arg_info::end_marker ()),
5591ee6f 4735 valreg,
d5cc9181 4736 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
3c0fca12 4737
1e288103 4738 if (flag_ipa_ra)
4f660b15 4739 {
e67d1102 4740 rtx datum = orgfun;
4f660b15 4741 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
e67d1102 4742 rtx_call_insn *last = last_call_insn ();
4f660b15
RO
4743 add_reg_note (last, REG_CALL_DECL, datum);
4744 }
4745
460b171d
JB
4746 /* Right-shift returned value if necessary. */
4747 if (!pcc_struct_value
4748 && TYPE_MODE (tfom) != BLKmode
4749 && targetm.calls.return_in_msb (tfom))
4750 {
4751 shift_return_value (TYPE_MODE (tfom), false, valreg);
4752 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4753 }
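  /* E.g. (illustrative): on a target whose return_in_msb hook is true,
     an HImode result arrives in the top 16 bits of a 32-bit register;
     shift_return_value moves it down so VALREG can be read as a normal
     HImode register below.  */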
4754
2f21e1ba
BS
4755 targetm.calls.end_call_args ();
4756
e53b6e56 4757 /* For calls to `setjmp', etc., inform function.cc:setjmp_warnings
6fb5fa3c
DB
4758 that it should complain if nonvolatile values are live. For
4759 functions that cannot return, inform flow that control does not
4760 fall through. */
6e14af16 4761 if (flags & ECF_NORETURN)
695ee791 4762 {
570a98eb 4763 /* The barrier note must be emitted
695ee791
RH
4764 immediately after the CALL_INSN. Some ports emit more than
4765 just a CALL_INSN above, so we must search for it here. */
48810515 4766 rtx_insn *last = get_last_insn ();
4b4bf941 4767 while (!CALL_P (last))
695ee791
RH
4768 {
4769 last = PREV_INSN (last);
4770 /* There was no CALL_INSN? */
366de0ce 4771 gcc_assert (last != before_call);
695ee791
RH
4772 }
4773
570a98eb 4774 emit_barrier_after (last);
695ee791
RH
4775 }
4776
85da11a6
EB
4777 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4778 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4779 if (flags & ECF_NOTHROW)
4780 {
48810515 4781 rtx_insn *last = get_last_insn ();
85da11a6
EB
4782 while (!CALL_P (last))
4783 {
4784 last = PREV_INSN (last);
4785 /* There was no CALL_INSN? */
4786 gcc_assert (last != before_call);
4787 }
4788
4789 make_reg_eh_region_note_nothrow_nononlocal (last);
4790 }
4791
3c0fca12
RH
4792 /* Now restore inhibit_defer_pop to its actual original value. */
4793 OK_DEFER_POP;
4794
4795 pop_temp_slots ();
4796
4797 /* Copy the value to the right place. */
de76b467 4798 if (outmode != VOIDmode && retval)
3c0fca12
RH
4799 {
4800 if (mem_value)
4801 {
4802 if (value == 0)
4803 value = mem_value;
4804 if (value != mem_value)
4805 emit_move_insn (value, mem_value);
4806 }
c3297561
AO
4807 else if (GET_CODE (valreg) == PARALLEL)
4808 {
4809 if (value == 0)
4810 value = gen_reg_rtx (outmode);
643642eb 4811 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
c3297561 4812 }
3c0fca12 4813 else
7ab0aca2 4814 {
cde0f3fd 4815 /* Convert to the proper mode if a promotion has been active. */
7ab0aca2
RH
4816 if (GET_MODE (valreg) != outmode)
4817 {
4818 int unsignedp = TYPE_UNSIGNED (tfom);
4819
cde0f3fd
PB
4820 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4821 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
7ab0aca2 4822 == GET_MODE (valreg));
7ab0aca2
RH
4823 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4824 }
4825
4826 if (value != 0)
4827 emit_move_insn (value, valreg);
4828 else
4829 value = valreg;
4830 }
3c0fca12
RH
4831 }
4832
f73ad30e 4833 if (ACCUMULATE_OUTGOING_ARGS)
3c0fca12 4834 {
f73ad30e
JH
4835#ifdef REG_PARM_STACK_SPACE
4836 if (save_area)
b820d2b8
AM
4837 restore_fixed_argument_area (save_area, argblock,
4838 high_to_save, low_to_save);
3c0fca12 4839#endif
f725a3ec 4840
f73ad30e
JH
4841 /* If we saved any argument areas, restore them. */
4842 for (count = 0; count < nargs; count++)
4843 if (argvec[count].save_area)
4844 {
ef4bddc2 4845 machine_mode save_mode = GET_MODE (argvec[count].save_area);
0a81f074 4846 rtx adr = plus_constant (Pmode, argblock,
e7949876
AM
4847 argvec[count].locate.offset.constant);
4848 rtx stack_area = gen_rtx_MEM (save_mode,
4849 memory_address (save_mode, adr));
f73ad30e 4850
9778f2f8
JH
4851 if (save_mode == BLKmode)
4852 emit_block_move (stack_area,
1a8cb155
RS
4853 validize_mem
4854 (copy_rtx (argvec[count].save_area)),
a20c5714
RS
4855 (gen_int_mode
4856 (argvec[count].locate.size.constant, Pmode)),
9778f2f8
JH
4857 BLOCK_OP_CALL_PARM);
4858 else
4859 emit_move_insn (stack_area, argvec[count].save_area);
f73ad30e 4860 }
3c0fca12 4861
f73ad30e
JH
4862 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4863 stack_usage_map = initial_stack_usage_map;
a20c5714 4864 stack_usage_watermark = initial_stack_usage_watermark;
f73ad30e 4865 }
43bc5f13 4866
04695783 4867 free (stack_usage_map_buf);
d9725c41 4868
de76b467
JH
4869 return value;
4870
4871}
4872\f
d5e254e1 4873
51bbfa0c
RS
4874/* Store a single argument for a function call
4875 into the register or memory area where it must be passed.
4876 *ARG describes the argument value and where to pass it.
4877
4878 ARGBLOCK is the address of the stack-block for all the arguments,
d45cf215 4879 or 0 on a machine where arguments are pushed individually.
51bbfa0c
RS
4880
4881 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
f725a3ec 4882	 so we must be careful about how the stack is used.
51bbfa0c
RS
4883
4884 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
 4885	 argument stack.  This is used, when ACCUMULATE_OUTGOING_ARGS is set,
 4886	 to indicate that we need not worry about saving and restoring the stack.
4887
4c6b3b2a 4888 FNDECL is the declaration of the function we are calling.
f725a3ec 4889
4c7d264e
UB
4890 Return true if this arg should cause sibcall failure,
4891 false otherwise. */
51bbfa0c 4892
4c7d264e 4893static bool
d329e058
AJ
4894store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4895 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
51bbfa0c 4896{
b3694847 4897 tree pval = arg->tree_value;
51bbfa0c
RS
4898 rtx reg = 0;
4899 int partial = 0;
a20c5714
RS
4900 poly_int64 used = 0;
4901 poly_int64 lower_bound = 0, upper_bound = 0;
4c7d264e 4902 bool sibcall_failure = false;
51bbfa0c
RS
4903
4904 if (TREE_CODE (pval) == ERROR_MARK)
4c7d264e 4905 return true;
51bbfa0c 4906
cc79451b
RK
4907 /* Push a new temporary level for any temporaries we make for
4908 this argument. */
4909 push_temp_slots ();
4910
f8a097cd 4911 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
51bbfa0c 4912 {
f73ad30e
JH
4913 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4914 save any previous data at that location. */
4915 if (argblock && ! variable_size && arg->stack)
4916 {
6dad9361
TS
4917 if (ARGS_GROW_DOWNWARD)
4918 {
4919 /* stack_slot is negative, but we want to index stack_usage_map
4920 with positive values. */
4921 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
4922 {
4923 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4924 upper_bound = -rtx_to_poly_int64 (offset) + 1;
4925 }
6dad9361
TS
4926 else
4927 upper_bound = 0;
51bbfa0c 4928
6dad9361
TS
4929 lower_bound = upper_bound - arg->locate.size.constant;
4930 }
f73ad30e 4931 else
6dad9361
TS
4932 {
4933 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
a20c5714
RS
4934 {
4935 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
4936 lower_bound = rtx_to_poly_int64 (offset);
4937 }
6dad9361
TS
4938 else
4939 lower_bound = 0;
51bbfa0c 4940
6dad9361
TS
4941 upper_bound = lower_bound + arg->locate.size.constant;
4942 }
51bbfa0c 4943
a20c5714
RS
4944 if (stack_region_maybe_used_p (lower_bound, upper_bound,
4945 reg_parm_stack_space))
51bbfa0c 4946 {
e7949876 4947 /* We need to make a save area. */
a20c5714 4948 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
f4b31647
RS
4949 machine_mode save_mode
4950 = int_mode_for_size (size, 1).else_blk ();
e7949876
AM
4951 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4952 rtx stack_area = gen_rtx_MEM (save_mode, adr);
f73ad30e
JH
4953
4954 if (save_mode == BLKmode)
4955 {
9ee5337d
EB
4956 arg->save_area
4957 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
f73ad30e 4958 preserve_temp_slots (arg->save_area);
1a8cb155
RS
4959 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4960 stack_area,
a20c5714
RS
4961 (gen_int_mode
4962 (arg->locate.size.constant, Pmode)),
44bb111a 4963 BLOCK_OP_CALL_PARM);
f73ad30e
JH
4964 }
4965 else
4966 {
4967 arg->save_area = gen_reg_rtx (save_mode);
4968 emit_move_insn (arg->save_area, stack_area);
4969 }
51bbfa0c
RS
4970 }
4971 }
4972 }
b564df06 4973
51bbfa0c
RS
4974 /* If this isn't going to be placed on both the stack and in registers,
4975 set up the register and number of words. */
4976 if (! arg->pass_on_stack)
aa7634dd
DM
4977 {
4978 if (flags & ECF_SIBCALL)
4979 reg = arg->tail_call_reg;
4980 else
4981 reg = arg->reg;
4982 partial = arg->partial;
4983 }
51bbfa0c 4984
366de0ce
NS
4985 /* Being passed entirely in a register. We shouldn't be called in
4986 this case. */
4987 gcc_assert (reg == 0 || partial != 0);
c22cacf3 4988
4ab56118
RK
4989 /* If this arg needs special alignment, don't load the registers
4990 here. */
4991 if (arg->n_aligned_regs != 0)
4992 reg = 0;
f725a3ec 4993
4ab56118 4994 /* If this is being passed partially in a register, we can't evaluate
51bbfa0c
RS
4995 it directly into its stack slot. Otherwise, we can. */
4996 if (arg->value == 0)
d64f5a78 4997 {
d64f5a78
RS
4998 /* stack_arg_under_construction is nonzero if a function argument is
4999 being evaluated directly into the outgoing argument list and
5000 expand_call must take special action to preserve the argument list
5001 if it is called recursively.
5002
5003 For scalar function arguments stack_usage_map is sufficient to
5004 determine which stack slots must be saved and restored. Scalar
4c7d264e 5005 arguments in general have pass_on_stack == false.
d64f5a78
RS
5006
5007 If this argument is initialized by a function which takes the
5008 address of the argument (a C++ constructor or a C function
5009 returning a BLKmode structure), then stack_usage_map is
5010 insufficient and expand_call must push the stack around the
4c7d264e 5011 function call. Such arguments have pass_on_stack == true.
d64f5a78
RS
5012
5013 Note that it is always safe to set stack_arg_under_construction,
5014 but this generates suboptimal code if set when not needed. */
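      /* A hypothetical case needing it: in `f (g ())', where g returns
	 a BLKmode struct directly into f's outgoing argument slot,
	 expanding g's own call could reuse that slot; the counter makes
	 expand_call save and restore the area around the nested call.  */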
5015
5016 if (arg->pass_on_stack)
5017 stack_arg_under_construction++;
f73ad30e 5018
3a08477a
RK
5019 arg->value = expand_expr (pval,
5020 (partial
5021 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5022 ? NULL_RTX : arg->stack,
8403445a 5023 VOIDmode, EXPAND_STACK_PARM);
1efe6448
RK
5024
5025 /* If we are promoting object (or for any other reason) the mode
5026 doesn't agree, convert the mode. */
5027
7373d92d
RK
5028 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5029 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5030 arg->value, arg->unsignedp);
1efe6448 5031
d64f5a78
RS
5032 if (arg->pass_on_stack)
5033 stack_arg_under_construction--;
d64f5a78 5034 }
51bbfa0c 5035
0dc42b03 5036 /* Check for overlap with already clobbered argument area. */
07eef816
KH
5037 if ((flags & ECF_SIBCALL)
5038 && MEM_P (arg->value)
a20c5714
RS
5039 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5040 arg->locate.size.constant))
4c7d264e 5041 sibcall_failure = true;
0dc42b03 5042
51bbfa0c
RS
5043 /* Don't allow anything left on stack from computation
5044 of argument to alloca. */
f8a097cd 5045 if (flags & ECF_MAY_BE_ALLOCA)
51bbfa0c
RS
5046 do_pending_stack_adjust ();
5047
5048 if (arg->value == arg->stack)
37a08a29
RK
5049 /* If the value is already in the stack slot, we are done. */
5050 ;
1efe6448 5051 else if (arg->mode != BLKmode)
51bbfa0c 5052 {
46bd2bee 5053 unsigned int parm_align;
51bbfa0c
RS
5054
5055 /* Argument is a scalar, not entirely passed in registers.
5056 (If part is passed in registers, arg->partial says how much
5057 and emit_push_insn will take care of putting it there.)
f725a3ec 5058
51bbfa0c
RS
5059 Push it, and if its size is less than the
5060 amount of space allocated to it,
5061 also bump stack pointer by the additional space.
5062 Note that in C the default argument promotions
5063 will prevent such mismatches. */
5064
7b4df2bf
RS
5065 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5066 ? 0 : GET_MODE_SIZE (arg->mode));
974aedcc 5067
51bbfa0c
RS
5068 /* Compute how much space the push instruction will push.
5069 On many machines, pushing a byte will advance the stack
5070 pointer by a halfword. */
5071#ifdef PUSH_ROUNDING
5072 size = PUSH_ROUNDING (size);
5073#endif
5074 used = size;
5075
5076 /* Compute how much space the argument should get:
5077 round up to a multiple of the alignment for arguments. */
76b0cbf8
RS
5078 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5079 != PAD_NONE)
7b4df2bf
RS
5080 /* At the moment we don't (need to) support ABIs for which the
5081 padding isn't known at compile time. In principle it should
5082 be easy to add though. */
5083 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
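      /* E.g. (illustrative): a 1-byte argument with PARM_BOUNDARY == 32
	 gives used = force_align_up (1, 4) == 4, so the value plus three
	 bytes of padding occupy one full parameter word.  */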
51bbfa0c 5084
46bd2bee
JM
5085 /* Compute the alignment of the pushed argument. */
5086 parm_align = arg->locate.boundary;
76b0cbf8
RS
5087 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5088 == PAD_DOWNWARD)
46bd2bee 5089 {
a20c5714
RS
5090 poly_int64 pad = used - size;
5091 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5092 if (pad_align != 0)
5093 parm_align = MIN (parm_align, pad_align);
46bd2bee
JM
5094 }
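      /* Worked example (illustrative): used == 4 and size == 1 give
	 pad == 3; known_alignment (3) is 1 byte, so parm_align drops to
	 8 bits, reflecting that the value itself sits at the unaligned
	 end of the downward-padded slot.  */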
5095
51bbfa0c
RS
5096 /* This isn't already where we want it on the stack, so put it there.
5097 This can either be done with push or copy insns. */
a20c5714 5098 if (maybe_ne (used, 0)
974aedcc
MP
5099 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5100 NULL_RTX, parm_align, partial, reg, used - size,
5101 argblock, ARGS_SIZE_RTX (arg->locate.offset),
5102 reg_parm_stack_space,
5103 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
4c7d264e 5104 sibcall_failure = true;
841404cd
AO
5105
5106 /* Unless this is a partially-in-register argument, the argument is now
5107 in the stack. */
5108 if (partial == 0)
5109 arg->value = arg->stack;
51bbfa0c
RS
5110 }
5111 else
5112 {
5113 /* BLKmode, at least partly to be pushed. */
5114
1b1f20ca 5115 unsigned int parm_align;
a20c5714 5116 poly_int64 excess;
51bbfa0c
RS
5117 rtx size_rtx;
5118
5119 /* Pushing a nonscalar.
5120 If part is passed in registers, PARTIAL says how much
5121 and emit_push_insn will take care of putting it there. */
5122
5123 /* Round its size up to a multiple
5124 of the allocation unit for arguments. */
5125
e7949876 5126 if (arg->locate.size.var != 0)
51bbfa0c
RS
5127 {
5128 excess = 0;
e7949876 5129 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
51bbfa0c
RS
5130 }
5131 else
5132 {
78a52f11
RH
5133 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5134 for BLKmode is careful to avoid it. */
5135 excess = (arg->locate.size.constant
974aedcc 5136 - arg_int_size_in_bytes (TREE_TYPE (pval))
78a52f11 5137 + partial);
974aedcc 5138 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
bbbbb16a
ILT
5139 NULL_RTX, TYPE_MODE (sizetype),
5140 EXPAND_NORMAL);
51bbfa0c
RS
5141 }
5142
bfc45551 5143 parm_align = arg->locate.boundary;
1b1f20ca
RH
5144
5145 /* When an argument is padded down, the block is aligned to
5146 PARM_BOUNDARY, but the actual argument isn't. */
76b0cbf8
RS
5147 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5148 == PAD_DOWNWARD)
1b1f20ca 5149 {
e7949876 5150 if (arg->locate.size.var)
1b1f20ca 5151 parm_align = BITS_PER_UNIT;
a20c5714 5152 else
1b1f20ca 5153 {
a20c5714
RS
5154 unsigned int excess_align
5155 = known_alignment (excess) * BITS_PER_UNIT;
5156 if (excess_align != 0)
5157 parm_align = MIN (parm_align, excess_align);
1b1f20ca
RH
5158 }
5159 }
5160
3c0cb5de 5161 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4c6b3b2a
JJ
5162 {
5163 /* emit_push_insn might not work properly if arg->value and
e7949876 5164 argblock + arg->locate.offset areas overlap. */
4c6b3b2a 5165 rtx x = arg->value;
a20c5714 5166 poly_int64 i = 0;
4c6b3b2a 5167
5284e559
RS
5168 if (strip_offset (XEXP (x, 0), &i)
5169 == crtl->args.internal_arg_pointer)
4c6b3b2a 5170 {
b3877860
KT
5171 /* arg.locate doesn't contain the pretend_args_size offset,
5172 it's part of argblock. Ensure we don't count it in I. */
5173 if (STACK_GROWS_DOWNWARD)
5174 i -= crtl->args.pretend_args_size;
5175 else
5176 i += crtl->args.pretend_args_size;
5177
e0a21ab9 5178 /* expand_call should ensure this. */
366de0ce 5179 gcc_assert (!arg->locate.offset.var
a20c5714
RS
5180 && arg->locate.size.var == 0);
5181 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
4c6b3b2a 5182
a20c5714 5183 if (known_eq (arg->locate.offset.constant, i))
d6c2c77c
JC
5184 {
5185 /* Even though they appear to be at the same location,
5186 if part of the outgoing argument is in registers,
5187 they aren't really at the same location. Check for
5188 this by making sure that the incoming size is the
5189 same as the outgoing size. */
a20c5714 5190 if (maybe_ne (arg->locate.size.constant, size_val))
4c7d264e 5191 sibcall_failure = true;
4c6b3b2a 5192 }
a20c5714
RS
5193 else if (maybe_in_range_p (arg->locate.offset.constant,
5194 i, size_val))
4c7d264e 5195 sibcall_failure = true;
a20c5714
RS
5196 /* Use arg->locate.size.constant instead of size_rtx
5197 because we only care about the part of the argument
5198 on the stack. */
5199 else if (maybe_in_range_p (i, arg->locate.offset.constant,
5200 arg->locate.size.constant))
4c7d264e 5201 sibcall_failure = true;
4c6b3b2a
JJ
5202 }
5203 }
5204
974aedcc
MP
5205 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5206 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5207 parm_align, partial, reg, excess, argblock,
5208 ARGS_SIZE_RTX (arg->locate.offset),
5209 reg_parm_stack_space,
5210 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
e6e6e0a9
JJ
5211 /* If we bypass emit_push_insn because it is a zero sized argument,
5212 we still might need to adjust stack if such argument requires
5213 extra alignment. See PR104558. */
5214 else if ((arg->locate.alignment_pad.var
5215 || maybe_ne (arg->locate.alignment_pad.constant, 0))
5216 && !argblock)
5217 anti_adjust_stack (ARGS_SIZE_RTX (arg->locate.alignment_pad));
51bbfa0c 5218
841404cd
AO
5219 /* Unless this is a partially-in-register argument, the argument is now
5220 in the stack.
51bbfa0c 5221
841404cd
AO
5222 ??? Unlike the case above, in which we want the actual
5223 address of the data, so that we can load it directly into a
5224 register, here we want the address of the stack slot, so that
5225 it's properly aligned for word-by-word copying or something
5226 like that. It's not clear that this is always correct. */
5227 if (partial == 0)
5228 arg->value = arg->stack_slot;
5229 }
8df3dbb7
RH
5230
5231 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5232 {
5233 tree type = TREE_TYPE (arg->tree_value);
5234 arg->parallel_value
5235 = emit_group_load_into_temps (arg->reg, arg->value, type,
5236 int_size_in_bytes (type));
5237 }
51bbfa0c 5238
8403445a
AM
5239 /* Mark all slots this store used. */
5240 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5241 && argblock && ! variable_size && arg->stack)
a20c5714 5242 mark_stack_region_used (lower_bound, upper_bound);
8403445a 5243
51bbfa0c
RS
5244 /* Once we have pushed something, pops can't safely
5245 be deferred during the rest of the arguments. */
5246 NO_DEFER_POP;
5247
9474e8ab 5248 /* Free any temporary slots made in processing this argument. */
cc79451b 5249 pop_temp_slots ();
4c6b3b2a
JJ
5250
5251 return sibcall_failure;
51bbfa0c 5252}
a4b1b92a 5253
0ffef200 5254/* Return true if we do not know how to pass ARG solely in registers. */
a4b1b92a 5255
fe984136 5256bool
0ffef200 5257must_pass_in_stack_var_size (const function_arg_info &arg)
fe984136 5258{
0ffef200 5259 if (!arg.type)
fe984136
RH
5260 return false;
5261
5262 /* If the type has variable size... */
c600df9a 5263 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
fe984136 5264 return true;
a4b1b92a 5265
fe984136
RH
5266 /* If the type is marked as addressable (it is required
5267 to be constructed into the stack)... */
0ffef200 5268 if (TREE_ADDRESSABLE (arg.type))
fe984136
RH
5269 return true;
5270
5271 return false;
5272}
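
/* E.g. (illustrative): a RECORD_TYPE whose size depends on a runtime
   bound (a variable-length array member in GNU C) has a TYPE_SIZE that
   is not a poly_int tree, so the test above forces it onto the stack.  */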
a4b1b92a 5273
7ae4ad28 5274/* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
fe984136
RH
5275 takes trailing padding of a structure into account. */
5276/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
a4b1b92a
RH
5277
5278bool
0ffef200 5279must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
a4b1b92a 5280{
0ffef200 5281 if (!arg.type)
40cdfd5a 5282 return false;
a4b1b92a
RH
5283
5284 /* If the type has variable size... */
0ffef200 5285 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
a4b1b92a
RH
5286 return true;
5287
5288 /* If the type is marked as addressable (it is required
5289 to be constructed into the stack)... */
0ffef200 5290 if (TREE_ADDRESSABLE (arg.type))
a4b1b92a
RH
5291 return true;
5292
0ffef200 5293 if (TYPE_EMPTY_P (arg.type))
974aedcc
MP
5294 return false;
5295
a4b1b92a
RH
5296 /* If the padding and mode of the type is such that a copy into
5297 a register would put it into the wrong part of the register. */
0ffef200
RS
5298 if (arg.mode == BLKmode
5299 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5300 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
76b0cbf8 5301 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
a4b1b92a
RH
5302 return true;
5303
5304 return false;
5305}
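
/* Worked example (illustrative): a 6-byte BLKmode struct with
   PARM_BOUNDARY == 64 satisfies int_size_in_bytes % 8 == 6 != 0; if the
   target pads such a value upward on a big-endian machine, copying it
   into a register would leave the bytes at the wrong end, so it must be
   passed on the stack.  */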
6bf29a7e 5306
4f53599c
RS
5307/* Return true if TYPE must be passed on the stack when passed to
5308 the "..." arguments of a function. */
5309
5310bool
5311must_pass_va_arg_in_stack (tree type)
5312{
0ffef200
RS
5313 function_arg_info arg (type, /*named=*/false);
5314 return targetm.calls.must_pass_in_stack (arg);
4f53599c
RS
5315}
5316
3bce7904
RS
5317/* Return true if FIELD is the C++17 empty base field that should
5318 be ignored for ABI calling convention decisions in order to
5319 maintain ABI compatibility between C++14 and earlier, which doesn't
5320 add this FIELD to classes with empty bases, and C++17 and later
5321 which does. */
5322
5323bool
5324cxx17_empty_base_field_p (const_tree field)
5325{
5326 return (DECL_FIELD_ABI_IGNORED (field)
5327 && DECL_ARTIFICIAL (field)
5328 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
5329 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
5330}
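
/* For example (illustrative C++):

     struct B {};
     struct D : B { int i; };

   In C++17 the B base subobject of D gets an artificial FIELD_DECL with
   DECL_FIELD_ABI_IGNORED set; treating that field as absent keeps D's
   calling convention identical to the C++14 layout, which never created
   the field.  */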